// lib25519-20241004/crypto_multiscalar/ed25519/amd64-maa4-p3/ge25519_double.S
#include "crypto_asm_hidden.h"
#define mask63 CRYPTO_SHARED_NAMESPACE(mask63)
// ge25519_double
//
// SysV AMD64: rdi = output point, rsi = input point (presumably
// ge25519 structs of 4x64-bit field-element limbs; confirm layout
// against the C header).  Doubles the input group element: computes
// the dbl result in p1p1 form, then converts to p3 (see the section
// comments below).  Field arithmetic is mod 2^255-19 in radix 2^64;
// reductions use 2^256 == 38 and 2^255 == 19.
// Uses a 288-byte, 32-byte-aligned stack frame for temporaries.
.p2align 5
ASM_HIDDEN _CRYPTO_NAMESPACE(ge25519_double)
.globl _CRYPTO_NAMESPACE(ge25519_double)
ASM_HIDDEN CRYPTO_NAMESPACE(ge25519_double)
.globl CRYPTO_NAMESPACE(ge25519_double)
_CRYPTO_NAMESPACE(ge25519_double):
CRYPTO_NAMESPACE(ge25519_double):
movq %rsp,%r11                  // save caller rsp before alignment
andq $-32,%rsp                  // 32-byte-align the frame
subq $288,%rsp
movq %r11,0(%rsp)               // spill caller rsp and callee-saved regs
movq %r12,8(%rsp)
movq %r13,16(%rsp)
movq %r14,24(%rsp)
movq %r15,32(%rsp)
movq %rbx,40(%rsp)
movq %rbp,48(%rsp)
movq %rdi,56(%rsp)              // stash output pointer for the p3 stores
movq %rsi,%rdi                  // rdi = input pointer from here on
/* dbl p1p1 */
// square: t0 = a^2 mod 2^255-19, a = input limbs 0..31
// (a0..a3 in rbx,rbp,rcx,rsi); result limbs r8,r10,r12,r14 -> 64..88(%rsp).
// Cross products a_i*a_j (i!=j) are accumulated twice; high limbs are
// folded down by *38 (2^256 == 38), the final top bit by *19 (2^255 == 19).
movq 0(%rdi),%rbx
movq 8(%rdi),%rbp
movq 16(%rdi),%rcx
movq 24(%rdi),%rsi
movq %rsi,%rax
mulq %rsi                       // a3*a3
movq %rax,%r12
xorq %r13,%r13
movq $38,%rax
mulq %rdx                       // 38*hi(a3^2): limb 7 folded down
movq %rax,%r14
movq %rdx,%r15
movq %rbp,%rax
mulq %rsi                       // a1*a3 (counted twice below)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rcx                       // a2*a2
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rsi                       // a2*a3 (twice)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10                       // scale high limbs by 38 to reduce
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq %rbx,%rax
mulq %rsi                       // a0*a3 (twice)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq %rbp,%rax
mulq %rcx                       // a1*a2 (twice)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq %rbx,%rax
mulq %rbx                       // a0*a0
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rbx,%rax
mulq %rbp                       // a0*a1 (twice)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq %rbx,%rax
mulq %rcx                       // a0*a2 (twice)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq %rbp,%rax
mulq %rbp                       // a1*a1
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10                   // propagate the 64-bit spill limbs
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15               // extract bit 255 ...
imul $19,%r15,%r15              // ... and fold it in as *19 (2^255 == 19)
andq mask63(%rip),%r14
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,64(%rsp)               // t0 = a^2
movq %r10,72(%rsp)
movq %r12,80(%rsp)
movq %r14,88(%rsp)
// square: t1 = b^2 mod 2^255-19, b = input limbs 32..63
// (same maa4 squaring sequence as above); result -> 96..120(%rsp)
movq 32(%rdi),%rbx
movq 40(%rdi),%rbp
movq 48(%rdi),%rcx
movq 56(%rdi),%rsi
movq %rsi,%rax
mulq %rsi                       // b3*b3
movq %rax,%r12
xorq %r13,%r13
movq $38,%rax
mulq %rdx                       // fold hi(b3^2) down by *38
movq %rax,%r14
movq %rdx,%r15
movq %rbp,%rax
mulq %rsi
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rcx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rsi
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq %rbx,%rax
mulq %rsi
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq %rbp,%rax
mulq %rcx
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq %rbx,%rax
mulq %rbx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rbx,%rax
mulq %rbp
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq %rbx,%rax
mulq %rcx
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq %rbp,%rax
mulq %rbp
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
imul $19,%r15,%r15
andq mask63(%rip),%r14
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,96(%rsp)               // t1 = b^2
movq %r10,104(%rsp)
movq %r12,112(%rsp)
movq %r14,120(%rsp)
// square: c^2 mod 2^255-19, c = input limbs 64..95
// (same maa4 squaring sequence); result kept in r8,r10,r12,r14
movq 64(%rdi),%rbx
movq 72(%rdi),%rbp
movq 80(%rdi),%rcx
movq 88(%rdi),%rsi
movq %rsi,%rax
mulq %rsi                       // c3*c3
movq %rax,%r12
xorq %r13,%r13
movq $38,%rax
mulq %rdx                       // fold hi(c3^2) down by *38
movq %rax,%r14
movq %rdx,%r15
movq %rbp,%rax
mulq %rsi
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rcx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rsi
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq %rbx,%rax
mulq %rsi
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq %rbp,%rax
mulq %rcx
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq %rbx,%rax
mulq %rbx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rbx,%rax
mulq %rbp
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq %rbx,%rax
mulq %rcx
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq %rbp,%rax
mulq %rbp
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
imul $19,%r15,%r15
andq mask63(%rip),%r14
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// double: t2 = 2*c^2 mod 2^255-19 -> 128..152(%rsp)
addq %r8,%r8
adcq %r10,%r10
adcq %r12,%r12
adcq %r14,%r14
movq $0,%rdx
movq $38,%rcx
cmovae %rdx,%rcx                // rcx = 38 iff the doubling carried out of 2^256
addq %rcx,%r8
adcq %rdx,%r10
adcq %rdx,%r12
adcq %rdx,%r14
cmovc %rcx,%rdx                 // a second carry can only occur when rcx was 38
addq %rdx,%r8
movq %r8,128(%rsp)              // t2 = 2*c^2
movq %r10,136(%rsp)
movq %r12,144(%rsp)
movq %r14,152(%rsp)
// neg: r8..r11 = 0 - t0 mod 2^255-19; written back over t0 at 64(%rsp)
movq $0,%r8
movq $0,%r9
movq $0,%r10
movq $0,%r11
subq 64(%rsp),%r8
sbbq 72(%rsp),%r9
sbbq 80(%rsp),%r10
sbbq 88(%rsp),%r11
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax                // rax = 38 only if the subtraction borrowed
subq %rax,%r8                   // fold borrow: -2^256 == -38 (mod 2^255-19)
sbbq %rdx,%r9
sbbq %rdx,%r10
sbbq %rdx,%r11
cmovc %rax,%rdx                 // at most one more borrow to fold
subq %rdx,%r8
movq %r8,64(%rsp)               // 64(%rsp) now holds -t0
movq %r9,72(%rsp)
movq %r10,80(%rsp)
movq %r11,88(%rsp)
// copy: keep -t0 in r12..r15 for the add at 192(%rsp) below
movq %r8,%r12
movq %r9,%r13
movq %r10,%r14
movq %r11,%r15
// sub: (-t0) - t1 mod 2^255-19 -> 224..248(%rsp)
subq 96(%rsp),%r8
sbbq 104(%rsp),%r9
sbbq 112(%rsp),%r10
sbbq 120(%rsp),%r11
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax                // 38 only if the subtraction borrowed
subq %rax,%r8
sbbq %rdx,%r9
sbbq %rdx,%r10
sbbq %rdx,%r11
cmovc %rax,%rdx
subq %rdx,%r8
movq %r8,224(%rsp)
movq %r9,232(%rsp)
movq %r10,240(%rsp)
movq %r11,248(%rsp)
// add: r12..r15 = (-t0) + t1 mod 2^255-19 -> 192..216(%rsp)
addq 96(%rsp),%r12
adcq 104(%rsp),%r13
adcq 112(%rsp),%r14
adcq 120(%rsp),%r15
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax                // rax = 38 iff the add carried out of 2^256
addq %rax,%r12                  // fold carry back in: 2^256 == 38
adcq %rdx,%r13
adcq %rdx,%r14
adcq %rdx,%r15
cmovc %rax,%rdx                 // second carry possible only when rax was 38
addq %rdx,%r12                  // BUGFIX: was "subq %rdx,%r12" — an addition carry
                                // must be folded in by ADDING 38, exactly as the
                                // other add-reduce sequences in this file do
                                // (cf. the double, a+b, and +(-t0) reductions);
                                // subtracting here corrupts the result whenever
                                // the second fold carries.
movq %r12,192(%rsp)
movq %r13,200(%rsp)
movq %r14,208(%rsp)
movq %r15,216(%rsp)
// sub: (t1 - t0) - t2 mod 2^255-19 -> 256..280(%rsp)
subq 128(%rsp),%r12
sbbq 136(%rsp),%r13
sbbq 144(%rsp),%r14
sbbq 152(%rsp),%r15
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax                // 38 only if the subtraction borrowed
subq %rax,%r12
sbbq %rdx,%r13
sbbq %rdx,%r14
sbbq %rdx,%r15
cmovc %rax,%rdx
subq %rdx,%r12
movq %r12,256(%rsp)
movq %r13,264(%rsp)
movq %r14,272(%rsp)
movq %r15,280(%rsp)
// add: a + b mod 2^255-19 (input limbs 0..31 plus 32..63), kept in rbx,rbp,rcx,rsi
movq 0(%rdi),%rbx
movq 8(%rdi),%rbp
movq 16(%rdi),%rcx
movq 24(%rdi),%rsi
addq 32(%rdi),%rbx
adcq 40(%rdi),%rbp
adcq 48(%rdi),%rcx
adcq 56(%rdi),%rsi
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax                // 38 iff the add carried out of 2^256
addq %rax,%rbx
adcq %rdx,%rbp
adcq %rdx,%rcx
adcq %rdx,%rsi
cmovc %rax,%rdx
addq %rdx,%rbx
// square: (a+b)^2 mod 2^255-19, operands already in rbx,rbp,rcx,rsi
// (same maa4 squaring sequence as the earlier squares)
movq %rsi,%rax
mulq %rsi
movq %rax,%r12
xorq %r13,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq %rbp,%rax
mulq %rsi
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rcx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rcx,%rax
mulq %rsi
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq %rbx,%rax
mulq %rsi
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq %rbp,%rax
mulq %rcx
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq %rbx,%rax
mulq %rbx
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq %rbx,%rax
mulq %rbp
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq %rbx,%rax
mulq %rcx
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq %rbp,%rax
mulq %rbp
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
imul $19,%r15,%r15
andq mask63(%rip),%r14
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
// add: (a+b)^2 + (-t0)   (64(%rsp) holds -t0 after the neg above)
addq 64(%rsp),%r8
adcq 72(%rsp),%r10
adcq 80(%rsp),%r12
adcq 88(%rsp),%r14
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
addq %rax,%r8
adcq %rdx,%r10
adcq %rdx,%r12
adcq %rdx,%r14
cmovc %rax,%rdx
addq %rdx,%r8
// sub: ... - t1 = (a+b)^2 - a^2 - b^2 = 2ab -> 160..184(%rsp)
subq 96(%rsp),%r8
sbbq 104(%rsp),%r10
sbbq 112(%rsp),%r12
sbbq 120(%rsp),%r14
movq $0,%rdx
movq $38,%rax
cmovae %rdx,%rax
subq %rax,%r8
sbbq %rdx,%r10
sbbq %rdx,%r12
sbbq %rdx,%r14
cmovc %rax,%rdx
subq %rdx,%r8
movq %r8,160(%rsp)
movq %r10,168(%rsp)
movq %r12,176(%rsp)
movq %r14,184(%rsp)
/* p1p1 to p3 */
movq 56(%rsp),%rdi              // rdi = output pointer again
// mul: out[0..31] = t(160) * t(256) mod 2^255-19
// (maa4 4x4 schoolbook product; high limbs folded by *38, top bit by *19)
movq 168(%rsp),%rax
mulq 280(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 176(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 184(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 176(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 184(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10                       // scale high limbs by 38 to reduce
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 184(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx                       // 38*hi(a3*b3) seeds limb 3
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 160(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 168(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 176(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 184(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 160(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 160(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 168(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 160(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 168(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 176(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10                   // propagate spill limbs
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15               // fold bit 255 in as *19
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,0(%rdi)                // first output coordinate
movq %r10,8(%rdi)
movq %r12,16(%rdi)
movq %r14,24(%rdi)
// mul: out[32..63] = t(192) * t(224) mod 2^255-19 (same sequence)
movq 200(%rsp),%rax
mulq 248(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 208(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 216(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 208(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 216(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 216(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 192(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 200(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 208(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 216(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 192(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 192(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 200(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 192(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 200(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 208(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,32(%rdi)               // second output coordinate
movq %r10,40(%rdi)
movq %r12,48(%rdi)
movq %r14,56(%rdi)
// mul: out[64..95] = t(192) * t(256) mod 2^255-19 (same sequence)
movq 200(%rsp),%rax
mulq 280(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 208(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 216(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 208(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 216(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 216(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 192(%rsp),%rax
mulq 280(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 200(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 208(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 216(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 192(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 192(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 200(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 192(%rsp),%rax
mulq 272(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 200(%rsp),%rax
mulq 264(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 208(%rsp),%rax
mulq 256(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,64(%rdi)               // third output coordinate
movq %r10,72(%rdi)
movq %r12,80(%rdi)
movq %r14,88(%rdi)
// mul: out[96..127] = t(160) * t(224) mod 2^255-19 (same sequence)
movq 168(%rsp),%rax
mulq 248(%rsp)
movq %rax,%r8
xorq %r9,%r9
movq %rdx,%r10
xorq %r11,%r11
movq 176(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 184(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 176(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r10
adcq $0,%r11
movq %rdx,%r12
xorq %r13,%r13
movq 184(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq $38,%rax
mulq %r10
imul $38,%r11,%r11
movq %rax,%r10
addq %rdx,%r11
movq 184(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r12
adcq $0,%r13
movq $38,%rax
mulq %rdx
movq %rax,%r14
movq %rdx,%r15
movq $38,%rax
mulq %r12
imul $38,%r13,%r13
movq %rax,%r12
addq %rdx,%r13
movq 160(%rsp),%rax
mulq 248(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 168(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 176(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq 184(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r14
adcq $0,%r15
addq %rdx,%r8
adcq $0,%r9
movq $38,%rax
mulq %r8
imul $38,%r9,%r9
movq %rax,%r8
addq %rdx,%r9
movq 160(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r8
adcq $0,%r9
addq %rdx,%r10
adcq $0,%r11
movq 160(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 168(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r10
adcq $0,%r11
addq %rdx,%r12
adcq $0,%r13
movq 160(%rsp),%rax
mulq 240(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 168(%rsp),%rax
mulq 232(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
movq 176(%rsp),%rax
mulq 224(%rsp)
addq %rax,%r12
adcq $0,%r13
addq %rdx,%r14
adcq $0,%r15
addq %r9,%r10
adcq $0,%r11
addq %r11,%r12
adcq $0,%r13
addq %r13,%r14
adcq $0,%r15
shld $1,%r14,%r15
andq mask63(%rip),%r14
imul $19,%r15,%r15
addq %r15,%r8
adcq $0,%r10
adcq $0,%r12
adcq $0,%r14
movq %r8,96(%rdi)               // fourth output coordinate
movq %r10,104(%rdi)
movq %r12,112(%rdi)
movq %r14,120(%rdi)
movq 0(%rsp),%r11               // reload caller rsp and callee-saved regs
movq 8(%rsp),%r12
movq 16(%rsp),%r13
movq 24(%rsp),%r14
movq 32(%rsp),%r15
movq 40(%rsp),%rbx
movq 48(%rsp),%rbp
movq %r11,%rsp                  // restore caller's stack pointer
ret
.section .note.GNU-stack,"",@progbits