Diffstat (limited to 'libs/libsodium/src/crypto_scalarmult/curve25519')
23 files changed, 4019 insertions, 0 deletions
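The patch below vendors libsodium's Curve25519 scalar-multiplication backends: the portable ref10 C implementation (x25519_ref10.c, which also rejects small-order public keys before running the Montgomery ladder) and the sandy2x AVX assembly implementation, together with the radix-2^51 field arithmetic they rely on. For orientation only — this sketch is not part of the patch, and assumes a standard libsodium build exposing <sodium.h> — the public API these backends sit behind is used like this:

    #include <stdio.h>
    #include <sodium.h>

    int
    main(void)
    {
        unsigned char alice_sk[crypto_scalarmult_curve25519_SCALARBYTES];
        unsigned char alice_pk[crypto_scalarmult_curve25519_BYTES];
        unsigned char bob_sk[crypto_scalarmult_curve25519_SCALARBYTES];
        unsigned char bob_pk[crypto_scalarmult_curve25519_BYTES];
        unsigned char alice_shared[crypto_scalarmult_curve25519_BYTES];
        unsigned char bob_shared[crypto_scalarmult_curve25519_BYTES];

        if (sodium_init() < 0) {
            return 1; /* library could not be initialized */
        }
        /* random 32-byte secret scalars; the implementations clamp them internally
           (t[0] &= 248; t[31] &= 127; t[31] |= 64;) */
        randombytes_buf(alice_sk, sizeof alice_sk);
        randombytes_buf(bob_sk, sizeof bob_sk);

        /* public keys: scalar multiplication of the base point (the mult_base path) */
        crypto_scalarmult_curve25519_base(alice_pk, alice_sk);
        crypto_scalarmult_curve25519_base(bob_pk, bob_sk);

        /* shared secrets: general scalar multiplication (the mult path);
           a non-zero return means the peer's point was rejected (e.g. small order) */
        if (crypto_scalarmult_curve25519(alice_shared, alice_sk, bob_pk) != 0 ||
            crypto_scalarmult_curve25519(bob_shared, bob_sk, alice_pk) != 0) {
            return 1;
        }
        /* in practice the raw shared secret is hashed before use as a key */
        printf("shared secrets match: %s\n",
               sodium_memcmp(alice_shared, bob_shared, sizeof alice_shared) == 0 ? "yes" : "no");
        return 0;
    }

The .mult and .mult_base members filled in at the bottom of x25519_ref10.c and curve25519_sandy2x.c are the implementation hooks behind crypto_scalarmult_curve25519() and crypto_scalarmult_curve25519_base(); the sandy2x variant is only compiled when HAVE_AVX_ASM is defined.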
diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.c b/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.c new file mode 100644 index 0000000000..7b93a7247b --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.c @@ -0,0 +1,159 @@ + +#include <stddef.h> +#include <stdint.h> + +#include "../scalarmult_curve25519.h" +#include "export.h" +#include "private/ed25519_ref10.h" +#include "utils.h" +#include "x25519_ref10.h" + +/* + * Reject small order points early to mitigate the implications of + * unexpected optimizations that would affect the ref10 code. + * See https://eprint.iacr.org/2017/806.pdf for reference. + */ +static int +has_small_order(const unsigned char s[32]) +{ + CRYPTO_ALIGN(16) + static const unsigned char blacklist[][32] = { + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0xe0, 0xeb, 0x7a, 0x7c, 0x3b, 0x41, 0xb8, 0xae, 0x16, 0x56, 0xe3, 0xfa, 0xf1, 0x9f, 0xc4, 0x6a, 0xda, 0x09, 0x8d, 0xeb, 0x9c, 0x32, 0xb1, 0xfd, 0x86, 0x62, 0x05, 0x16, 0x5f, 0x49, 0xb8, 0x00 }, + { 0x5f, 0x9c, 0x95, 0xbc, 0xa3, 0x50, 0x8c, 0x24, 0xb1, 0xd0, 0xb1, 0x55, 0x9c, 0x83, 0xef, 0x5b, 0x04, 0x44, 0x5c, 0xc4, 0x58, 0x1c, 0x8e, 0x86, 0xd8, 0x22, 0x4e, 0xdd, 0xd0, 0x9f, 0x11, 0x57 }, + { 0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f }, + { 0xed, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f }, + { 0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f }, + { 0xcd, 0xeb, 0x7a, 0x7c, 0x3b, 0x41, 0xb8, 0xae, 0x16, 0x56, 0xe3, 0xfa, 0xf1, 0x9f, 0xc4, 0x6a, 0xda, 0x09, 0x8d, 0xeb, 0x9c, 0x32, 0xb1, 0xfd, 0x86, 0x62, 0x05, 0x16, 0x5f, 0x49, 0xb8, 0x80 }, + { 0x4c, 0x9c, 0x95, 0xbc, 0xa3, 0x50, 0x8c, 0x24, 0xb1, 0xd0, 0xb1, 0x55, 0x9c, 0x83, 0xef, 0x5b, 0x04, 0x44, 0x5c, 0xc4, 0x58, 0x1c, 0x8e, 0x86, 0xd8, 0x22, 0x4e, 0xdd, 0xd0, 0x9f, 0x11, 0xd7 }, + { 0xd9, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff }, + { 0xda, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff }, + { 0xdb, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff } + }; + unsigned char c[12] = { 0 }; + unsigned int k; + size_t i, j; + + COMPILER_ASSERT(12 == sizeof blacklist / sizeof blacklist[0]); + for (j = 0; j < 32; j++) { + for (i = 0; i < sizeof blacklist / sizeof blacklist[0]; i++) { + c[i] |= s[j] ^ blacklist[i][j]; + } + } + k = 0; + for (i = 0; i < sizeof blacklist / sizeof blacklist[0]; i++) { + k 
|= (c[i] - 1); + } + return (int) ((k >> 8) & 1); +} + +static int +crypto_scalarmult_curve25519_ref10(unsigned char *q, + const unsigned char *n, + const unsigned char *p) +{ + unsigned char *t = q; + unsigned int i; + fe25519 x1; + fe25519 x2; + fe25519 z2; + fe25519 x3; + fe25519 z3; + fe25519 tmp0; + fe25519 tmp1; + int pos; + unsigned int swap; + unsigned int b; + + if (has_small_order(p)) { + return -1; + } + for (i = 0; i < 32; i++) { + t[i] = n[i]; + } + t[0] &= 248; + t[31] &= 127; + t[31] |= 64; + fe25519_frombytes(x1, p); + fe25519_1(x2); + fe25519_0(z2); + fe25519_copy(x3, x1); + fe25519_1(z3); + + swap = 0; + for (pos = 254; pos >= 0; --pos) { + b = t[pos / 8] >> (pos & 7); + b &= 1; + swap ^= b; + fe25519_cswap(x2, x3, swap); + fe25519_cswap(z2, z3, swap); + swap = b; + fe25519_sub(tmp0, x3, z3); + fe25519_sub(tmp1, x2, z2); + fe25519_add(x2, x2, z2); + fe25519_add(z2, x3, z3); + fe25519_mul(z3, tmp0, x2); + fe25519_mul(z2, z2, tmp1); + fe25519_sq(tmp0, tmp1); + fe25519_sq(tmp1, x2); + fe25519_add(x3, z3, z2); + fe25519_sub(z2, z3, z2); + fe25519_mul(x2, tmp1, tmp0); + fe25519_sub(tmp1, tmp1, tmp0); + fe25519_sq(z2, z2); + fe25519_scalar_product(z3, tmp1, 121666); + fe25519_sq(x3, x3); + fe25519_add(tmp0, tmp0, z3); + fe25519_mul(z3, x1, z2); + fe25519_mul(z2, tmp1, tmp0); + } + fe25519_cswap(x2, x3, swap); + fe25519_cswap(z2, z3, swap); + + fe25519_invert(z2, z2); + fe25519_mul(x2, x2, z2); + fe25519_tobytes(q, x2); + + return 0; +} + +static void +edwards_to_montgomery(fe25519 montgomeryX, const fe25519 edwardsY, const fe25519 edwardsZ) +{ + fe25519 tempX; + fe25519 tempZ; + + fe25519_add(tempX, edwardsZ, edwardsY); + fe25519_sub(tempZ, edwardsZ, edwardsY); + fe25519_invert(tempZ, tempZ); + fe25519_mul(montgomeryX, tempX, tempZ); +} + +static int +crypto_scalarmult_curve25519_ref10_base(unsigned char *q, + const unsigned char *n) +{ + unsigned char *t = q; + ge25519_p3 A; + fe25519 pk; + unsigned int i; + + for (i = 0; i < 32; i++) { + t[i] = n[i]; + } + t[0] &= 248; + t[31] &= 127; + t[31] |= 64; + ge25519_scalarmult_base(&A, t); + edwards_to_montgomery(pk, A.Y, A.Z); + fe25519_tobytes(q, pk); + + return 0; +} + +struct crypto_scalarmult_curve25519_implementation + crypto_scalarmult_curve25519_ref10_implementation = { + SODIUM_C99(.mult =) crypto_scalarmult_curve25519_ref10, + SODIUM_C99(.mult_base =) crypto_scalarmult_curve25519_ref10_base + }; diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.h b/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.h new file mode 100644 index 0000000000..ea52a62a69 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/ref10/x25519_ref10.h @@ -0,0 +1,10 @@ +#ifndef x25519_ref10_H +#define x25519_ref10_H + +#include "crypto_scalarmult_curve25519.h" +#include "../scalarmult_curve25519.h" + +extern struct crypto_scalarmult_curve25519_implementation + crypto_scalarmult_curve25519_ref10_implementation; + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts.S new file mode 100644 index 0000000000..67f1f0183e --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts.S @@ -0,0 +1,25 @@ +#ifdef IN_SANDY2X + +/* + REDMASK51 is from amd64-51/consts.s. 
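  (Note: REDMASK51 is the per-limb mask for the radix-2^51 representation,
  2^51 - 1 = 0x0007FFFFFFFFFFFF; the field code ANDs intermediate limbs with it
  and carries the bits above position 51 into the next limb.)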
+*/ + +#include "consts_namespace.h" +.data +.p2align 4 +v0_0: .quad 0, 0 +v1_0: .quad 1, 0 +v2_1: .quad 2, 1 +v9_0: .quad 9, 0 +v9_9: .quad 9, 9 +v19_19: .quad 19, 19 +v38_1: .quad 38, 1 +v38_38: .quad 38, 38 +v121666_121666: .quad 121666, 121666 +m25: .quad 33554431, 33554431 +m26: .quad 67108863, 67108863 +subc0: .quad 0x07FFFFDA, 0x03FFFFFE +subc2: .quad 0x07FFFFFE, 0x03FFFFFE +REDMASK51: .quad 0x0007FFFFFFFFFFFF + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts_namespace.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts_namespace.h new file mode 100644 index 0000000000..9f81fa61c4 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/consts_namespace.h @@ -0,0 +1,20 @@ +#ifndef consts_namespace_H +#define consts_namespace_H + +#define v0_0 crypto_scalarmult_curve25519_sandy2x_v0_0 +#define v1_0 crypto_scalarmult_curve25519_sandy2x_v1_0 +#define v2_1 crypto_scalarmult_curve25519_sandy2x_v2_1 +#define v9_0 crypto_scalarmult_curve25519_sandy2x_v9_0 +#define v9_9 crypto_scalarmult_curve25519_sandy2x_v9_9 +#define v19_19 crypto_scalarmult_curve25519_sandy2x_v19_19 +#define v38_1 crypto_scalarmult_curve25519_sandy2x_v38_1 +#define v38_38 crypto_scalarmult_curve25519_sandy2x_v38_38 +#define v121666_121666 crypto_scalarmult_curve25519_sandy2x_v121666_121666 +#define m25 crypto_scalarmult_curve25519_sandy2x_m25 +#define m26 crypto_scalarmult_curve25519_sandy2x_m26 +#define subc0 crypto_scalarmult_curve25519_sandy2x_subc0 +#define subc2 crypto_scalarmult_curve25519_sandy2x_subc2 +#define REDMASK51 crypto_scalarmult_curve25519_sandy2x_REDMASK51 + +#endif /* ifndef consts_namespace_H */ + diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.c b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.c new file mode 100644 index 0000000000..98b7cf79e5 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.c @@ -0,0 +1,114 @@ +/* + This file is adapted from ref10/scalarmult.c: + The code for Mongomery ladder is replace by the ladder assembly function; + Inversion is done in the same way as amd64-51/. 
+ (fe is first converted into fe51 after Mongomery ladder) +*/ + +#include <stddef.h> + +#ifdef HAVE_AVX_ASM + +#include "utils.h" +#include "curve25519_sandy2x.h" +#include "../scalarmult_curve25519.h" +#include "fe.h" +#include "fe51.h" +#include "ladder.h" +#include "ladder_base.h" + +#define x1 var[0] +#define x2 var[1] +#define z2 var[2] + +static int +crypto_scalarmult_curve25519_sandy2x(unsigned char *q, const unsigned char *n, + const unsigned char *p) +{ + unsigned char *t = q; + fe var[3]; + fe51 x_51; + fe51 z_51; + unsigned int i; + + for (i = 0; i < 32; i++) { + t[i] = n[i]; + } + t[0] &= 248; + t[31] &= 127; + t[31] |= 64; + + fe_frombytes(x1, p); + + ladder(var, t); + + z_51.v[0] = (z2[1] << 26) + z2[0]; + z_51.v[1] = (z2[3] << 26) + z2[2]; + z_51.v[2] = (z2[5] << 26) + z2[4]; + z_51.v[3] = (z2[7] << 26) + z2[6]; + z_51.v[4] = (z2[9] << 26) + z2[8]; + + x_51.v[0] = (x2[1] << 26) + x2[0]; + x_51.v[1] = (x2[3] << 26) + x2[2]; + x_51.v[2] = (x2[5] << 26) + x2[4]; + x_51.v[3] = (x2[7] << 26) + x2[6]; + x_51.v[4] = (x2[9] << 26) + x2[8]; + + fe51_invert(&z_51, &z_51); + fe51_mul(&x_51, &x_51, &z_51); + fe51_pack(q, &x_51); + + return 0; +} + +#undef x2 +#undef z2 + +#define x2 var[0] +#define z2 var[1] + +static int +crypto_scalarmult_curve25519_sandy2x_base(unsigned char *q, + const unsigned char *n) +{ + unsigned char *t = q; + fe var[3]; + fe51 x_51; + fe51 z_51; + unsigned int i; + + for (i = 0;i < 32; i++) { + t[i] = n[i]; + } + t[0] &= 248; + t[31] &= 127; + t[31] |= 64; + + ladder_base(var, t); + + z_51.v[0] = (z2[1] << 26) + z2[0]; + z_51.v[1] = (z2[3] << 26) + z2[2]; + z_51.v[2] = (z2[5] << 26) + z2[4]; + z_51.v[3] = (z2[7] << 26) + z2[6]; + z_51.v[4] = (z2[9] << 26) + z2[8]; + + x_51.v[0] = (x2[1] << 26) + x2[0]; + x_51.v[1] = (x2[3] << 26) + x2[2]; + x_51.v[2] = (x2[5] << 26) + x2[4]; + x_51.v[3] = (x2[7] << 26) + x2[6]; + x_51.v[4] = (x2[9] << 26) + x2[8]; + + fe51_invert(&z_51, &z_51); + fe51_mul(&x_51, &x_51, &z_51); + fe51_pack(q, &x_51); + + return 0; +} + +struct crypto_scalarmult_curve25519_implementation +crypto_scalarmult_curve25519_sandy2x_implementation = { + SODIUM_C99(.mult = ) crypto_scalarmult_curve25519_sandy2x, + SODIUM_C99(.mult_base = ) crypto_scalarmult_curve25519_sandy2x_base +}; + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.h new file mode 100644 index 0000000000..f02d980187 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/curve25519_sandy2x.h @@ -0,0 +1,9 @@ +#ifndef curve25519_sandy2x_H +#define curve25519_sandy2x_H + +#include "crypto_scalarmult_curve25519.h" + +extern struct crypto_scalarmult_curve25519_implementation + crypto_scalarmult_curve25519_sandy2x_implementation; + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe.h new file mode 100644 index 0000000000..b1115f8691 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe.h @@ -0,0 +1,26 @@ +/* + This file is adapted from ref10/fe.h: + All the redundant functions are removed. +*/ + +#ifndef fe_H +#define fe_H + +#include <stdint.h> +#include <stdlib.h> + +typedef uint64_t fe[10]; + +/* +fe means field element. +Here the field is \Z/(2^255-19). +An element t, entries t[0]...t[9], represents the integer +t[0]+2^26 t[1]+2^51 t[2]+2^77 t[3]+2^102 t[4]+...+2^230 t[9]. +Bounds on each t[i] vary depending on context. 
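  (Note: the limbs alternate 26 and 25 bits, so the exponents are
  0, 26, 51, 77, 102, 128, 153, 179, 204 and 230; after the ladder,
  curve25519_sandy2x.c merges adjacent limb pairs into radix-2^51 limbs
  via v[i] = (t[2*i+1] << 26) + t[2*i].)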
+*/ + +#define fe_frombytes crypto_scalarmult_curve25519_sandy2x_fe_frombytes + +extern void fe_frombytes(fe, const unsigned char *); + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51.h new file mode 100644 index 0000000000..8e3f199b24 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51.h @@ -0,0 +1,35 @@ +/* + This file is adapted from amd64-51/fe25519.h: + 'fe25519' is renamed as 'fe51'; + All the redundant functions are removed; + New function fe51_nsquare is introduced. +*/ + +#ifndef fe51_H +#define fe51_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include <stdint.h> +#include <stdlib.h> + +#include "fe51_namespace.h" + +typedef struct +{ + uint64_t v[5]; +} +fe51; + +extern void fe51_pack(unsigned char *, const fe51 *); +extern void fe51_mul(fe51 *, const fe51 *, const fe51 *); +extern void fe51_nsquare(fe51 *, const fe51 *, int); +extern void fe51_invert(fe51 *, const fe51 *); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_invert.c b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_invert.c new file mode 100644 index 0000000000..ec9bb1a91f --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_invert.c @@ -0,0 +1,58 @@ +/* + This file is adapted from amd64-51/fe25519_invert.c: + Loops of squares are replaced by nsquares for better performance. +*/ + +#include "fe51.h" + +#ifdef HAVE_AVX_ASM + +#define fe51_square(x, y) fe51_nsquare(x, y, 1) + +void +fe51_invert(fe51 *r, const fe51 *x) +{ + fe51 z2; + fe51 z9; + fe51 z11; + fe51 z2_5_0; + fe51 z2_10_0; + fe51 z2_20_0; + fe51 z2_50_0; + fe51 z2_100_0; + fe51 t; + + /* 2 */ fe51_square(&z2,x); + /* 4 */ fe51_square(&t,&z2); + /* 8 */ fe51_square(&t,&t); + /* 9 */ fe51_mul(&z9,&t,x); + /* 11 */ fe51_mul(&z11,&z9,&z2); + /* 22 */ fe51_square(&t,&z11); + /* 2^5 - 2^0 = 31 */ fe51_mul(&z2_5_0,&t,&z9); + + /* 2^10 - 2^5 */ fe51_nsquare(&t,&z2_5_0, 5); + /* 2^10 - 2^0 */ fe51_mul(&z2_10_0,&t,&z2_5_0); + + /* 2^20 - 2^10 */ fe51_nsquare(&t,&z2_10_0, 10); + /* 2^20 - 2^0 */ fe51_mul(&z2_20_0,&t,&z2_10_0); + + /* 2^40 - 2^20 */ fe51_nsquare(&t,&z2_20_0, 20); + /* 2^40 - 2^0 */ fe51_mul(&t,&t,&z2_20_0); + + /* 2^50 - 2^10 */ fe51_nsquare(&t,&t,10); + /* 2^50 - 2^0 */ fe51_mul(&z2_50_0,&t,&z2_10_0); + + /* 2^100 - 2^50 */ fe51_nsquare(&t,&z2_50_0, 50); + /* 2^100 - 2^0 */ fe51_mul(&z2_100_0,&t,&z2_50_0); + + /* 2^200 - 2^100 */ fe51_nsquare(&t,&z2_100_0, 100); + /* 2^200 - 2^0 */ fe51_mul(&t,&t,&z2_100_0); + + /* 2^250 - 2^50 */ fe51_nsquare(&t,&t, 50); + /* 2^250 - 2^0 */ fe51_mul(&t,&t,&z2_50_0); + + /* 2^255 - 2^5 */ fe51_nsquare(&t,&t,5); + /* 2^255 - 21 */ fe51_mul(r,&t,&z11); +} + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_mul.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_mul.S new file mode 100644 index 0000000000..83501b0395 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_mul.S @@ -0,0 +1,197 @@ +#ifdef IN_SANDY2X + +/* + This file is basically amd64-51/fe25519_mul.s. 
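  (Note: this is a schoolbook multiplication of two radix-2^51 field elements;
  partial products at or above 2^255 are folded back using
  2^255 = 19 (mod 2^255 - 19), which is why the high input limbs are
  pre-multiplied by 19 with imulq, and the accumulated result is then
  carried and masked with REDMASK51 into five 51-bit limbs.)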
+*/ +#include "fe51_namespace.h" +#include "consts_namespace.h" +.text +.p2align 5 +#ifdef ASM_HIDE_SYMBOL +ASM_HIDE_SYMBOL fe51_mul +ASM_HIDE_SYMBOL _fe51_mul +#endif +.globl fe51_mul +.globl _fe51_mul +#ifdef __ELF__ +.type fe51_mul, @function +.type _fe51_mul, @function +#endif +fe51_mul: +_fe51_mul: +mov %rsp,%r11 +and $31,%r11 +add $96,%r11 +sub %r11,%rsp +movq %r11,0(%rsp) +movq %r12,8(%rsp) +movq %r13,16(%rsp) +movq %r14,24(%rsp) +movq %r15,32(%rsp) +movq %rbx,40(%rsp) +movq %rbp,48(%rsp) +movq %rdi,56(%rsp) +mov %rdx,%rcx +movq 24(%rsi),%rdx +imulq $19,%rdx,%rax +movq %rax,64(%rsp) +mulq 16(%rcx) +mov %rax,%r8 +mov %rdx,%r9 +movq 32(%rsi),%rdx +imulq $19,%rdx,%rax +movq %rax,72(%rsp) +mulq 8(%rcx) +add %rax,%r8 +adc %rdx,%r9 +movq 0(%rsi),%rax +mulq 0(%rcx) +add %rax,%r8 +adc %rdx,%r9 +movq 0(%rsi),%rax +mulq 8(%rcx) +mov %rax,%r10 +mov %rdx,%r11 +movq 0(%rsi),%rax +mulq 16(%rcx) +mov %rax,%r12 +mov %rdx,%r13 +movq 0(%rsi),%rax +mulq 24(%rcx) +mov %rax,%r14 +mov %rdx,%r15 +movq 0(%rsi),%rax +mulq 32(%rcx) +mov %rax,%rbx +mov %rdx,%rbp +movq 8(%rsi),%rax +mulq 0(%rcx) +add %rax,%r10 +adc %rdx,%r11 +movq 8(%rsi),%rax +mulq 8(%rcx) +add %rax,%r12 +adc %rdx,%r13 +movq 8(%rsi),%rax +mulq 16(%rcx) +add %rax,%r14 +adc %rdx,%r15 +movq 8(%rsi),%rax +mulq 24(%rcx) +add %rax,%rbx +adc %rdx,%rbp +movq 8(%rsi),%rdx +imulq $19,%rdx,%rax +mulq 32(%rcx) +add %rax,%r8 +adc %rdx,%r9 +movq 16(%rsi),%rax +mulq 0(%rcx) +add %rax,%r12 +adc %rdx,%r13 +movq 16(%rsi),%rax +mulq 8(%rcx) +add %rax,%r14 +adc %rdx,%r15 +movq 16(%rsi),%rax +mulq 16(%rcx) +add %rax,%rbx +adc %rdx,%rbp +movq 16(%rsi),%rdx +imulq $19,%rdx,%rax +mulq 24(%rcx) +add %rax,%r8 +adc %rdx,%r9 +movq 16(%rsi),%rdx +imulq $19,%rdx,%rax +mulq 32(%rcx) +add %rax,%r10 +adc %rdx,%r11 +movq 24(%rsi),%rax +mulq 0(%rcx) +add %rax,%r14 +adc %rdx,%r15 +movq 24(%rsi),%rax +mulq 8(%rcx) +add %rax,%rbx +adc %rdx,%rbp +movq 64(%rsp),%rax +mulq 24(%rcx) +add %rax,%r10 +adc %rdx,%r11 +movq 64(%rsp),%rax +mulq 32(%rcx) +add %rax,%r12 +adc %rdx,%r13 +movq 32(%rsi),%rax +mulq 0(%rcx) +add %rax,%rbx +adc %rdx,%rbp +movq 72(%rsp),%rax +mulq 16(%rcx) +add %rax,%r10 +adc %rdx,%r11 +movq 72(%rsp),%rax +mulq 24(%rcx) +add %rax,%r12 +adc %rdx,%r13 +movq 72(%rsp),%rax +mulq 32(%rcx) +add %rax,%r14 +adc %rdx,%r15 +movq REDMASK51(%rip),%rsi +shld $13,%r8,%r9 +and %rsi,%r8 +shld $13,%r10,%r11 +and %rsi,%r10 +add %r9,%r10 +shld $13,%r12,%r13 +and %rsi,%r12 +add %r11,%r12 +shld $13,%r14,%r15 +and %rsi,%r14 +add %r13,%r14 +shld $13,%rbx,%rbp +and %rsi,%rbx +add %r15,%rbx +imulq $19,%rbp,%rdx +add %rdx,%r8 +mov %r8,%rdx +shr $51,%rdx +add %r10,%rdx +mov %rdx,%rcx +shr $51,%rdx +and %rsi,%r8 +add %r12,%rdx +mov %rdx,%r9 +shr $51,%rdx +and %rsi,%rcx +add %r14,%rdx +mov %rdx,%rax +shr $51,%rdx +and %rsi,%r9 +add %rbx,%rdx +mov %rdx,%r10 +shr $51,%rdx +and %rsi,%rax +imulq $19,%rdx,%rdx +add %rdx,%r8 +and %rsi,%r10 +movq %r8,0(%rdi) +movq %rcx,8(%rdi) +movq %r9,16(%rdi) +movq %rax,24(%rdi) +movq %r10,32(%rdi) +movq 0(%rsp),%r11 +movq 8(%rsp),%r12 +movq 16(%rsp),%r13 +movq 24(%rsp),%r14 +movq 32(%rsp),%r15 +movq 40(%rsp),%rbx +movq 48(%rsp),%rbp +add %r11,%rsp +mov %rdi,%rax +mov %rsi,%rdx +ret + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_namespace.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_namespace.h new file mode 100644 index 0000000000..057f242ca1 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_namespace.h @@ -0,0 +1,16 @@ +#ifndef fe51_namespace_H +#define fe51_namespace_H + 
+#define fe51 crypto_scalarmult_curve25519_sandy2x_fe51 +#define _fe51 _crypto_scalarmult_curve25519_sandy2x_fe51 +#define fe51_pack crypto_scalarmult_curve25519_sandy2x_fe51_pack +#define _fe51_pack _crypto_scalarmult_curve25519_sandy2x_fe51_pack +#define fe51_mul crypto_scalarmult_curve25519_sandy2x_fe51_mul +#define _fe51_mul _crypto_scalarmult_curve25519_sandy2x_fe51_mul +#define fe51_nsquare crypto_scalarmult_curve25519_sandy2x_fe51_nsquare +#define _fe51_nsquare _crypto_scalarmult_curve25519_sandy2x_fe51_nsquare + +#define fe51_invert crypto_scalarmult_curve25519_sandy2x_fe51_invert + +#endif /* ifndef fe51_namespace_H */ + diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_nsquare.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_nsquare.S new file mode 100644 index 0000000000..41c3054805 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_nsquare.S @@ -0,0 +1,172 @@ +#ifdef IN_SANDY2X + +/* + This file is adapted from amd64-51/fe25519_square.s: + Adding loop to perform n squares. +*/ +#include "fe51_namespace.h" +#include "consts_namespace.h" +.p2align 5 + +#ifdef ASM_HIDE_SYMBOL +ASM_HIDE_SYMBOL fe51_nsquare +ASM_HIDE_SYMBOL _fe51_nsquare +#endif +.globl fe51_nsquare +.globl _fe51_nsquare +#ifdef __ELF__ +.type fe51_nsquare, @function +.type _fe51_nsquare, @function +#endif +fe51_nsquare: +_fe51_nsquare: + +mov %rsp,%r11 +and $31,%r11 +add $64,%r11 +sub %r11,%rsp +movq %r11,0(%rsp) +movq %r12,8(%rsp) +movq %r13,16(%rsp) +movq %r14,24(%rsp) +movq %r15,32(%rsp) +movq %rbx,40(%rsp) +movq %rbp,48(%rsp) +movq 0(%rsi),%rcx +movq 8(%rsi),%r8 +movq 16(%rsi),%r9 +movq 24(%rsi),%rax +movq 32(%rsi),%rsi +movq %r9,16(%rdi) +movq %rax,24(%rdi) +movq %rsi,32(%rdi) +mov %rdx,%rsi + +.p2align 4 +._loop: +sub $1,%rsi +mov %rcx,%rax +mul %rcx +add %rcx,%rcx +mov %rax,%r9 +mov %rdx,%r10 +mov %rcx,%rax +mul %r8 +mov %rax,%r11 +mov %rdx,%r12 +mov %rcx,%rax +mulq 16(%rdi) +mov %rax,%r13 +mov %rdx,%r14 +mov %rcx,%rax +mulq 24(%rdi) +mov %rax,%r15 +mov %rdx,%rbx +mov %rcx,%rax +mulq 32(%rdi) +mov %rax,%rcx +mov %rdx,%rbp +mov %r8,%rax +mul %r8 +add %r8,%r8 +add %rax,%r13 +adc %rdx,%r14 +mov %r8,%rax +mulq 16(%rdi) +add %rax,%r15 +adc %rdx,%rbx +mov %r8,%rax +imulq $19, %r8,%r8 +mulq 24(%rdi) +add %rax,%rcx +adc %rdx,%rbp +mov %r8,%rax +mulq 32(%rdi) +add %rax,%r9 +adc %rdx,%r10 +movq 16(%rdi),%rax +mulq 16(%rdi) +add %rax,%rcx +adc %rdx,%rbp +shld $13,%rcx,%rbp +movq 16(%rdi),%rax +imulq $38, %rax,%rax +mulq 24(%rdi) +add %rax,%r9 +adc %rdx,%r10 +shld $13,%r9,%r10 +movq 16(%rdi),%rax +imulq $38, %rax,%rax +mulq 32(%rdi) +add %rax,%r11 +adc %rdx,%r12 +movq 24(%rdi),%rax +imulq $19, %rax,%rax +mulq 24(%rdi) +add %rax,%r11 +adc %rdx,%r12 +shld $13,%r11,%r12 +movq 24(%rdi),%rax +imulq $38, %rax,%rax +mulq 32(%rdi) +add %rax,%r13 +adc %rdx,%r14 +shld $13,%r13,%r14 +movq 32(%rdi),%rax +imulq $19, %rax,%rax +mulq 32(%rdi) +add %rax,%r15 +adc %rdx,%rbx +shld $13,%r15,%rbx +movq REDMASK51(%rip),%rdx +and %rdx,%rcx +add %rbx,%rcx +and %rdx,%r9 +and %rdx,%r11 +add %r10,%r11 +and %rdx,%r13 +add %r12,%r13 +and %rdx,%r15 +add %r14,%r15 +imulq $19, %rbp,%rbp +lea (%r9,%rbp),%r9 +mov %r9,%rax +shr $51,%r9 +add %r11,%r9 +and %rdx,%rax +mov %r9,%r8 +shr $51,%r9 +add %r13,%r9 +and %rdx,%r8 +mov %r9,%r10 +shr $51,%r9 +add %r15,%r9 +and %rdx,%r10 +movq %r10,16(%rdi) +mov %r9,%r10 +shr $51,%r9 +add %rcx,%r9 +and %rdx,%r10 +movq %r10,24(%rdi) +mov %r9,%r10 +shr $51,%r9 +imulq $19, %r9,%r9 +lea (%rax,%r9),%rcx +and %rdx,%r10 +movq %r10,32(%rdi) +cmp $0,%rsi +jne 
._loop + +movq %rcx,0(%rdi) +movq %r8,8(%rdi) +movq 0(%rsp),%r11 +movq 8(%rsp),%r12 +movq 16(%rsp),%r13 +movq 24(%rsp),%r14 +movq 32(%rsp),%r15 +movq 40(%rsp),%rbx +movq 48(%rsp),%rbp +add %r11,%rsp +ret + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_pack.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_pack.S new file mode 100644 index 0000000000..500c858476 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe51_pack.S @@ -0,0 +1,226 @@ +#ifdef IN_SANDY2X + +/* + This file is the result of merging + amd64-51/fe25519_pack.c and amd64-51/fe25519_freeze.s. +*/ +#include "fe51_namespace.h" +#include "consts_namespace.h" +.p2align 5 + +#ifdef ASM_HIDE_SYMBOL +ASM_HIDE_SYMBOL fe51_pack +ASM_HIDE_SYMBOL _fe51_pack +#endif +.globl fe51_pack +.globl _fe51_pack +#ifdef __ELF__ +.type fe51_pack, @function +.type _fe51_pack, @function +#endif +fe51_pack: +_fe51_pack: + +mov %rsp,%r11 +and $31,%r11 +add $32,%r11 +sub %r11,%rsp +movq %r11,0(%rsp) +movq %r12,8(%rsp) +movq 0(%rsi),%rdx +movq 8(%rsi),%rcx +movq 16(%rsi),%r8 +movq 24(%rsi),%r9 +movq 32(%rsi),%rsi +movq REDMASK51(%rip),%rax +lea -18(%rax),%r10 +mov $3,%r11 + +.p2align 4 +._reduceloop: +mov %rdx,%r12 +shr $51,%r12 +and %rax,%rdx +add %r12,%rcx +mov %rcx,%r12 +shr $51,%r12 +and %rax,%rcx +add %r12,%r8 +mov %r8,%r12 +shr $51,%r12 +and %rax,%r8 +add %r12,%r9 +mov %r9,%r12 +shr $51,%r12 +and %rax,%r9 +add %r12,%rsi +mov %rsi,%r12 +shr $51,%r12 +and %rax,%rsi +imulq $19, %r12,%r12 +add %r12,%rdx +sub $1,%r11 +ja ._reduceloop + +mov $1,%r12 +cmp %r10,%rdx +cmovl %r11,%r12 +cmp %rax,%rcx +cmovne %r11,%r12 +cmp %rax,%r8 +cmovne %r11,%r12 +cmp %rax,%r9 +cmovne %r11,%r12 +cmp %rax,%rsi +cmovne %r11,%r12 +neg %r12 +and %r12,%rax +and %r12,%r10 +sub %r10,%rdx +sub %rax,%rcx +sub %rax,%r8 +sub %rax,%r9 +sub %rax,%rsi +mov %rdx,%rax +and $0xFF,%eax +movb %al,0(%rdi) +mov %rdx,%rax +shr $8,%rax +and $0xFF,%eax +movb %al,1(%rdi) +mov %rdx,%rax +shr $16,%rax +and $0xFF,%eax +movb %al,2(%rdi) +mov %rdx,%rax +shr $24,%rax +and $0xFF,%eax +movb %al,3(%rdi) +mov %rdx,%rax +shr $32,%rax +and $0xFF,%eax +movb %al,4(%rdi) +mov %rdx,%rax +shr $40,%rax +and $0xFF,%eax +movb %al,5(%rdi) +mov %rdx,%rdx +shr $48,%rdx +mov %rcx,%rax +shl $3,%rax +and $0xF8,%eax +xor %rdx,%rax +movb %al,6(%rdi) +mov %rcx,%rdx +shr $5,%rdx +and $0xFF,%edx +movb %dl,7(%rdi) +mov %rcx,%rdx +shr $13,%rdx +and $0xFF,%edx +movb %dl,8(%rdi) +mov %rcx,%rdx +shr $21,%rdx +and $0xFF,%edx +movb %dl,9(%rdi) +mov %rcx,%rdx +shr $29,%rdx +and $0xFF,%edx +movb %dl,10(%rdi) +mov %rcx,%rdx +shr $37,%rdx +and $0xFF,%edx +movb %dl,11(%rdi) +mov %rcx,%rdx +shr $45,%rdx +mov %r8,%rcx +shl $6,%rcx +and $0xC0,%ecx +xor %rdx,%rcx +movb %cl,12(%rdi) +mov %r8,%rdx +shr $2,%rdx +and $0xFF,%edx +movb %dl,13(%rdi) +mov %r8,%rdx +shr $10,%rdx +and $0xFF,%edx +movb %dl,14(%rdi) +mov %r8,%rdx +shr $18,%rdx +and $0xFF,%edx +movb %dl,15(%rdi) +mov %r8,%rdx +shr $26,%rdx +and $0xFF,%edx +movb %dl,16(%rdi) +mov %r8,%rdx +shr $34,%rdx +and $0xFF,%edx +movb %dl,17(%rdi) +mov %r8,%rdx +shr $42,%rdx +movb %dl,18(%rdi) +mov %r8,%rdx +shr $50,%rdx +mov %r9,%rcx +shl $1,%rcx +and $0xFE,%ecx +xor %rdx,%rcx +movb %cl,19(%rdi) +mov %r9,%rdx +shr $7,%rdx +and $0xFF,%edx +movb %dl,20(%rdi) +mov %r9,%rdx +shr $15,%rdx +and $0xFF,%edx +movb %dl,21(%rdi) +mov %r9,%rdx +shr $23,%rdx +and $0xFF,%edx +movb %dl,22(%rdi) +mov %r9,%rdx +shr $31,%rdx +and $0xFF,%edx +movb %dl,23(%rdi) +mov %r9,%rdx +shr $39,%rdx +and $0xFF,%edx +movb %dl,24(%rdi) +mov %r9,%rdx +shr 
$47,%rdx +mov %rsi,%rcx +shl $4,%rcx +and $0xF0,%ecx +xor %rdx,%rcx +movb %cl,25(%rdi) +mov %rsi,%rdx +shr $4,%rdx +and $0xFF,%edx +movb %dl,26(%rdi) +mov %rsi,%rdx +shr $12,%rdx +and $0xFF,%edx +movb %dl,27(%rdi) +mov %rsi,%rdx +shr $20,%rdx +and $0xFF,%edx +movb %dl,28(%rdi) +mov %rsi,%rdx +shr $28,%rdx +and $0xFF,%edx +movb %dl,29(%rdi) +mov %rsi,%rdx +shr $36,%rdx +and $0xFF,%edx +movb %dl,30(%rdi) +mov %rsi,%rsi +shr $44,%rsi +movb %sil,31(%rdi) +movq 0(%rsp),%r11 +movq 8(%rsp),%r12 +add %r11,%rsp +ret + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe_frombytes_sandy2x.c b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe_frombytes_sandy2x.c new file mode 100644 index 0000000000..2fe081ee2a --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/fe_frombytes_sandy2x.c @@ -0,0 +1,78 @@ +/* + This file is basically ref10/fe_frombytes.h. +*/ + +#include "fe.h" + +#ifdef HAVE_AVX_ASM + +static uint64_t +load_3(const unsigned char *in) +{ + uint64_t result; + result = (uint64_t) in[0]; + result |= ((uint64_t) in[1]) << 8; + result |= ((uint64_t) in[2]) << 16; + return result; +} + +static uint64_t +load_4(const unsigned char *in) +{ + uint64_t result; + result = (uint64_t) in[0]; + result |= ((uint64_t) in[1]) << 8; + result |= ((uint64_t) in[2]) << 16; + result |= ((uint64_t) in[3]) << 24; + return result; +} + +void +fe_frombytes(fe h, const unsigned char *s) +{ + uint64_t h0 = load_4(s); + uint64_t h1 = load_3(s + 4) << 6; + uint64_t h2 = load_3(s + 7) << 5; + uint64_t h3 = load_3(s + 10) << 3; + uint64_t h4 = load_3(s + 13) << 2; + uint64_t h5 = load_4(s + 16); + uint64_t h6 = load_3(s + 20) << 7; + uint64_t h7 = load_3(s + 23) << 5; + uint64_t h8 = load_3(s + 26) << 4; + uint64_t h9 = (load_3(s + 29) & 8388607) << 2; + uint64_t carry0; + uint64_t carry1; + uint64_t carry2; + uint64_t carry3; + uint64_t carry4; + uint64_t carry5; + uint64_t carry6; + uint64_t carry7; + uint64_t carry8; + uint64_t carry9; + + carry9 = h9 >> 25; h0 += carry9 * 19; h9 &= 0x1FFFFFF; + carry1 = h1 >> 25; h2 += carry1; h1 &= 0x1FFFFFF; + carry3 = h3 >> 25; h4 += carry3; h3 &= 0x1FFFFFF; + carry5 = h5 >> 25; h6 += carry5; h5 &= 0x1FFFFFF; + carry7 = h7 >> 25; h8 += carry7; h7 &= 0x1FFFFFF; + + carry0 = h0 >> 26; h1 += carry0; h0 &= 0x3FFFFFF; + carry2 = h2 >> 26; h3 += carry2; h2 &= 0x3FFFFFF; + carry4 = h4 >> 26; h5 += carry4; h4 &= 0x3FFFFFF; + carry6 = h6 >> 26; h7 += carry6; h6 &= 0x3FFFFFF; + carry8 = h8 >> 26; h9 += carry8; h8 &= 0x3FFFFFF; + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.S new file mode 100644 index 0000000000..c5c06021d8 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.S @@ -0,0 +1,1440 @@ +#ifdef IN_SANDY2X + +#include "ladder_namespace.h" +#include "consts_namespace.h" +.p2align 5 + +#ifdef ASM_HIDE_SYMBOL +ASM_HIDE_SYMBOL ladder +ASM_HIDE_SYMBOL _ladder +#endif +.globl ladder +.globl _ladder +#ifdef __ELF__ +.type ladder, @function +.type _ladder, @function +#endif +ladder: +_ladder: + +mov %rsp,%r11 +and $31,%r11 +add $1856,%r11 +sub %r11,%rsp +movq %r11,1824(%rsp) +movq %r12,1832(%rsp) +movq %r13,1840(%rsp) +movq %r14,1848(%rsp) +vmovdqa v0_0(%rip),%xmm0 +vmovdqa v1_0(%rip),%xmm1 +vmovdqu 0(%rdi),%xmm2 +vmovdqa %xmm2,0(%rsp) +vmovdqu 16(%rdi),%xmm2 
+vmovdqa %xmm2,16(%rsp) +vmovdqu 32(%rdi),%xmm2 +vmovdqa %xmm2,32(%rsp) +vmovdqu 48(%rdi),%xmm2 +vmovdqa %xmm2,48(%rsp) +vmovdqu 64(%rdi),%xmm2 +vmovdqa %xmm2,64(%rsp) +vmovdqa %xmm1,80(%rsp) +vmovdqa %xmm0,96(%rsp) +vmovdqa %xmm0,112(%rsp) +vmovdqa %xmm0,128(%rsp) +vmovdqa %xmm0,144(%rsp) +vmovdqa %xmm1,%xmm0 +vpxor %xmm1,%xmm1,%xmm1 +vpxor %xmm2,%xmm2,%xmm2 +vpxor %xmm3,%xmm3,%xmm3 +vpxor %xmm4,%xmm4,%xmm4 +vpxor %xmm5,%xmm5,%xmm5 +vpxor %xmm6,%xmm6,%xmm6 +vpxor %xmm7,%xmm7,%xmm7 +vpxor %xmm8,%xmm8,%xmm8 +vpxor %xmm9,%xmm9,%xmm9 +vmovdqu 0(%rdi),%xmm10 +vmovdqa %xmm10,160(%rsp) +vmovdqu 16(%rdi),%xmm10 +vmovdqa %xmm10,176(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,192(%rsp) +vmovdqu 32(%rdi),%xmm10 +vmovdqa %xmm10,208(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,224(%rsp) +vmovdqu 48(%rdi),%xmm10 +vmovdqa %xmm10,240(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,256(%rsp) +vmovdqu 64(%rdi),%xmm10 +vmovdqa %xmm10,272(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,288(%rsp) +vmovdqu 8(%rdi),%xmm10 +vpmuludq v2_1(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,304(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,320(%rsp) +vmovdqu 24(%rdi),%xmm10 +vpmuludq v2_1(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,336(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,352(%rsp) +vmovdqu 40(%rdi),%xmm10 +vpmuludq v2_1(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,368(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,384(%rsp) +vmovdqu 56(%rdi),%xmm10 +vpmuludq v2_1(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,400(%rsp) +vpmuludq v19_19(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,416(%rsp) +vmovdqu 0(%rdi),%xmm10 +vmovdqu 64(%rdi),%xmm11 +vblendps $12, %xmm11, %xmm10, %xmm10 +vpshufd $2,%xmm10,%xmm10 +vpmuludq v38_1(%rip),%xmm10,%xmm10 +vmovdqa %xmm10,432(%rsp) +movq 0(%rsi),%rdx +movq 8(%rsi),%rcx +movq 16(%rsi),%r8 +movq 24(%rsi),%r9 +shrd $1,%rcx,%rdx +shrd $1,%r8,%rcx +shrd $1,%r9,%r8 +shr $1,%r9 +xorq 0(%rsi),%rdx +xorq 8(%rsi),%rcx +xorq 16(%rsi),%r8 +xorq 24(%rsi),%r9 +leaq 800(%rsp),%rsi +mov $64,%rax + +.p2align 4 +._ladder_small_loop: +mov %rdx,%r10 +mov %rcx,%r11 +mov %r8,%r12 +mov %r9,%r13 +shr $1,%rdx +shr $1,%rcx +shr $1,%r8 +shr $1,%r9 +and $1,%r10d +and $1,%r11d +and $1,%r12d +and $1,%r13d +neg %r10 +neg %r11 +neg %r12 +neg %r13 +movl %r10d,0(%rsi) +movl %r11d,256(%rsi) +movl %r12d,512(%rsi) +movl %r13d,768(%rsi) +add $4,%rsi +sub $1,%rax +jne ._ladder_small_loop +mov $255,%rdx +add $760,%rsi + +.p2align 4 +._ladder_loop: +sub $1,%rdx +vbroadcastss 0(%rsi),%xmm10 +sub $4,%rsi +vmovdqa 0(%rsp),%xmm11 +vmovdqa 80(%rsp),%xmm12 +vpxor %xmm11,%xmm0,%xmm13 +vpand %xmm10,%xmm13,%xmm13 +vpxor %xmm13,%xmm0,%xmm0 +vpxor %xmm13,%xmm11,%xmm11 +vpxor %xmm12,%xmm1,%xmm13 +vpand %xmm10,%xmm13,%xmm13 +vpxor %xmm13,%xmm1,%xmm1 +vpxor %xmm13,%xmm12,%xmm12 +vmovdqa 16(%rsp),%xmm13 +vmovdqa 96(%rsp),%xmm14 +vpxor %xmm13,%xmm2,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm2,%xmm2 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm3,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm3,%xmm3 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,0(%rsp) +vmovdqa %xmm14,16(%rsp) +vmovdqa 32(%rsp),%xmm13 +vmovdqa 112(%rsp),%xmm14 +vpxor %xmm13,%xmm4,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm4,%xmm4 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm5,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm5,%xmm5 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,32(%rsp) +vmovdqa %xmm14,80(%rsp) +vmovdqa 48(%rsp),%xmm13 +vmovdqa 128(%rsp),%xmm14 +vpxor %xmm13,%xmm6,%xmm15 +vpand 
%xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm6,%xmm6 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm7,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm7,%xmm7 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,48(%rsp) +vmovdqa %xmm14,96(%rsp) +vmovdqa 64(%rsp),%xmm13 +vmovdqa 144(%rsp),%xmm14 +vpxor %xmm13,%xmm8,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm8,%xmm8 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm9,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm9,%xmm9 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,64(%rsp) +vmovdqa %xmm14,112(%rsp) +vpaddq subc0(%rip),%xmm11,%xmm10 +vpsubq %xmm12,%xmm10,%xmm10 +vpaddq %xmm12,%xmm11,%xmm11 +vpunpckhqdq %xmm10,%xmm11,%xmm12 +vpunpcklqdq %xmm10,%xmm11,%xmm10 +vpaddq %xmm1,%xmm0,%xmm11 +vpaddq subc0(%rip),%xmm0,%xmm0 +vpsubq %xmm1,%xmm0,%xmm0 +vpunpckhqdq %xmm11,%xmm0,%xmm1 +vpunpcklqdq %xmm11,%xmm0,%xmm0 +vpmuludq %xmm0,%xmm10,%xmm11 +vpmuludq %xmm1,%xmm10,%xmm13 +vmovdqa %xmm1,128(%rsp) +vpaddq %xmm1,%xmm1,%xmm1 +vpmuludq %xmm0,%xmm12,%xmm14 +vmovdqa %xmm0,144(%rsp) +vpaddq %xmm14,%xmm13,%xmm13 +vpmuludq %xmm1,%xmm12,%xmm0 +vmovdqa %xmm1,448(%rsp) +vpaddq %xmm3,%xmm2,%xmm1 +vpaddq subc2(%rip),%xmm2,%xmm2 +vpsubq %xmm3,%xmm2,%xmm2 +vpunpckhqdq %xmm1,%xmm2,%xmm3 +vpunpcklqdq %xmm1,%xmm2,%xmm1 +vpmuludq %xmm1,%xmm10,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq %xmm3,%xmm10,%xmm2 +vmovdqa %xmm3,464(%rsp) +vpaddq %xmm3,%xmm3,%xmm3 +vpmuludq %xmm1,%xmm12,%xmm14 +vmovdqa %xmm1,480(%rsp) +vpaddq %xmm14,%xmm2,%xmm2 +vpmuludq %xmm3,%xmm12,%xmm1 +vmovdqa %xmm3,496(%rsp) +vpaddq %xmm5,%xmm4,%xmm3 +vpaddq subc2(%rip),%xmm4,%xmm4 +vpsubq %xmm5,%xmm4,%xmm4 +vpunpckhqdq %xmm3,%xmm4,%xmm5 +vpunpcklqdq %xmm3,%xmm4,%xmm3 +vpmuludq %xmm3,%xmm10,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq %xmm5,%xmm10,%xmm4 +vmovdqa %xmm5,512(%rsp) +vpaddq %xmm5,%xmm5,%xmm5 +vpmuludq %xmm3,%xmm12,%xmm14 +vmovdqa %xmm3,528(%rsp) +vpaddq %xmm14,%xmm4,%xmm4 +vpaddq %xmm7,%xmm6,%xmm3 +vpaddq subc2(%rip),%xmm6,%xmm6 +vpsubq %xmm7,%xmm6,%xmm6 +vpunpckhqdq %xmm3,%xmm6,%xmm7 +vpunpcklqdq %xmm3,%xmm6,%xmm3 +vpmuludq %xmm3,%xmm10,%xmm6 +vpmuludq %xmm5,%xmm12,%xmm14 +vmovdqa %xmm5,544(%rsp) +vpmuludq v19_19(%rip),%xmm5,%xmm5 +vmovdqa %xmm5,560(%rsp) +vpaddq %xmm14,%xmm6,%xmm6 +vpmuludq %xmm7,%xmm10,%xmm5 +vmovdqa %xmm7,576(%rsp) +vpaddq %xmm7,%xmm7,%xmm7 +vpmuludq %xmm3,%xmm12,%xmm14 +vmovdqa %xmm3,592(%rsp) +vpaddq %xmm14,%xmm5,%xmm5 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vmovdqa %xmm3,608(%rsp) +vpaddq %xmm9,%xmm8,%xmm3 +vpaddq subc2(%rip),%xmm8,%xmm8 +vpsubq %xmm9,%xmm8,%xmm8 +vpunpckhqdq %xmm3,%xmm8,%xmm9 +vpunpcklqdq %xmm3,%xmm8,%xmm3 +vmovdqa %xmm3,624(%rsp) +vpmuludq %xmm7,%xmm12,%xmm8 +vmovdqa %xmm7,640(%rsp) +vpmuludq v19_19(%rip),%xmm7,%xmm7 +vmovdqa %xmm7,656(%rsp) +vpmuludq %xmm3,%xmm10,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq %xmm9,%xmm10,%xmm7 +vmovdqa %xmm9,672(%rsp) +vpaddq %xmm9,%xmm9,%xmm9 +vpmuludq %xmm3,%xmm12,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vmovdqa %xmm3,688(%rsp) +vpmuludq v19_19(%rip),%xmm12,%xmm12 +vpmuludq %xmm9,%xmm12,%xmm3 +vmovdqa %xmm9,704(%rsp) +vpaddq %xmm3,%xmm11,%xmm11 +vmovdqa 0(%rsp),%xmm3 +vmovdqa 16(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 480(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 464(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 
528(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 512(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 592(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 576(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 624(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 672(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 448(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 480(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 496(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 528(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 544(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 592(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 640(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 624(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 704(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm0,%xmm0 +vmovdqa 32(%rsp),%xmm3 +vmovdqa 80(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 480(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 464(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 528(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 512(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 592(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 576(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 624(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 672(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 448(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 480(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 496(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 528(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 544(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 592(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 640(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 624(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 704(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm1,%xmm1 +vmovdqa 48(%rsp),%xmm3 +vmovdqa 96(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 480(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 464(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 528(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 512(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 592(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 576(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 624(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 672(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 448(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 480(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 496(%rsp),%xmm9,%xmm3 +vpaddq 
%xmm3,%xmm11,%xmm11 +vpmuludq 528(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 544(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 592(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 640(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 624(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 704(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm6,%xmm6 +vmovdqa 64(%rsp),%xmm3 +vmovdqa 112(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 480(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 464(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 528(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 512(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 592(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 576(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 624(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 672(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 448(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 480(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 496(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 528(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 544(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 592(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 640(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 624(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 704(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm8,%xmm8 +vpsrlq $25,%xmm4,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpand m25(%rip),%xmm4,%xmm4 +vpsrlq $26,%xmm11,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm6,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm13,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpand m25(%rip),%xmm13,%xmm13 +vpsrlq $25,%xmm5,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm0,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpand m26(%rip),%xmm0,%xmm0 +vpsrlq $26,%xmm8,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $25,%xmm2,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpand m25(%rip),%xmm2,%xmm2 +vpsrlq $25,%xmm7,%xmm3 +vpsllq $4,%xmm3,%xmm9 +vpaddq %xmm3,%xmm11,%xmm11 +vpsllq $1,%xmm3,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpaddq %xmm9,%xmm11,%xmm11 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $26,%xmm1,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $26,%xmm11,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $25,%xmm4,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpand m25(%rip),%xmm4,%xmm4 +vpunpcklqdq %xmm13,%xmm11,%xmm3 +vpunpckhqdq %xmm13,%xmm11,%xmm9 +vpaddq subc0(%rip),%xmm9,%xmm10 +vpsubq %xmm3,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm3,%xmm10,%xmm9 +vpunpcklqdq %xmm3,%xmm10,%xmm10 +vpmuludq %xmm10,%xmm10,%xmm3 +vpaddq %xmm10,%xmm10,%xmm10 +vpmuludq %xmm9,%xmm10,%xmm11 +vpunpcklqdq %xmm2,%xmm0,%xmm12 +vpunpckhqdq %xmm2,%xmm0,%xmm0 +vpaddq subc2(%rip),%xmm0,%xmm2 +vpsubq %xmm12,%xmm2,%xmm2 +vpaddq %xmm0,%xmm12,%xmm12 +vpunpckhqdq %xmm12,%xmm2,%xmm0 +vpunpcklqdq %xmm12,%xmm2,%xmm2 +vpmuludq %xmm2,%xmm10,%xmm12 +vpaddq %xmm9,%xmm9,%xmm13 +vpmuludq %xmm13,%xmm9,%xmm9 +vpaddq %xmm9,%xmm12,%xmm12 +vpmuludq %xmm0,%xmm10,%xmm9 
+vpmuludq %xmm2,%xmm13,%xmm14 +vpaddq %xmm14,%xmm9,%xmm9 +vpunpcklqdq %xmm4,%xmm1,%xmm14 +vpunpckhqdq %xmm4,%xmm1,%xmm1 +vpaddq subc2(%rip),%xmm1,%xmm4 +vpsubq %xmm14,%xmm4,%xmm4 +vpaddq %xmm1,%xmm14,%xmm14 +vpunpckhqdq %xmm14,%xmm4,%xmm1 +vpunpcklqdq %xmm14,%xmm4,%xmm4 +vmovdqa %xmm1,0(%rsp) +vpaddq %xmm1,%xmm1,%xmm1 +vmovdqa %xmm1,16(%rsp) +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vmovdqa %xmm1,32(%rsp) +vpmuludq %xmm4,%xmm10,%xmm1 +vpmuludq %xmm2,%xmm2,%xmm14 +vpaddq %xmm14,%xmm1,%xmm1 +vpmuludq 0(%rsp),%xmm10,%xmm14 +vpmuludq %xmm4,%xmm13,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpunpcklqdq %xmm5,%xmm6,%xmm15 +vpunpckhqdq %xmm5,%xmm6,%xmm5 +vpaddq subc2(%rip),%xmm5,%xmm6 +vpsubq %xmm15,%xmm6,%xmm6 +vpaddq %xmm5,%xmm15,%xmm15 +vpunpckhqdq %xmm15,%xmm6,%xmm5 +vpunpcklqdq %xmm15,%xmm6,%xmm6 +vmovdqa %xmm6,48(%rsp) +vpmuludq v19_19(%rip),%xmm6,%xmm6 +vmovdqa %xmm6,64(%rsp) +vmovdqa %xmm5,80(%rsp) +vpmuludq v38_38(%rip),%xmm5,%xmm5 +vmovdqa %xmm5,96(%rsp) +vpmuludq 48(%rsp),%xmm10,%xmm5 +vpaddq %xmm0,%xmm0,%xmm6 +vpmuludq %xmm6,%xmm0,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpmuludq 80(%rsp),%xmm10,%xmm0 +vpmuludq %xmm4,%xmm6,%xmm15 +vpaddq %xmm15,%xmm0,%xmm0 +vpmuludq %xmm6,%xmm13,%xmm15 +vpaddq %xmm15,%xmm1,%xmm1 +vpmuludq %xmm6,%xmm2,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpunpcklqdq %xmm7,%xmm8,%xmm15 +vpunpckhqdq %xmm7,%xmm8,%xmm7 +vpaddq subc2(%rip),%xmm7,%xmm8 +vpsubq %xmm15,%xmm8,%xmm8 +vpaddq %xmm7,%xmm15,%xmm15 +vpunpckhqdq %xmm15,%xmm8,%xmm7 +vpunpcklqdq %xmm15,%xmm8,%xmm8 +vmovdqa %xmm8,112(%rsp) +vpmuludq v19_19(%rip),%xmm8,%xmm8 +vmovdqa %xmm8,448(%rsp) +vpmuludq 112(%rsp),%xmm10,%xmm8 +vpmuludq %xmm7,%xmm10,%xmm10 +vpmuludq v38_38(%rip),%xmm7,%xmm15 +vpmuludq %xmm15,%xmm7,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq %xmm15,%xmm13,%xmm7 +vpaddq %xmm7,%xmm3,%xmm3 +vpmuludq %xmm15,%xmm2,%xmm7 +vpaddq %xmm7,%xmm11,%xmm11 +vpmuludq 80(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm7,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq 16(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 48(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm0,%xmm0 +vpmuludq 112(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm10,%xmm10 +vpmuludq %xmm15,%xmm6,%xmm7 +vpaddq %xmm7,%xmm12,%xmm12 +vpmuludq %xmm15,%xmm4,%xmm7 +vpaddq %xmm7,%xmm9,%xmm9 +vpaddq %xmm2,%xmm2,%xmm2 +vpmuludq %xmm4,%xmm2,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 448(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm3,%xmm3 +vpmuludq 448(%rsp),%xmm6,%xmm7 +vpaddq %xmm7,%xmm11,%xmm11 +vpmuludq 0(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm0,%xmm0 +vpmuludq 48(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq 80(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 96(%rsp),%xmm4,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq %xmm4,%xmm4,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpaddq %xmm4,%xmm4,%xmm2 +vpmuludq 448(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vpmuludq 16(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq 48(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm14,%xmm14 +vpmuludq 96(%rsp),%xmm6,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 448(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 16(%rsp),%xmm6,%xmm4 +vpaddq %xmm4,%xmm8,%xmm8 +vpmuludq 48(%rsp),%xmm6,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 80(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vpmuludq 112(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm0,%xmm0 +vmovdqa 48(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 448(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 80(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 448(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm14,%xmm14 +vpmuludq 64(%rsp),%xmm2,%xmm4 +vpaddq 
%xmm4,%xmm3,%xmm3 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 64(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 96(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vmovdqa 48(%rsp),%xmm4 +vpmuludq 96(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 0(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vmovdqa 32(%rsp),%xmm2 +vpmuludq 0(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vmovdqa 64(%rsp),%xmm2 +vpmuludq 48(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vmovdqa 96(%rsp),%xmm2 +vpmuludq 80(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vmovdqa 448(%rsp),%xmm2 +vpmuludq 112(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpsrlq $26,%xmm3,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m26(%rip),%xmm3,%xmm3 +vpsrlq $25,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $25,%xmm11,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpand m25(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm5,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpand m26(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm12,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpand m26(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm0,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm9,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vpand m25(%rip),%xmm9,%xmm9 +vpsrlq $26,%xmm8,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $25,%xmm10,%xmm2 +vpsllq $4,%xmm2,%xmm4 +vpaddq %xmm2,%xmm3,%xmm3 +vpsllq $1,%xmm2,%xmm2 +vpaddq %xmm2,%xmm4,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $26,%xmm3,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m26(%rip),%xmm3,%xmm3 +vpunpckhqdq %xmm11,%xmm3,%xmm2 +vmovdqa %xmm2,0(%rsp) +vpshufd $0,%xmm3,%xmm2 +vpshufd $0,%xmm11,%xmm3 +vpmuludq 160(%rsp),%xmm2,%xmm4 +vpmuludq 432(%rsp),%xmm3,%xmm6 +vpaddq %xmm6,%xmm4,%xmm4 +vpmuludq 176(%rsp),%xmm2,%xmm6 +vpmuludq 304(%rsp),%xmm3,%xmm7 +vpaddq %xmm7,%xmm6,%xmm6 +vpmuludq 208(%rsp),%xmm2,%xmm7 +vpmuludq 336(%rsp),%xmm3,%xmm11 +vpaddq %xmm11,%xmm7,%xmm7 +vpmuludq 240(%rsp),%xmm2,%xmm11 +vpmuludq 368(%rsp),%xmm3,%xmm13 +vpaddq %xmm13,%xmm11,%xmm11 +vpmuludq 272(%rsp),%xmm2,%xmm2 +vpmuludq 400(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpunpckhqdq %xmm9,%xmm12,%xmm3 +vmovdqa %xmm3,16(%rsp) +vpshufd $0,%xmm12,%xmm3 +vpshufd $0,%xmm9,%xmm9 +vpmuludq 288(%rsp),%xmm3,%xmm12 +vpaddq %xmm12,%xmm4,%xmm4 +vpmuludq 416(%rsp),%xmm9,%xmm12 +vpaddq %xmm12,%xmm4,%xmm4 +vpmuludq 160(%rsp),%xmm3,%xmm12 +vpaddq %xmm12,%xmm6,%xmm6 +vpmuludq 432(%rsp),%xmm9,%xmm12 +vpaddq %xmm12,%xmm6,%xmm6 +vpmuludq 176(%rsp),%xmm3,%xmm12 +vpaddq %xmm12,%xmm7,%xmm7 +vpmuludq 304(%rsp),%xmm9,%xmm12 +vpaddq %xmm12,%xmm7,%xmm7 +vpmuludq 208(%rsp),%xmm3,%xmm12 +vpaddq %xmm12,%xmm11,%xmm11 +vpmuludq 336(%rsp),%xmm9,%xmm12 +vpaddq %xmm12,%xmm11,%xmm11 +vpmuludq 240(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 368(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpunpckhqdq %xmm14,%xmm1,%xmm3 +vmovdqa %xmm3,32(%rsp) +vpshufd $0,%xmm1,%xmm1 +vpshufd $0,%xmm14,%xmm3 +vpmuludq 256(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm4,%xmm4 +vpmuludq 384(%rsp),%xmm3,%xmm9 +vpaddq %xmm9,%xmm4,%xmm4 +vpmuludq 288(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm6,%xmm6 +vpmuludq 416(%rsp),%xmm3,%xmm9 +vpaddq %xmm9,%xmm6,%xmm6 +vpmuludq 160(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm7,%xmm7 +vpmuludq 432(%rsp),%xmm3,%xmm9 +vpaddq %xmm9,%xmm7,%xmm7 +vpmuludq 176(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm11,%xmm11 +vpmuludq 304(%rsp),%xmm3,%xmm9 +vpaddq 
%xmm9,%xmm11,%xmm11 +vpmuludq 208(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm2,%xmm2 +vpmuludq 336(%rsp),%xmm3,%xmm1 +vpaddq %xmm1,%xmm2,%xmm2 +vpunpckhqdq %xmm0,%xmm5,%xmm1 +vmovdqa %xmm1,48(%rsp) +vpshufd $0,%xmm5,%xmm1 +vpshufd $0,%xmm0,%xmm0 +vpmuludq 224(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 352(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 256(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 384(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 288(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 416(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 160(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 432(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 176(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm2,%xmm2 +vpmuludq 304(%rsp),%xmm0,%xmm0 +vpaddq %xmm0,%xmm2,%xmm2 +vpunpckhqdq %xmm10,%xmm8,%xmm0 +vmovdqa %xmm0,64(%rsp) +vpshufd $0,%xmm8,%xmm0 +vpshufd $0,%xmm10,%xmm1 +vpmuludq 192(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 320(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 224(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 352(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 256(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 384(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 288(%rsp),%xmm0,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 416(%rsp),%xmm1,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 160(%rsp),%xmm0,%xmm0 +vpaddq %xmm0,%xmm2,%xmm2 +vpmuludq 432(%rsp),%xmm1,%xmm0 +vpaddq %xmm0,%xmm2,%xmm2 +vmovdqa %xmm4,80(%rsp) +vmovdqa %xmm6,96(%rsp) +vmovdqa %xmm7,112(%rsp) +vmovdqa %xmm11,448(%rsp) +vmovdqa %xmm2,496(%rsp) +vmovdqa 144(%rsp),%xmm0 +vpmuludq %xmm0,%xmm0,%xmm1 +vpaddq %xmm0,%xmm0,%xmm0 +vmovdqa 128(%rsp),%xmm2 +vpmuludq %xmm2,%xmm0,%xmm3 +vmovdqa 480(%rsp),%xmm4 +vpmuludq %xmm4,%xmm0,%xmm5 +vmovdqa 464(%rsp),%xmm6 +vpmuludq %xmm6,%xmm0,%xmm7 +vmovdqa 528(%rsp),%xmm8 +vpmuludq %xmm8,%xmm0,%xmm9 +vpmuludq 512(%rsp),%xmm0,%xmm10 +vpmuludq 592(%rsp),%xmm0,%xmm11 +vpmuludq 576(%rsp),%xmm0,%xmm12 +vpmuludq 624(%rsp),%xmm0,%xmm13 +vmovdqa 672(%rsp),%xmm14 +vpmuludq %xmm14,%xmm0,%xmm0 +vpmuludq v38_38(%rip),%xmm14,%xmm15 +vpmuludq %xmm15,%xmm14,%xmm14 +vpaddq %xmm14,%xmm13,%xmm13 +vpaddq %xmm6,%xmm6,%xmm14 +vpmuludq %xmm14,%xmm6,%xmm6 +vpaddq %xmm6,%xmm11,%xmm11 +vpaddq %xmm2,%xmm2,%xmm6 +vpmuludq %xmm6,%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq %xmm15,%xmm6,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vpmuludq %xmm15,%xmm4,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpmuludq 544(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 592(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 640(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 624(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq %xmm4,%xmm6,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq %xmm14,%xmm6,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq %xmm8,%xmm6,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq %xmm15,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq %xmm15,%xmm8,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq %xmm4,%xmm4,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq %xmm14,%xmm4,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpaddq %xmm4,%xmm4,%xmm2 +vpmuludq %xmm8,%xmm2,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vpmuludq 688(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq 688(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vpmuludq 512(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vpmuludq 592(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm13,%xmm13 +vpmuludq 576(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq 656(%rsp),%xmm8,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpmuludq 
%xmm8,%xmm14,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq %xmm8,%xmm8,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpaddq %xmm8,%xmm8,%xmm2 +vpmuludq 688(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vpmuludq 544(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 592(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 656(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 544(%rsp),%xmm4 +vpmuludq 688(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm7,%xmm7 +vpmuludq 544(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm13,%xmm13 +vpmuludq 592(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm0,%xmm0 +vpmuludq 640(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vpmuludq 624(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vmovdqa 592(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 688(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 608(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 544(%rsp),%xmm4 +vpmuludq 608(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vmovdqa 544(%rsp),%xmm4 +vpmuludq 656(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vmovdqa 592(%rsp),%xmm4 +vpmuludq 656(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm7,%xmm7 +vmovdqa 640(%rsp),%xmm4 +vpmuludq 688(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 512(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vmovdqa 560(%rsp),%xmm2 +vpmuludq 512(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vmovdqa 608(%rsp),%xmm2 +vpmuludq 592(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vmovdqa 656(%rsp),%xmm2 +vpmuludq 576(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vmovdqa 688(%rsp),%xmm2 +vpmuludq 624(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $25,%xmm10,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm3,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm3,%xmm3 +vpsrlq $26,%xmm11,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm5,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpand m26(%rip),%xmm5,%xmm5 +vpsrlq $25,%xmm12,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpand m25(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm7,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $26,%xmm13,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpand m26(%rip),%xmm13,%xmm13 +vpsrlq $26,%xmm9,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $25,%xmm0,%xmm2 +vpsllq $4,%xmm2,%xmm4 +vpaddq %xmm2,%xmm1,%xmm1 +vpsllq $1,%xmm2,%xmm2 +vpaddq %xmm2,%xmm4,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm10,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpand m26(%rip),%xmm1,%xmm1 +vpunpckhqdq %xmm3,%xmm1,%xmm2 +vpunpcklqdq %xmm3,%xmm1,%xmm1 +vmovdqa %xmm1,464(%rsp) +vpaddq subc0(%rip),%xmm2,%xmm3 +vpsubq %xmm1,%xmm3,%xmm3 +vpunpckhqdq %xmm3,%xmm2,%xmm1 +vpunpcklqdq %xmm3,%xmm2,%xmm2 +vmovdqa %xmm2,480(%rsp) +vmovdqa %xmm1,512(%rsp) +vpsllq $1,%xmm1,%xmm1 +vmovdqa %xmm1,528(%rsp) +vpmuludq v121666_121666(%rip),%xmm3,%xmm3 +vmovdqa 80(%rsp),%xmm1 +vpunpcklqdq %xmm1,%xmm3,%xmm2 +vpunpckhqdq %xmm1,%xmm3,%xmm1 +vpunpckhqdq %xmm7,%xmm5,%xmm3 +vpunpcklqdq %xmm7,%xmm5,%xmm4 +vmovdqa %xmm4,544(%rsp) +vpaddq subc2(%rip),%xmm3,%xmm5 +vpsubq %xmm4,%xmm5,%xmm5 +vpunpckhqdq %xmm5,%xmm3,%xmm4 +vpunpcklqdq %xmm5,%xmm3,%xmm3 +vmovdqa %xmm3,560(%rsp) +vmovdqa %xmm4,576(%rsp) +vpsllq $1,%xmm4,%xmm4 +vmovdqa %xmm4,592(%rsp) +vpmuludq v121666_121666(%rip),%xmm5,%xmm5 +vmovdqa 96(%rsp),%xmm3 +vpunpcklqdq %xmm3,%xmm5,%xmm4 +vpunpckhqdq %xmm3,%xmm5,%xmm3 +vpunpckhqdq %xmm10,%xmm9,%xmm5 
+vpunpcklqdq %xmm10,%xmm9,%xmm6 +vmovdqa %xmm6,608(%rsp) +vpaddq subc2(%rip),%xmm5,%xmm7 +vpsubq %xmm6,%xmm7,%xmm7 +vpunpckhqdq %xmm7,%xmm5,%xmm6 +vpunpcklqdq %xmm7,%xmm5,%xmm5 +vmovdqa %xmm5,624(%rsp) +vmovdqa %xmm6,640(%rsp) +vpsllq $1,%xmm6,%xmm6 +vmovdqa %xmm6,656(%rsp) +vpmuludq v121666_121666(%rip),%xmm7,%xmm7 +vmovdqa 112(%rsp),%xmm5 +vpunpcklqdq %xmm5,%xmm7,%xmm6 +vpunpckhqdq %xmm5,%xmm7,%xmm5 +vpunpckhqdq %xmm12,%xmm11,%xmm7 +vpunpcklqdq %xmm12,%xmm11,%xmm8 +vmovdqa %xmm8,672(%rsp) +vpaddq subc2(%rip),%xmm7,%xmm9 +vpsubq %xmm8,%xmm9,%xmm9 +vpunpckhqdq %xmm9,%xmm7,%xmm8 +vpunpcklqdq %xmm9,%xmm7,%xmm7 +vmovdqa %xmm7,688(%rsp) +vmovdqa %xmm8,704(%rsp) +vpsllq $1,%xmm8,%xmm8 +vmovdqa %xmm8,720(%rsp) +vpmuludq v121666_121666(%rip),%xmm9,%xmm9 +vmovdqa 448(%rsp),%xmm7 +vpunpcklqdq %xmm7,%xmm9,%xmm8 +vpunpckhqdq %xmm7,%xmm9,%xmm7 +vpunpckhqdq %xmm0,%xmm13,%xmm9 +vpunpcklqdq %xmm0,%xmm13,%xmm0 +vmovdqa %xmm0,448(%rsp) +vpaddq subc2(%rip),%xmm9,%xmm10 +vpsubq %xmm0,%xmm10,%xmm10 +vpunpckhqdq %xmm10,%xmm9,%xmm0 +vpunpcklqdq %xmm10,%xmm9,%xmm9 +vmovdqa %xmm9,736(%rsp) +vmovdqa %xmm0,752(%rsp) +vpsllq $1,%xmm0,%xmm0 +vmovdqa %xmm0,768(%rsp) +vpmuludq v121666_121666(%rip),%xmm10,%xmm10 +vmovdqa 496(%rsp),%xmm0 +vpunpcklqdq %xmm0,%xmm10,%xmm9 +vpunpckhqdq %xmm0,%xmm10,%xmm0 +vpsrlq $26,%xmm2,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpand m26(%rip),%xmm2,%xmm2 +vpsrlq $25,%xmm5,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $25,%xmm1,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpand m25(%rip),%xmm1,%xmm1 +vpsrlq $26,%xmm8,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm4,%xmm10 +vpaddq %xmm10,%xmm3,%xmm3 +vpand m26(%rip),%xmm4,%xmm4 +vpsrlq $25,%xmm7,%xmm10 +vpaddq %xmm10,%xmm9,%xmm9 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $25,%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpand m25(%rip),%xmm3,%xmm3 +vpsrlq $26,%xmm9,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $26,%xmm6,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm0,%xmm10 +vpsllq $4,%xmm10,%xmm11 +vpaddq %xmm10,%xmm2,%xmm2 +vpsllq $1,%xmm10,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpaddq %xmm11,%xmm2,%xmm2 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm5,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm2,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpand m26(%rip),%xmm2,%xmm2 +vpunpckhqdq %xmm1,%xmm2,%xmm10 +vmovdqa %xmm10,80(%rsp) +vpunpcklqdq %xmm1,%xmm2,%xmm1 +vpunpckhqdq %xmm3,%xmm4,%xmm2 +vmovdqa %xmm2,96(%rsp) +vpunpcklqdq %xmm3,%xmm4,%xmm2 +vpunpckhqdq %xmm5,%xmm6,%xmm3 +vmovdqa %xmm3,112(%rsp) +vpunpcklqdq %xmm5,%xmm6,%xmm3 +vpunpckhqdq %xmm7,%xmm8,%xmm4 +vmovdqa %xmm4,128(%rsp) +vpunpcklqdq %xmm7,%xmm8,%xmm4 +vpunpckhqdq %xmm0,%xmm9,%xmm5 +vmovdqa %xmm5,144(%rsp) +vpunpcklqdq %xmm0,%xmm9,%xmm0 +vmovdqa 464(%rsp),%xmm5 +vpaddq %xmm5,%xmm1,%xmm1 +vpunpcklqdq %xmm1,%xmm5,%xmm6 +vpunpckhqdq %xmm1,%xmm5,%xmm1 +vpmuludq 512(%rsp),%xmm6,%xmm5 +vpmuludq 480(%rsp),%xmm1,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 560(%rsp),%xmm6,%xmm7 +vpmuludq 528(%rsp),%xmm1,%xmm8 +vpaddq %xmm8,%xmm7,%xmm7 +vpmuludq 576(%rsp),%xmm6,%xmm8 +vpmuludq 560(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm8,%xmm8 +vpmuludq 624(%rsp),%xmm6,%xmm9 +vpmuludq 592(%rsp),%xmm1,%xmm10 +vpaddq %xmm10,%xmm9,%xmm9 +vpmuludq 640(%rsp),%xmm6,%xmm10 +vpmuludq 624(%rsp),%xmm1,%xmm11 +vpaddq %xmm11,%xmm10,%xmm10 +vpmuludq 688(%rsp),%xmm6,%xmm11 +vpmuludq 656(%rsp),%xmm1,%xmm12 +vpaddq %xmm12,%xmm11,%xmm11 +vpmuludq 704(%rsp),%xmm6,%xmm12 +vpmuludq 688(%rsp),%xmm1,%xmm13 +vpaddq 
%xmm13,%xmm12,%xmm12 +vpmuludq 736(%rsp),%xmm6,%xmm13 +vpmuludq 720(%rsp),%xmm1,%xmm14 +vpaddq %xmm14,%xmm13,%xmm13 +vpmuludq 752(%rsp),%xmm6,%xmm14 +vpmuludq 736(%rsp),%xmm1,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpmuludq 480(%rsp),%xmm6,%xmm6 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 768(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vmovdqa 544(%rsp),%xmm1 +vpaddq %xmm1,%xmm2,%xmm2 +vpunpcklqdq %xmm2,%xmm1,%xmm15 +vpunpckhqdq %xmm2,%xmm1,%xmm1 +vpmuludq 480(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 512(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 560(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 576(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 624(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 640(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 688(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 704(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm15,%xmm15 +vpmuludq 736(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 752(%rsp),%xmm15,%xmm15 +vpaddq %xmm15,%xmm5,%xmm5 +vpmuludq 480(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 528(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 560(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 592(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 624(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 656(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 688(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 720(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 736(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 768(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vmovdqa 608(%rsp),%xmm1 +vpaddq %xmm1,%xmm3,%xmm3 +vpunpcklqdq %xmm3,%xmm1,%xmm2 +vpunpckhqdq %xmm3,%xmm1,%xmm1 +vpmuludq 480(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpmuludq 512(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm10,%xmm10 +vpmuludq 560(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 576(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm12,%xmm12 +vpmuludq 624(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 640(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 688(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 704(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 736(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 752(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 480(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 528(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 560(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 592(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 624(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 656(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 688(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 720(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 736(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 768(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vmovdqa 672(%rsp),%xmm1 +vpaddq %xmm1,%xmm4,%xmm4 +vpunpcklqdq %xmm4,%xmm1,%xmm2 +vpunpckhqdq %xmm4,%xmm1,%xmm1 +vpmuludq 480(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 512(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm12,%xmm12 +vpmuludq 560(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 576(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 624(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 640(%rsp),%xmm2,%xmm3 
+vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 688(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 704(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 736(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpmuludq 752(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 480(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 528(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 560(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 592(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 624(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 656(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 688(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 720(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 736(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 768(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm11,%xmm11 +vmovdqa 448(%rsp),%xmm1 +vpaddq %xmm1,%xmm0,%xmm0 +vpunpcklqdq %xmm0,%xmm1,%xmm2 +vpunpckhqdq %xmm0,%xmm1,%xmm0 +vpmuludq 480(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm13,%xmm13 +vpmuludq 512(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 560(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpmuludq 576(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm5,%xmm5 +vpmuludq 624(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vpmuludq 640(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm8,%xmm8 +vpmuludq 688(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vpmuludq 704(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm10,%xmm10 +vpmuludq 736(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm11,%xmm11 +vpmuludq 752(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 480(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm0,%xmm0 +vpmuludq 528(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpmuludq 560(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm5,%xmm5 +vpmuludq 592(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vpmuludq 624(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm8,%xmm8 +vpmuludq 656(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vpmuludq 688(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm10,%xmm10 +vpmuludq 720(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm11,%xmm11 +vpmuludq 736(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm12,%xmm12 +vpmuludq 768(%rsp),%xmm0,%xmm0 +vpaddq %xmm0,%xmm13,%xmm13 +vpsrlq $26,%xmm6,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm10,%xmm0 +vpaddq %xmm0,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm5,%xmm0 +vpaddq %xmm0,%xmm7,%xmm7 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm11,%xmm0 +vpaddq %xmm0,%xmm12,%xmm12 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm7,%xmm0 +vpaddq %xmm0,%xmm8,%xmm8 +vpand m26(%rip),%xmm7,%xmm7 +vpsrlq $25,%xmm12,%xmm0 +vpaddq %xmm0,%xmm13,%xmm13 +vpand m25(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm8,%xmm0 +vpaddq %xmm0,%xmm9,%xmm9 +vpand m25(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm13,%xmm0 +vpaddq %xmm0,%xmm14,%xmm14 +vpand m26(%rip),%xmm13,%xmm13 +vpsrlq $26,%xmm9,%xmm0 +vpaddq %xmm0,%xmm10,%xmm10 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $25,%xmm14,%xmm0 +vpsllq $4,%xmm0,%xmm1 +vpaddq %xmm0,%xmm6,%xmm6 +vpsllq $1,%xmm0,%xmm0 +vpaddq %xmm0,%xmm1,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $25,%xmm10,%xmm0 +vpaddq %xmm0,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $26,%xmm6,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpunpckhqdq %xmm5,%xmm6,%xmm1 +vpunpcklqdq %xmm5,%xmm6,%xmm0 +vpunpckhqdq %xmm8,%xmm7,%xmm3 +vpunpcklqdq %xmm8,%xmm7,%xmm2 +vpunpckhqdq %xmm10,%xmm9,%xmm5 +vpunpcklqdq %xmm10,%xmm9,%xmm4 +vpunpckhqdq %xmm12,%xmm11,%xmm7 
+vpunpcklqdq %xmm12,%xmm11,%xmm6 +vpunpckhqdq %xmm14,%xmm13,%xmm9 +vpunpcklqdq %xmm14,%xmm13,%xmm8 +cmp $0,%rdx +jne ._ladder_loop +vmovdqu %xmm1,160(%rdi) +vmovdqu %xmm0,80(%rdi) +vmovdqu %xmm3,176(%rdi) +vmovdqu %xmm2,96(%rdi) +vmovdqu %xmm5,192(%rdi) +vmovdqu %xmm4,112(%rdi) +vmovdqu %xmm7,208(%rdi) +vmovdqu %xmm6,128(%rdi) +vmovdqu %xmm9,224(%rdi) +vmovdqu %xmm8,144(%rdi) +movq 1824(%rsp),%r11 +movq 1832(%rsp),%r12 +movq 1840(%rsp),%r13 +movq 1848(%rsp),%r14 +add %r11,%rsp +ret + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.h new file mode 100644 index 0000000000..ccf4ecaecd --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder.h @@ -0,0 +1,18 @@ +#ifndef ladder_H +#define ladder_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include "fe.h" +#include "ladder_namespace.h" + +extern void ladder(fe *, const unsigned char *); + +#ifdef __cplusplus +} +#endif + +#endif /* ifndef ladder_H */ + diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.S new file mode 100644 index 0000000000..f290d2565e --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.S @@ -0,0 +1,1295 @@ +#ifdef IN_SANDY2X + +#include "ladder_base_namespace.h" +#include "consts_namespace.h" +.p2align 5 + +#ifdef ASM_HIDE_SYMBOL +ASM_HIDE_SYMBOL ladder_base +ASM_HIDE_SYMBOL _ladder_base +#endif +.globl ladder_base +.globl _ladder_base +#ifdef __ELF__ +.type ladder_base, @function +.type _ladder_base, @function +#endif +ladder_base: +_ladder_base: + +mov %rsp,%r11 +and $31,%r11 +add $1568,%r11 +sub %r11,%rsp +movq %r11,1536(%rsp) +movq %r12,1544(%rsp) +movq %r13,1552(%rsp) +vmovdqa v0_0(%rip),%xmm0 +vmovdqa v1_0(%rip),%xmm1 +vmovdqa v9_0(%rip),%xmm2 +vmovdqa %xmm2,0(%rsp) +vmovdqa %xmm0,16(%rsp) +vmovdqa %xmm0,32(%rsp) +vmovdqa %xmm0,48(%rsp) +vmovdqa %xmm0,64(%rsp) +vmovdqa %xmm1,80(%rsp) +vmovdqa %xmm0,96(%rsp) +vmovdqa %xmm0,112(%rsp) +vmovdqa %xmm0,128(%rsp) +vmovdqa %xmm0,144(%rsp) +vmovdqa %xmm1,%xmm0 +vpxor %xmm1,%xmm1,%xmm1 +vpxor %xmm2,%xmm2,%xmm2 +vpxor %xmm3,%xmm3,%xmm3 +vpxor %xmm4,%xmm4,%xmm4 +vpxor %xmm5,%xmm5,%xmm5 +vpxor %xmm6,%xmm6,%xmm6 +vpxor %xmm7,%xmm7,%xmm7 +vpxor %xmm8,%xmm8,%xmm8 +vpxor %xmm9,%xmm9,%xmm9 +movq 0(%rsi),%rdx +movq 8(%rsi),%rcx +movq 16(%rsi),%r8 +movq 24(%rsi),%r9 +shrd $1,%rcx,%rdx +shrd $1,%r8,%rcx +shrd $1,%r9,%r8 +shr $1,%r9 +xorq 0(%rsi),%rdx +xorq 8(%rsi),%rcx +xorq 16(%rsi),%r8 +xorq 24(%rsi),%r9 +leaq 512(%rsp),%rsi +mov $64,%rax + +.p2align 4 +._ladder_base_small_loop: +mov %rdx,%r10 +mov %rcx,%r11 +mov %r8,%r12 +mov %r9,%r13 +shr $1,%rdx +shr $1,%rcx +shr $1,%r8 +shr $1,%r9 +and $1,%r10d +and $1,%r11d +and $1,%r12d +and $1,%r13d +neg %r10 +neg %r11 +neg %r12 +neg %r13 +movl %r10d,0(%rsi) +movl %r11d,256(%rsi) +movl %r12d,512(%rsi) +movl %r13d,768(%rsi) +add $4,%rsi +sub $1,%rax +jne ._ladder_base_small_loop +mov $255,%rdx +add $760,%rsi + +.p2align 4 +._ladder_base_loop: +sub $1,%rdx +vbroadcastss 0(%rsi),%xmm10 +sub $4,%rsi +vmovdqa 0(%rsp),%xmm11 +vmovdqa 80(%rsp),%xmm12 +vpxor %xmm11,%xmm0,%xmm13 +vpand %xmm10,%xmm13,%xmm13 +vpxor %xmm13,%xmm0,%xmm0 +vpxor %xmm13,%xmm11,%xmm11 +vpxor %xmm12,%xmm1,%xmm13 +vpand %xmm10,%xmm13,%xmm13 +vpxor %xmm13,%xmm1,%xmm1 +vpxor %xmm13,%xmm12,%xmm12 +vmovdqa 16(%rsp),%xmm13 +vmovdqa 96(%rsp),%xmm14 +vpxor %xmm13,%xmm2,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor 
%xmm15,%xmm2,%xmm2 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm3,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm3,%xmm3 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,0(%rsp) +vmovdqa %xmm14,16(%rsp) +vmovdqa 32(%rsp),%xmm13 +vmovdqa 112(%rsp),%xmm14 +vpxor %xmm13,%xmm4,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm4,%xmm4 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm5,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm5,%xmm5 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,32(%rsp) +vmovdqa %xmm14,80(%rsp) +vmovdqa 48(%rsp),%xmm13 +vmovdqa 128(%rsp),%xmm14 +vpxor %xmm13,%xmm6,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm6,%xmm6 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm7,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm7,%xmm7 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,48(%rsp) +vmovdqa %xmm14,96(%rsp) +vmovdqa 64(%rsp),%xmm13 +vmovdqa 144(%rsp),%xmm14 +vpxor %xmm13,%xmm8,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm8,%xmm8 +vpxor %xmm15,%xmm13,%xmm13 +vpxor %xmm14,%xmm9,%xmm15 +vpand %xmm10,%xmm15,%xmm15 +vpxor %xmm15,%xmm9,%xmm9 +vpxor %xmm15,%xmm14,%xmm14 +vmovdqa %xmm13,64(%rsp) +vmovdqa %xmm14,112(%rsp) +vpaddq subc0(%rip),%xmm11,%xmm10 +vpsubq %xmm12,%xmm10,%xmm10 +vpaddq %xmm12,%xmm11,%xmm11 +vpunpckhqdq %xmm10,%xmm11,%xmm12 +vpunpcklqdq %xmm10,%xmm11,%xmm10 +vpaddq %xmm1,%xmm0,%xmm11 +vpaddq subc0(%rip),%xmm0,%xmm0 +vpsubq %xmm1,%xmm0,%xmm0 +vpunpckhqdq %xmm11,%xmm0,%xmm1 +vpunpcklqdq %xmm11,%xmm0,%xmm0 +vpmuludq %xmm0,%xmm10,%xmm11 +vpmuludq %xmm1,%xmm10,%xmm13 +vmovdqa %xmm1,128(%rsp) +vpaddq %xmm1,%xmm1,%xmm1 +vpmuludq %xmm0,%xmm12,%xmm14 +vmovdqa %xmm0,144(%rsp) +vpaddq %xmm14,%xmm13,%xmm13 +vpmuludq %xmm1,%xmm12,%xmm0 +vmovdqa %xmm1,160(%rsp) +vpaddq %xmm3,%xmm2,%xmm1 +vpaddq subc2(%rip),%xmm2,%xmm2 +vpsubq %xmm3,%xmm2,%xmm2 +vpunpckhqdq %xmm1,%xmm2,%xmm3 +vpunpcklqdq %xmm1,%xmm2,%xmm1 +vpmuludq %xmm1,%xmm10,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq %xmm3,%xmm10,%xmm2 +vmovdqa %xmm3,176(%rsp) +vpaddq %xmm3,%xmm3,%xmm3 +vpmuludq %xmm1,%xmm12,%xmm14 +vmovdqa %xmm1,192(%rsp) +vpaddq %xmm14,%xmm2,%xmm2 +vpmuludq %xmm3,%xmm12,%xmm1 +vmovdqa %xmm3,208(%rsp) +vpaddq %xmm5,%xmm4,%xmm3 +vpaddq subc2(%rip),%xmm4,%xmm4 +vpsubq %xmm5,%xmm4,%xmm4 +vpunpckhqdq %xmm3,%xmm4,%xmm5 +vpunpcklqdq %xmm3,%xmm4,%xmm3 +vpmuludq %xmm3,%xmm10,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq %xmm5,%xmm10,%xmm4 +vmovdqa %xmm5,224(%rsp) +vpaddq %xmm5,%xmm5,%xmm5 +vpmuludq %xmm3,%xmm12,%xmm14 +vmovdqa %xmm3,240(%rsp) +vpaddq %xmm14,%xmm4,%xmm4 +vpaddq %xmm7,%xmm6,%xmm3 +vpaddq subc2(%rip),%xmm6,%xmm6 +vpsubq %xmm7,%xmm6,%xmm6 +vpunpckhqdq %xmm3,%xmm6,%xmm7 +vpunpcklqdq %xmm3,%xmm6,%xmm3 +vpmuludq %xmm3,%xmm10,%xmm6 +vpmuludq %xmm5,%xmm12,%xmm14 +vmovdqa %xmm5,256(%rsp) +vpmuludq v19_19(%rip),%xmm5,%xmm5 +vmovdqa %xmm5,272(%rsp) +vpaddq %xmm14,%xmm6,%xmm6 +vpmuludq %xmm7,%xmm10,%xmm5 +vmovdqa %xmm7,288(%rsp) +vpaddq %xmm7,%xmm7,%xmm7 +vpmuludq %xmm3,%xmm12,%xmm14 +vmovdqa %xmm3,304(%rsp) +vpaddq %xmm14,%xmm5,%xmm5 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vmovdqa %xmm3,320(%rsp) +vpaddq %xmm9,%xmm8,%xmm3 +vpaddq subc2(%rip),%xmm8,%xmm8 +vpsubq %xmm9,%xmm8,%xmm8 +vpunpckhqdq %xmm3,%xmm8,%xmm9 +vpunpcklqdq %xmm3,%xmm8,%xmm3 +vmovdqa %xmm3,336(%rsp) +vpmuludq %xmm7,%xmm12,%xmm8 +vmovdqa %xmm7,352(%rsp) +vpmuludq v19_19(%rip),%xmm7,%xmm7 +vmovdqa %xmm7,368(%rsp) +vpmuludq %xmm3,%xmm10,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq %xmm9,%xmm10,%xmm7 +vmovdqa %xmm9,384(%rsp) +vpaddq %xmm9,%xmm9,%xmm9 +vpmuludq %xmm3,%xmm12,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 
+vmovdqa %xmm3,400(%rsp) +vpmuludq v19_19(%rip),%xmm12,%xmm12 +vpmuludq %xmm9,%xmm12,%xmm3 +vmovdqa %xmm9,416(%rsp) +vpaddq %xmm3,%xmm11,%xmm11 +vmovdqa 0(%rsp),%xmm3 +vmovdqa 16(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 192(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 176(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 240(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 224(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 304(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 288(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 336(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 384(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 160(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 192(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 208(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 240(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 256(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 304(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 352(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 336(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 416(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm0,%xmm0 +vmovdqa 32(%rsp),%xmm3 +vmovdqa 80(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 192(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 176(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 240(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 224(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 304(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 288(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 336(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 384(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 160(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 192(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 208(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 240(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 256(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 304(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 352(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 336(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 416(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm1,%xmm1 +vmovdqa 48(%rsp),%xmm3 +vmovdqa 96(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpmuludq 192(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 176(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 
240(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 224(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 304(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 288(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 336(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 384(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 160(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 192(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 208(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 240(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 256(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 304(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 352(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 336(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 416(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm6,%xmm6 +vmovdqa 64(%rsp),%xmm3 +vmovdqa 112(%rsp),%xmm9 +vpaddq subc2(%rip),%xmm3,%xmm10 +vpsubq %xmm9,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm10,%xmm3,%xmm9 +vpunpcklqdq %xmm10,%xmm3,%xmm3 +vpmuludq 144(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpmuludq 128(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm3,%xmm3 +vpmuludq 192(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpmuludq 176(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm13,%xmm13 +vpmuludq 240(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpmuludq 224(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm2,%xmm2 +vpmuludq 304(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpmuludq 288(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpmuludq 336(%rsp),%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpmuludq 384(%rsp),%xmm3,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 144(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq v19_19(%rip),%xmm9,%xmm9 +vpmuludq 160(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 192(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 208(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpmuludq 240(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpmuludq 256(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpmuludq 304(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpmuludq 352(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 336(%rsp),%xmm9,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 416(%rsp),%xmm9,%xmm9 +vpaddq %xmm9,%xmm8,%xmm8 +vpsrlq $25,%xmm4,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpand m25(%rip),%xmm4,%xmm4 +vpsrlq $26,%xmm11,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm6,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm13,%xmm3 +vpaddq %xmm3,%xmm0,%xmm0 +vpand m25(%rip),%xmm13,%xmm13 +vpsrlq $25,%xmm5,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm0,%xmm3 +vpaddq %xmm3,%xmm2,%xmm2 +vpand m26(%rip),%xmm0,%xmm0 +vpsrlq $26,%xmm8,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $25,%xmm2,%xmm3 +vpaddq %xmm3,%xmm1,%xmm1 +vpand m25(%rip),%xmm2,%xmm2 +vpsrlq $25,%xmm7,%xmm3 +vpsllq $4,%xmm3,%xmm9 +vpaddq %xmm3,%xmm11,%xmm11 +vpsllq $1,%xmm3,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpaddq %xmm9,%xmm11,%xmm11 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $26,%xmm1,%xmm3 +vpaddq %xmm3,%xmm4,%xmm4 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $26,%xmm11,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $25,%xmm4,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpand m25(%rip),%xmm4,%xmm4 +vpunpcklqdq %xmm13,%xmm11,%xmm3 +vpunpckhqdq 
%xmm13,%xmm11,%xmm9 +vpaddq subc0(%rip),%xmm9,%xmm10 +vpsubq %xmm3,%xmm10,%xmm10 +vpaddq %xmm9,%xmm3,%xmm3 +vpunpckhqdq %xmm3,%xmm10,%xmm9 +vpunpcklqdq %xmm3,%xmm10,%xmm10 +vpmuludq %xmm10,%xmm10,%xmm3 +vpaddq %xmm10,%xmm10,%xmm10 +vpmuludq %xmm9,%xmm10,%xmm11 +vpunpcklqdq %xmm2,%xmm0,%xmm12 +vpunpckhqdq %xmm2,%xmm0,%xmm0 +vpaddq subc2(%rip),%xmm0,%xmm2 +vpsubq %xmm12,%xmm2,%xmm2 +vpaddq %xmm0,%xmm12,%xmm12 +vpunpckhqdq %xmm12,%xmm2,%xmm0 +vpunpcklqdq %xmm12,%xmm2,%xmm2 +vpmuludq %xmm2,%xmm10,%xmm12 +vpaddq %xmm9,%xmm9,%xmm13 +vpmuludq %xmm13,%xmm9,%xmm9 +vpaddq %xmm9,%xmm12,%xmm12 +vpmuludq %xmm0,%xmm10,%xmm9 +vpmuludq %xmm2,%xmm13,%xmm14 +vpaddq %xmm14,%xmm9,%xmm9 +vpunpcklqdq %xmm4,%xmm1,%xmm14 +vpunpckhqdq %xmm4,%xmm1,%xmm1 +vpaddq subc2(%rip),%xmm1,%xmm4 +vpsubq %xmm14,%xmm4,%xmm4 +vpaddq %xmm1,%xmm14,%xmm14 +vpunpckhqdq %xmm14,%xmm4,%xmm1 +vpunpcklqdq %xmm14,%xmm4,%xmm4 +vmovdqa %xmm1,0(%rsp) +vpaddq %xmm1,%xmm1,%xmm1 +vmovdqa %xmm1,16(%rsp) +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vmovdqa %xmm1,32(%rsp) +vpmuludq %xmm4,%xmm10,%xmm1 +vpmuludq %xmm2,%xmm2,%xmm14 +vpaddq %xmm14,%xmm1,%xmm1 +vpmuludq 0(%rsp),%xmm10,%xmm14 +vpmuludq %xmm4,%xmm13,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpunpcklqdq %xmm5,%xmm6,%xmm15 +vpunpckhqdq %xmm5,%xmm6,%xmm5 +vpaddq subc2(%rip),%xmm5,%xmm6 +vpsubq %xmm15,%xmm6,%xmm6 +vpaddq %xmm5,%xmm15,%xmm15 +vpunpckhqdq %xmm15,%xmm6,%xmm5 +vpunpcklqdq %xmm15,%xmm6,%xmm6 +vmovdqa %xmm6,48(%rsp) +vpmuludq v19_19(%rip),%xmm6,%xmm6 +vmovdqa %xmm6,64(%rsp) +vmovdqa %xmm5,80(%rsp) +vpmuludq v38_38(%rip),%xmm5,%xmm5 +vmovdqa %xmm5,96(%rsp) +vpmuludq 48(%rsp),%xmm10,%xmm5 +vpaddq %xmm0,%xmm0,%xmm6 +vpmuludq %xmm6,%xmm0,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpmuludq 80(%rsp),%xmm10,%xmm0 +vpmuludq %xmm4,%xmm6,%xmm15 +vpaddq %xmm15,%xmm0,%xmm0 +vpmuludq %xmm6,%xmm13,%xmm15 +vpaddq %xmm15,%xmm1,%xmm1 +vpmuludq %xmm6,%xmm2,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpunpcklqdq %xmm7,%xmm8,%xmm15 +vpunpckhqdq %xmm7,%xmm8,%xmm7 +vpaddq subc2(%rip),%xmm7,%xmm8 +vpsubq %xmm15,%xmm8,%xmm8 +vpaddq %xmm7,%xmm15,%xmm15 +vpunpckhqdq %xmm15,%xmm8,%xmm7 +vpunpcklqdq %xmm15,%xmm8,%xmm8 +vmovdqa %xmm8,112(%rsp) +vpmuludq v19_19(%rip),%xmm8,%xmm8 +vmovdqa %xmm8,160(%rsp) +vpmuludq 112(%rsp),%xmm10,%xmm8 +vpmuludq %xmm7,%xmm10,%xmm10 +vpmuludq v38_38(%rip),%xmm7,%xmm15 +vpmuludq %xmm15,%xmm7,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq %xmm15,%xmm13,%xmm7 +vpaddq %xmm7,%xmm3,%xmm3 +vpmuludq %xmm15,%xmm2,%xmm7 +vpaddq %xmm7,%xmm11,%xmm11 +vpmuludq 80(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm7,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq 16(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 48(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm0,%xmm0 +vpmuludq 112(%rsp),%xmm13,%xmm7 +vpaddq %xmm7,%xmm10,%xmm10 +vpmuludq %xmm15,%xmm6,%xmm7 +vpaddq %xmm7,%xmm12,%xmm12 +vpmuludq %xmm15,%xmm4,%xmm7 +vpaddq %xmm7,%xmm9,%xmm9 +vpaddq %xmm2,%xmm2,%xmm2 +vpmuludq %xmm4,%xmm2,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 160(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm3,%xmm3 +vpmuludq 160(%rsp),%xmm6,%xmm7 +vpaddq %xmm7,%xmm11,%xmm11 +vpmuludq 0(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm0,%xmm0 +vpmuludq 48(%rsp),%xmm2,%xmm7 +vpaddq %xmm7,%xmm8,%xmm8 +vpmuludq 80(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 96(%rsp),%xmm4,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq %xmm4,%xmm4,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpaddq %xmm4,%xmm4,%xmm2 +vpmuludq 160(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vpmuludq 16(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq 48(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm14,%xmm14 +vpmuludq 96(%rsp),%xmm6,%xmm4 
+vpaddq %xmm4,%xmm3,%xmm3 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 160(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 16(%rsp),%xmm6,%xmm4 +vpaddq %xmm4,%xmm8,%xmm8 +vpmuludq 48(%rsp),%xmm6,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 80(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vpmuludq 112(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm0,%xmm0 +vmovdqa 48(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 160(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 80(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 160(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm14,%xmm14 +vpmuludq 64(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 64(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vmovdqa 16(%rsp),%xmm4 +vpmuludq 96(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vmovdqa 48(%rsp),%xmm4 +vpmuludq 96(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 0(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vmovdqa 32(%rsp),%xmm2 +vpmuludq 0(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vmovdqa 64(%rsp),%xmm2 +vpmuludq 48(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vmovdqa 96(%rsp),%xmm2 +vpmuludq 80(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vmovdqa 160(%rsp),%xmm2 +vpmuludq 112(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpsrlq $26,%xmm3,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m26(%rip),%xmm3,%xmm3 +vpsrlq $25,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $25,%xmm11,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpand m25(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm5,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpand m26(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm12,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpand m26(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm0,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm9,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vpand m25(%rip),%xmm9,%xmm9 +vpsrlq $26,%xmm8,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $25,%xmm10,%xmm2 +vpsllq $4,%xmm2,%xmm4 +vpaddq %xmm2,%xmm3,%xmm3 +vpsllq $1,%xmm2,%xmm2 +vpaddq %xmm2,%xmm4,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $26,%xmm3,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m26(%rip),%xmm3,%xmm3 +vpunpckhqdq %xmm11,%xmm3,%xmm2 +vmovdqa %xmm2,0(%rsp) +vpunpcklqdq %xmm11,%xmm3,%xmm2 +vpmuludq v9_9(%rip),%xmm2,%xmm2 +vmovdqa %xmm2,80(%rsp) +vpunpckhqdq %xmm9,%xmm12,%xmm2 +vmovdqa %xmm2,16(%rsp) +vpunpcklqdq %xmm9,%xmm12,%xmm2 +vpmuludq v9_9(%rip),%xmm2,%xmm2 +vmovdqa %xmm2,96(%rsp) +vpunpckhqdq %xmm14,%xmm1,%xmm2 +vmovdqa %xmm2,32(%rsp) +vpunpcklqdq %xmm14,%xmm1,%xmm1 +vpmuludq v9_9(%rip),%xmm1,%xmm1 +vmovdqa %xmm1,112(%rsp) +vpunpckhqdq %xmm0,%xmm5,%xmm1 +vmovdqa %xmm1,48(%rsp) +vpunpcklqdq %xmm0,%xmm5,%xmm0 +vpmuludq v9_9(%rip),%xmm0,%xmm0 +vmovdqa %xmm0,160(%rsp) +vpunpckhqdq %xmm10,%xmm8,%xmm0 +vmovdqa %xmm0,64(%rsp) +vpunpcklqdq %xmm10,%xmm8,%xmm0 +vpmuludq v9_9(%rip),%xmm0,%xmm0 +vmovdqa %xmm0,208(%rsp) +vmovdqa 144(%rsp),%xmm0 +vpmuludq %xmm0,%xmm0,%xmm1 +vpaddq %xmm0,%xmm0,%xmm0 +vmovdqa 128(%rsp),%xmm2 +vpmuludq %xmm2,%xmm0,%xmm3 +vmovdqa 192(%rsp),%xmm4 +vpmuludq %xmm4,%xmm0,%xmm5 +vmovdqa 176(%rsp),%xmm6 +vpmuludq %xmm6,%xmm0,%xmm7 +vmovdqa 240(%rsp),%xmm8 +vpmuludq %xmm8,%xmm0,%xmm9 +vpmuludq 224(%rsp),%xmm0,%xmm10 +vpmuludq 304(%rsp),%xmm0,%xmm11 +vpmuludq 288(%rsp),%xmm0,%xmm12 +vpmuludq 336(%rsp),%xmm0,%xmm13 +vmovdqa 384(%rsp),%xmm14 +vpmuludq %xmm14,%xmm0,%xmm0 
+vpmuludq v38_38(%rip),%xmm14,%xmm15 +vpmuludq %xmm15,%xmm14,%xmm14 +vpaddq %xmm14,%xmm13,%xmm13 +vpaddq %xmm6,%xmm6,%xmm14 +vpmuludq %xmm14,%xmm6,%xmm6 +vpaddq %xmm6,%xmm11,%xmm11 +vpaddq %xmm2,%xmm2,%xmm6 +vpmuludq %xmm6,%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq %xmm15,%xmm6,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vpmuludq %xmm15,%xmm4,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpmuludq 256(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 304(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 352(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 336(%rsp),%xmm6,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq %xmm4,%xmm6,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq %xmm14,%xmm6,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq %xmm8,%xmm6,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq %xmm15,%xmm14,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq %xmm15,%xmm8,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq %xmm4,%xmm4,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq %xmm14,%xmm4,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpaddq %xmm4,%xmm4,%xmm2 +vpmuludq %xmm8,%xmm2,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vpmuludq 400(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpmuludq 400(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vpmuludq 224(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vpmuludq 304(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm13,%xmm13 +vpmuludq 288(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpmuludq 368(%rsp),%xmm8,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpmuludq %xmm8,%xmm14,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq %xmm8,%xmm8,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpaddq %xmm8,%xmm8,%xmm2 +vpmuludq 400(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vpmuludq 256(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 304(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 368(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 256(%rsp),%xmm4 +vpmuludq 400(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm7,%xmm7 +vpmuludq 256(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm13,%xmm13 +vpmuludq 304(%rsp),%xmm14,%xmm4 +vpaddq %xmm4,%xmm0,%xmm0 +vpmuludq 352(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm11,%xmm11 +vpmuludq 336(%rsp),%xmm15,%xmm4 +vpaddq %xmm4,%xmm12,%xmm12 +vmovdqa 304(%rsp),%xmm4 +vpaddq %xmm4,%xmm4,%xmm4 +vpmuludq 400(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm9,%xmm9 +vpmuludq 320(%rsp),%xmm2,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vmovdqa 256(%rsp),%xmm4 +vpmuludq 320(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm3,%xmm3 +vmovdqa 256(%rsp),%xmm4 +vpmuludq 368(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm5,%xmm5 +vmovdqa 304(%rsp),%xmm4 +vpmuludq 368(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm7,%xmm7 +vmovdqa 352(%rsp),%xmm4 +vpmuludq 400(%rsp),%xmm4,%xmm4 +vpaddq %xmm4,%xmm10,%xmm10 +vpmuludq 224(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vmovdqa 272(%rsp),%xmm2 +vpmuludq 224(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm1,%xmm1 +vmovdqa 320(%rsp),%xmm2 +vpmuludq 304(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vmovdqa 368(%rsp),%xmm2 +vpmuludq 288(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vmovdqa 400(%rsp),%xmm2 +vpmuludq 336(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpand m26(%rip),%xmm1,%xmm1 +vpsrlq $25,%xmm10,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm3,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpand m25(%rip),%xmm3,%xmm3 +vpsrlq $26,%xmm11,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm5,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpand m26(%rip),%xmm5,%xmm5 +vpsrlq $25,%xmm12,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpand m25(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm7,%xmm2 +vpaddq 
%xmm2,%xmm9,%xmm9 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $26,%xmm13,%xmm2 +vpaddq %xmm2,%xmm0,%xmm0 +vpand m26(%rip),%xmm13,%xmm13 +vpsrlq $26,%xmm9,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $25,%xmm0,%xmm2 +vpsllq $4,%xmm2,%xmm4 +vpaddq %xmm2,%xmm1,%xmm1 +vpsllq $1,%xmm2,%xmm2 +vpaddq %xmm2,%xmm4,%xmm4 +vpaddq %xmm4,%xmm1,%xmm1 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm10,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $26,%xmm1,%xmm2 +vpaddq %xmm2,%xmm3,%xmm3 +vpand m26(%rip),%xmm1,%xmm1 +vpunpckhqdq %xmm3,%xmm1,%xmm2 +vpunpcklqdq %xmm3,%xmm1,%xmm1 +vmovdqa %xmm1,176(%rsp) +vpaddq subc0(%rip),%xmm2,%xmm3 +vpsubq %xmm1,%xmm3,%xmm3 +vpunpckhqdq %xmm3,%xmm2,%xmm1 +vpunpcklqdq %xmm3,%xmm2,%xmm2 +vmovdqa %xmm2,192(%rsp) +vmovdqa %xmm1,224(%rsp) +vpsllq $1,%xmm1,%xmm1 +vmovdqa %xmm1,240(%rsp) +vpmuludq v121666_121666(%rip),%xmm3,%xmm3 +vmovdqa 80(%rsp),%xmm1 +vpunpcklqdq %xmm1,%xmm3,%xmm2 +vpunpckhqdq %xmm1,%xmm3,%xmm1 +vpunpckhqdq %xmm7,%xmm5,%xmm3 +vpunpcklqdq %xmm7,%xmm5,%xmm4 +vmovdqa %xmm4,256(%rsp) +vpaddq subc2(%rip),%xmm3,%xmm5 +vpsubq %xmm4,%xmm5,%xmm5 +vpunpckhqdq %xmm5,%xmm3,%xmm4 +vpunpcklqdq %xmm5,%xmm3,%xmm3 +vmovdqa %xmm3,272(%rsp) +vmovdqa %xmm4,288(%rsp) +vpsllq $1,%xmm4,%xmm4 +vmovdqa %xmm4,304(%rsp) +vpmuludq v121666_121666(%rip),%xmm5,%xmm5 +vmovdqa 96(%rsp),%xmm3 +vpunpcklqdq %xmm3,%xmm5,%xmm4 +vpunpckhqdq %xmm3,%xmm5,%xmm3 +vpunpckhqdq %xmm10,%xmm9,%xmm5 +vpunpcklqdq %xmm10,%xmm9,%xmm6 +vmovdqa %xmm6,320(%rsp) +vpaddq subc2(%rip),%xmm5,%xmm7 +vpsubq %xmm6,%xmm7,%xmm7 +vpunpckhqdq %xmm7,%xmm5,%xmm6 +vpunpcklqdq %xmm7,%xmm5,%xmm5 +vmovdqa %xmm5,336(%rsp) +vmovdqa %xmm6,352(%rsp) +vpsllq $1,%xmm6,%xmm6 +vmovdqa %xmm6,368(%rsp) +vpmuludq v121666_121666(%rip),%xmm7,%xmm7 +vmovdqa 112(%rsp),%xmm5 +vpunpcklqdq %xmm5,%xmm7,%xmm6 +vpunpckhqdq %xmm5,%xmm7,%xmm5 +vpunpckhqdq %xmm12,%xmm11,%xmm7 +vpunpcklqdq %xmm12,%xmm11,%xmm8 +vmovdqa %xmm8,384(%rsp) +vpaddq subc2(%rip),%xmm7,%xmm9 +vpsubq %xmm8,%xmm9,%xmm9 +vpunpckhqdq %xmm9,%xmm7,%xmm8 +vpunpcklqdq %xmm9,%xmm7,%xmm7 +vmovdqa %xmm7,400(%rsp) +vmovdqa %xmm8,416(%rsp) +vpsllq $1,%xmm8,%xmm8 +vmovdqa %xmm8,432(%rsp) +vpmuludq v121666_121666(%rip),%xmm9,%xmm9 +vmovdqa 160(%rsp),%xmm7 +vpunpcklqdq %xmm7,%xmm9,%xmm8 +vpunpckhqdq %xmm7,%xmm9,%xmm7 +vpunpckhqdq %xmm0,%xmm13,%xmm9 +vpunpcklqdq %xmm0,%xmm13,%xmm0 +vmovdqa %xmm0,160(%rsp) +vpaddq subc2(%rip),%xmm9,%xmm10 +vpsubq %xmm0,%xmm10,%xmm10 +vpunpckhqdq %xmm10,%xmm9,%xmm0 +vpunpcklqdq %xmm10,%xmm9,%xmm9 +vmovdqa %xmm9,448(%rsp) +vmovdqa %xmm0,464(%rsp) +vpsllq $1,%xmm0,%xmm0 +vmovdqa %xmm0,480(%rsp) +vpmuludq v121666_121666(%rip),%xmm10,%xmm10 +vmovdqa 208(%rsp),%xmm0 +vpunpcklqdq %xmm0,%xmm10,%xmm9 +vpunpckhqdq %xmm0,%xmm10,%xmm0 +vpsrlq $26,%xmm2,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpand m26(%rip),%xmm2,%xmm2 +vpsrlq $25,%xmm5,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $25,%xmm1,%xmm10 +vpaddq %xmm10,%xmm4,%xmm4 +vpand m25(%rip),%xmm1,%xmm1 +vpsrlq $26,%xmm8,%xmm10 +vpaddq %xmm10,%xmm7,%xmm7 +vpand m26(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm4,%xmm10 +vpaddq %xmm10,%xmm3,%xmm3 +vpand m26(%rip),%xmm4,%xmm4 +vpsrlq $25,%xmm7,%xmm10 +vpaddq %xmm10,%xmm9,%xmm9 +vpand m25(%rip),%xmm7,%xmm7 +vpsrlq $25,%xmm3,%xmm10 +vpaddq %xmm10,%xmm6,%xmm6 +vpand m25(%rip),%xmm3,%xmm3 +vpsrlq $26,%xmm9,%xmm10 +vpaddq %xmm10,%xmm0,%xmm0 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $26,%xmm6,%xmm10 +vpaddq %xmm10,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm0,%xmm10 +vpsllq $4,%xmm10,%xmm11 +vpaddq %xmm10,%xmm2,%xmm2 +vpsllq 
$1,%xmm10,%xmm10 +vpaddq %xmm10,%xmm11,%xmm11 +vpaddq %xmm11,%xmm2,%xmm2 +vpand m25(%rip),%xmm0,%xmm0 +vpsrlq $25,%xmm5,%xmm10 +vpaddq %xmm10,%xmm8,%xmm8 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm2,%xmm10 +vpaddq %xmm10,%xmm1,%xmm1 +vpand m26(%rip),%xmm2,%xmm2 +vpunpckhqdq %xmm1,%xmm2,%xmm10 +vmovdqa %xmm10,80(%rsp) +vpunpcklqdq %xmm1,%xmm2,%xmm1 +vpunpckhqdq %xmm3,%xmm4,%xmm2 +vmovdqa %xmm2,96(%rsp) +vpunpcklqdq %xmm3,%xmm4,%xmm2 +vpunpckhqdq %xmm5,%xmm6,%xmm3 +vmovdqa %xmm3,112(%rsp) +vpunpcklqdq %xmm5,%xmm6,%xmm3 +vpunpckhqdq %xmm7,%xmm8,%xmm4 +vmovdqa %xmm4,128(%rsp) +vpunpcklqdq %xmm7,%xmm8,%xmm4 +vpunpckhqdq %xmm0,%xmm9,%xmm5 +vmovdqa %xmm5,144(%rsp) +vpunpcklqdq %xmm0,%xmm9,%xmm0 +vmovdqa 176(%rsp),%xmm5 +vpaddq %xmm5,%xmm1,%xmm1 +vpunpcklqdq %xmm1,%xmm5,%xmm6 +vpunpckhqdq %xmm1,%xmm5,%xmm1 +vpmuludq 224(%rsp),%xmm6,%xmm5 +vpmuludq 192(%rsp),%xmm1,%xmm7 +vpaddq %xmm7,%xmm5,%xmm5 +vpmuludq 272(%rsp),%xmm6,%xmm7 +vpmuludq 240(%rsp),%xmm1,%xmm8 +vpaddq %xmm8,%xmm7,%xmm7 +vpmuludq 288(%rsp),%xmm6,%xmm8 +vpmuludq 272(%rsp),%xmm1,%xmm9 +vpaddq %xmm9,%xmm8,%xmm8 +vpmuludq 336(%rsp),%xmm6,%xmm9 +vpmuludq 304(%rsp),%xmm1,%xmm10 +vpaddq %xmm10,%xmm9,%xmm9 +vpmuludq 352(%rsp),%xmm6,%xmm10 +vpmuludq 336(%rsp),%xmm1,%xmm11 +vpaddq %xmm11,%xmm10,%xmm10 +vpmuludq 400(%rsp),%xmm6,%xmm11 +vpmuludq 368(%rsp),%xmm1,%xmm12 +vpaddq %xmm12,%xmm11,%xmm11 +vpmuludq 416(%rsp),%xmm6,%xmm12 +vpmuludq 400(%rsp),%xmm1,%xmm13 +vpaddq %xmm13,%xmm12,%xmm12 +vpmuludq 448(%rsp),%xmm6,%xmm13 +vpmuludq 432(%rsp),%xmm1,%xmm14 +vpaddq %xmm14,%xmm13,%xmm13 +vpmuludq 464(%rsp),%xmm6,%xmm14 +vpmuludq 448(%rsp),%xmm1,%xmm15 +vpaddq %xmm15,%xmm14,%xmm14 +vpmuludq 192(%rsp),%xmm6,%xmm6 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 480(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vmovdqa 256(%rsp),%xmm1 +vpaddq %xmm1,%xmm2,%xmm2 +vpunpcklqdq %xmm2,%xmm1,%xmm15 +vpunpckhqdq %xmm2,%xmm1,%xmm1 +vpmuludq 192(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 224(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 272(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 288(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 336(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 352(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 400(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 416(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm15,%xmm15 +vpmuludq 448(%rsp),%xmm15,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 464(%rsp),%xmm15,%xmm15 +vpaddq %xmm15,%xmm5,%xmm5 +vpmuludq 192(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 240(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 272(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 304(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 336(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 368(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 400(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 432(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 448(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 480(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vmovdqa 320(%rsp),%xmm1 +vpaddq %xmm1,%xmm3,%xmm3 +vpunpcklqdq %xmm3,%xmm1,%xmm2 +vpunpckhqdq %xmm3,%xmm1,%xmm1 +vpmuludq 192(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpmuludq 224(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm10,%xmm10 +vpmuludq 272(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 288(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm12,%xmm12 +vpmuludq 336(%rsp),%xmm2,%xmm3 +vpaddq 
%xmm3,%xmm13,%xmm13 +vpmuludq 352(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 400(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 416(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 448(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 464(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 192(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 240(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm11,%xmm11 +vpmuludq 272(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 304(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 336(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 368(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 400(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 432(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 448(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 480(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vmovdqa 384(%rsp),%xmm1 +vpaddq %xmm1,%xmm4,%xmm4 +vpunpcklqdq %xmm4,%xmm1,%xmm2 +vpunpckhqdq %xmm4,%xmm1,%xmm1 +vpmuludq 192(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm11,%xmm11 +vpmuludq 224(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm12,%xmm12 +vpmuludq 272(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm13,%xmm13 +vpmuludq 288(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 336(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm6,%xmm6 +vpmuludq 352(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm5,%xmm5 +vpmuludq 400(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm7,%xmm7 +vpmuludq 416(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm8,%xmm8 +vpmuludq 448(%rsp),%xmm2,%xmm3 +vpaddq %xmm3,%xmm9,%xmm9 +vpmuludq 464(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 192(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 240(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm13,%xmm13 +vpmuludq 272(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm1,%xmm1 +vpmuludq 304(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm6,%xmm6 +vpmuludq 336(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm5,%xmm5 +vpmuludq 368(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm7,%xmm7 +vpmuludq 400(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm8,%xmm8 +vpmuludq 432(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm9,%xmm9 +vpmuludq 448(%rsp),%xmm1,%xmm2 +vpaddq %xmm2,%xmm10,%xmm10 +vpmuludq 480(%rsp),%xmm1,%xmm1 +vpaddq %xmm1,%xmm11,%xmm11 +vmovdqa 160(%rsp),%xmm1 +vpaddq %xmm1,%xmm0,%xmm0 +vpunpcklqdq %xmm0,%xmm1,%xmm2 +vpunpckhqdq %xmm0,%xmm1,%xmm0 +vpmuludq 192(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm13,%xmm13 +vpmuludq 224(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm2,%xmm2 +vpmuludq 272(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpmuludq 288(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm5,%xmm5 +vpmuludq 336(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vpmuludq 352(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm8,%xmm8 +vpmuludq 400(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vpmuludq 416(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm10,%xmm10 +vpmuludq 448(%rsp),%xmm2,%xmm1 +vpaddq %xmm1,%xmm11,%xmm11 +vpmuludq 464(%rsp),%xmm2,%xmm2 +vpaddq %xmm2,%xmm12,%xmm12 +vpmuludq 192(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm14,%xmm14 +vpmuludq v19_19(%rip),%xmm0,%xmm0 +vpmuludq 240(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpmuludq 272(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm5,%xmm5 +vpmuludq 304(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm7,%xmm7 +vpmuludq 336(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm8,%xmm8 +vpmuludq 368(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm9,%xmm9 +vpmuludq 400(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm10,%xmm10 +vpmuludq 432(%rsp),%xmm0,%xmm1 
+vpaddq %xmm1,%xmm11,%xmm11 +vpmuludq 448(%rsp),%xmm0,%xmm1 +vpaddq %xmm1,%xmm12,%xmm12 +vpmuludq 480(%rsp),%xmm0,%xmm0 +vpaddq %xmm0,%xmm13,%xmm13 +vpsrlq $26,%xmm6,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpsrlq $25,%xmm10,%xmm0 +vpaddq %xmm0,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $25,%xmm5,%xmm0 +vpaddq %xmm0,%xmm7,%xmm7 +vpand m25(%rip),%xmm5,%xmm5 +vpsrlq $26,%xmm11,%xmm0 +vpaddq %xmm0,%xmm12,%xmm12 +vpand m26(%rip),%xmm11,%xmm11 +vpsrlq $26,%xmm7,%xmm0 +vpaddq %xmm0,%xmm8,%xmm8 +vpand m26(%rip),%xmm7,%xmm7 +vpsrlq $25,%xmm12,%xmm0 +vpaddq %xmm0,%xmm13,%xmm13 +vpand m25(%rip),%xmm12,%xmm12 +vpsrlq $25,%xmm8,%xmm0 +vpaddq %xmm0,%xmm9,%xmm9 +vpand m25(%rip),%xmm8,%xmm8 +vpsrlq $26,%xmm13,%xmm0 +vpaddq %xmm0,%xmm14,%xmm14 +vpand m26(%rip),%xmm13,%xmm13 +vpsrlq $26,%xmm9,%xmm0 +vpaddq %xmm0,%xmm10,%xmm10 +vpand m26(%rip),%xmm9,%xmm9 +vpsrlq $25,%xmm14,%xmm0 +vpsllq $4,%xmm0,%xmm1 +vpaddq %xmm0,%xmm6,%xmm6 +vpsllq $1,%xmm0,%xmm0 +vpaddq %xmm0,%xmm1,%xmm1 +vpaddq %xmm1,%xmm6,%xmm6 +vpand m25(%rip),%xmm14,%xmm14 +vpsrlq $25,%xmm10,%xmm0 +vpaddq %xmm0,%xmm11,%xmm11 +vpand m25(%rip),%xmm10,%xmm10 +vpsrlq $26,%xmm6,%xmm0 +vpaddq %xmm0,%xmm5,%xmm5 +vpand m26(%rip),%xmm6,%xmm6 +vpunpckhqdq %xmm5,%xmm6,%xmm1 +vpunpcklqdq %xmm5,%xmm6,%xmm0 +vpunpckhqdq %xmm8,%xmm7,%xmm3 +vpunpcklqdq %xmm8,%xmm7,%xmm2 +vpunpckhqdq %xmm10,%xmm9,%xmm5 +vpunpcklqdq %xmm10,%xmm9,%xmm4 +vpunpckhqdq %xmm12,%xmm11,%xmm7 +vpunpcklqdq %xmm12,%xmm11,%xmm6 +vpunpckhqdq %xmm14,%xmm13,%xmm9 +vpunpcklqdq %xmm14,%xmm13,%xmm8 +cmp $0,%rdx +jne ._ladder_base_loop +vmovdqu %xmm1,80(%rdi) +vmovdqu %xmm0,0(%rdi) +vmovdqu %xmm3,96(%rdi) +vmovdqu %xmm2,16(%rdi) +vmovdqu %xmm5,112(%rdi) +vmovdqu %xmm4,32(%rdi) +vmovdqu %xmm7,128(%rdi) +vmovdqu %xmm6,48(%rdi) +vmovdqu %xmm9,144(%rdi) +vmovdqu %xmm8,64(%rdi) +movq 1536(%rsp),%r11 +movq 1544(%rsp),%r12 +movq 1552(%rsp),%r13 +add %r11,%rsp +ret + +#endif diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.h new file mode 100644 index 0000000000..a69be13f0d --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base.h @@ -0,0 +1,18 @@ +#ifndef ladder_base_H +#define ladder_base_H + +#ifdef __cplusplus +extern "C" { +#endif + +#include "fe.h" +#include "ladder_base_namespace.h" + +extern void ladder_base(fe *, const unsigned char *); + +#ifdef __cplusplus +} +#endif + +#endif /* ifndef ladder_base_H */ + diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base_namespace.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base_namespace.h new file mode 100644 index 0000000000..304546a185 --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_base_namespace.h @@ -0,0 +1,8 @@ +#ifndef ladder_base_namespace_H +#define ladder_base_namespace_H + +#define ladder_base crypto_scalarmult_curve25519_sandy2x_ladder_base +#define _ladder_base _crypto_scalarmult_curve25519_sandy2x_ladder_base + +#endif /* ifndef ladder_base_namespace_H */ + diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_namespace.h b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_namespace.h new file mode 100644 index 0000000000..6637074bec --- /dev/null +++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/ladder_namespace.h @@ -0,0 +1,8 @@ +#ifndef ladder_namespace_H +#define ladder_namespace_H + +#define ladder crypto_scalarmult_curve25519_sandy2x_ladder 
+#define _ladder _crypto_scalarmult_curve25519_sandy2x_ladder
+
+#endif /* ifndef ladder_namespace_H */
+
diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/sandy2x.S b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/sandy2x.S
new file mode 100644
index 0000000000..1fd632057b
--- /dev/null
+++ b/libs/libsodium/src/crypto_scalarmult/curve25519/sandy2x/sandy2x.S
@@ -0,0 +1,17 @@
+
+#ifdef HAVE_AVX_ASM
+
+#define IN_SANDY2X
+
+#include "consts.S"
+#include "fe51_mul.S"
+#include "fe51_nsquare.S"
+#include "fe51_pack.S"
+#include "ladder.S"
+#include "ladder_base.S"
+
+#if defined(__linux__) && defined(__ELF__)
+.section .note.GNU-stack,"",%progbits
+#endif
+
+#endif
diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.c b/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.c
new file mode 100644
index 0000000000..2d3ffc0563
--- /dev/null
+++ b/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.c
@@ -0,0 +1,59 @@
+
+#include "crypto_scalarmult_curve25519.h"
+#include "private/implementations.h"
+#include "scalarmult_curve25519.h"
+#include "runtime.h"
+
+#ifdef HAVE_AVX_ASM
+# include "sandy2x/curve25519_sandy2x.h"
+#endif
+#include "ref10/x25519_ref10.h"
+static const crypto_scalarmult_curve25519_implementation *implementation =
+    &crypto_scalarmult_curve25519_ref10_implementation;
+
+int
+crypto_scalarmult_curve25519(unsigned char *q, const unsigned char *n,
+                             const unsigned char *p)
+{
+    size_t                 i;
+    volatile unsigned char d = 0;
+
+    if (implementation->mult(q, n, p) != 0) {
+        return -1; /* LCOV_EXCL_LINE */
+    }
+    for (i = 0; i < crypto_scalarmult_curve25519_BYTES; i++) {
+        d |= q[i];
+    }
+    return -(1 & ((d - 1) >> 8));
+}
+
+int
+crypto_scalarmult_curve25519_base(unsigned char *q, const unsigned char *n)
+{
+    return implementation->mult_base(q, n);
+}
+
+size_t
+crypto_scalarmult_curve25519_bytes(void)
+{
+    return crypto_scalarmult_curve25519_BYTES;
+}
+
+size_t
+crypto_scalarmult_curve25519_scalarbytes(void)
+{
+    return crypto_scalarmult_curve25519_SCALARBYTES;
+}
+
+int
+_crypto_scalarmult_curve25519_pick_best_implementation(void)
+{
+    implementation = &crypto_scalarmult_curve25519_ref10_implementation;
+
+#ifdef HAVE_AVX_ASM
+    if (sodium_runtime_has_avx()) {
+        implementation = &crypto_scalarmult_curve25519_sandy2x_implementation;
+    }
+#endif
+    return 0;
+}
diff --git a/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.h b/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.h
new file mode 100644
index 0000000000..66edbf6a8a
--- /dev/null
+++ b/libs/libsodium/src/crypto_scalarmult/curve25519/scalarmult_curve25519.h
@@ -0,0 +1,11 @@
+
+#ifndef scalarmult_poly1305_H
+#define scalarmult_poly1305_H
+
+typedef struct crypto_scalarmult_curve25519_implementation {
+    int (*mult)(unsigned char *q, const unsigned char *n,
+                const unsigned char *p);
+    int (*mult_base)(unsigned char *q, const unsigned char *n);
+} crypto_scalarmult_curve25519_implementation;
+
+#endif
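
In crypto_scalarmult_curve25519() above, every byte of the computed shared secret is OR-ed into the accumulator d, and the final `return -(1 & ((d - 1) >> 8));` rejects an all-zero result (the output produced when the peer supplies a small-order point) without branching on secret data. The following is a minimal stand-alone sketch of that check; the helper name output_is_all_zero is illustrative only and not part of the patch, and nothing beyond the C standard library is assumed:

#include <stddef.h>
#include <stdio.h>

/* Restatement of the output check in crypto_scalarmult_curve25519():
 * returns -1 when all `len` bytes of `q` are zero, 0 otherwise, using only
 * arithmetic on an accumulator so no branch depends on the output bytes. */
static int
output_is_all_zero(const unsigned char *q, size_t len)
{
    volatile unsigned char d = 0;
    size_t                 i;

    for (i = 0; i < len; i++) {
        d |= q[i]; /* d stays 0 only if every byte is 0 */
    }
    /* d == 0: (d - 1) is -1, so bit 0 of ((d - 1) >> 8) is 1 -> returns -1
     * d != 0: (d - 1) is in [0, 254], so ((d - 1) >> 8) is 0 -> returns  0 */
    return -(1 & ((d - 1) >> 8));
}

int
main(void)
{
    unsigned char all_zero[32] = { 0 };
    unsigned char not_zero[32] = { 0 };

    not_zero[31] = 0x42;
    printf("all zero: %d\n", output_is_all_zero(all_zero, sizeof all_zero)); /* -1 */
    printf("not zero: %d\n", output_is_all_zero(not_zero, sizeof not_zero)); /*  0 */
    return 0;
}

Separately, _crypto_scalarmult_curve25519_pick_best_implementation() defaults to the ref10 implementation and switches the function-pointer table to the sandy2x code only when sodium_runtime_has_avx() reports AVX support, which is why the sandy2x sources above are compiled only under HAVE_AVX_ASM.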