| author | Uros Majstorovic <majstor@majstor.org> | 2017-05-03 21:10:08 +0200 |
|---|---|---|
| committer | Uros Majstorovic <majstor@majstor.org> | 2017-05-03 21:10:08 +0200 |
| commit | 25de5e761daab8b897a4f09ff8503e6f43c299f9 (patch) | |
| tree | a9a1c185c7f89d67f9250e42b2aa53eefc9a6770 | /code/crypto |

initial commit
Diffstat (limited to 'code/crypto')
64 files changed, 24573 insertions(+), 0 deletions(-)
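The tree vendors OpenBSD/LibreSSL primitives (ChaCha20, Poly1305, Curve25519, SHA-256/512) together with arc4random/getentropy compatibility shims and archives them into a single static library, libecpcr.a. Before the diff itself, here is a minimal sketch of how the one-shot ChaCha20 entry point added in code/crypto/chacha/chacha.c could be driven. This example is not part of the commit: it assumes the tree's <chacha.h> is on the include path and the program links against libecpcr.a, and the key, nonce and message are placeholder values.

```c
/*
 * Hypothetical caller of the vendored one-shot ChaCha20 API (illustration
 * only, not part of this commit).  The CRYPTO_chacha_20() signature is the
 * one added in code/crypto/chacha/chacha.c below.
 */
#include <stdio.h>
#include <string.h>

#include <chacha.h>		/* assumed to declare CRYPTO_chacha_20() */

int
main(void)
{
	static const unsigned char key[32] = { 0 };	/* demo key */
	static const unsigned char iv[8] = { 0 };	/* demo nonce */
	unsigned char msg[] = "attack at dawn";
	unsigned char ct[sizeof(msg)];
	unsigned char pt[sizeof(msg)];

	/* Encrypt with the 64-bit block counter starting at 0. */
	CRYPTO_chacha_20(ct, msg, sizeof(msg), key, iv, 0);

	/* Decryption is the identical keystream XOR with the same key/iv/counter. */
	CRYPTO_chacha_20(pt, ct, sizeof(ct), key, iv, 0);

	printf("roundtrip %s\n",
	    memcmp(pt, msg, sizeof(msg)) == 0 ? "ok" : "FAILED");
	return 0;
}
```

The streaming interface added alongside it (ChaCha_set_key(), ChaCha_set_iv(), ChaCha()) covers incremental use, and the compat/ shims in this commit back the arc4random() family on platforms without a native getentropy(2).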
diff --git a/code/crypto/Makefile b/code/crypto/Makefile new file mode 100644 index 0000000..cfd3bf8 --- /dev/null +++ b/code/crypto/Makefile @@ -0,0 +1,22 @@ +MAKE=make +CFLAGS=-Iinclude -I.. -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -O3 $(PIC) +obj = e_chacha20poly1305.o crypto.o +obj_dep = compat/explicit_bzero.o compat/timingsafe_memcmp.o compat/timingsafe_bcmp.o \ + chacha/chacha.o poly1305/poly1305.o curve25519/curve25519.o curve25519/curve25519-generic.o \ + sha/sha256.o sha/sha512.o +subdirs = compat curve25519 chacha poly1305 sha + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +all: $(obj) + for i in $(subdirs); do \ + (cd $$i && $(MAKE) && cd ..) || exit; \ + done + $(AR) rcs libecpcr.a $(obj) $(obj_dep) + +clean: + for i in $(subdirs) test; do \ + (cd $$i && $(MAKE) clean && cd ..) || exit; \ + done + rm -f *.o *.a diff --git a/code/crypto/chacha/Makefile b/code/crypto/chacha/Makefile new file mode 100644 index 0000000..dae3373 --- /dev/null +++ b/code/crypto/chacha/Makefile @@ -0,0 +1,14 @@ +CFLAGS=-I../include -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -O3 $(PIC) +obj = chacha.o + +all: libchacha.a +dep: all + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +libchacha.a: $(obj) + $(AR) rcs $@ $(obj) + +clean: + rm -f *.o *.a diff --git a/code/crypto/chacha/chacha-merged.c b/code/crypto/chacha/chacha-merged.c new file mode 100644 index 0000000..557dfd5 --- /dev/null +++ b/code/crypto/chacha/chacha-merged.c @@ -0,0 +1,270 @@ +/* $OpenBSD: chacha-merged.c,v 1.7 2014/07/11 08:47:47 bcook Exp $ */ +/* +chacha-merged.c version 20080118 +D. J. Bernstein +Public domain. +*/ + +#include <sys/types.h> + +#include <stdint.h> + +#define CHACHA_MINKEYLEN 16 +#define CHACHA_NONCELEN 8 +#define CHACHA_CTRLEN 8 +#define CHACHA_STATELEN (CHACHA_NONCELEN+CHACHA_CTRLEN) +#define CHACHA_BLOCKLEN 64 + +struct chacha_ctx { + u_int input[16]; + uint8_t ks[CHACHA_BLOCKLEN]; + uint8_t unused; +}; + +static inline void chacha_keysetup(struct chacha_ctx *x, const u_char *k, + u_int kbits) + __attribute__((__bounded__(__minbytes__, 2, CHACHA_MINKEYLEN))); +static inline void chacha_ivsetup(struct chacha_ctx *x, const u_char *iv, + const u_char *ctr) + __attribute__((__bounded__(__minbytes__, 2, CHACHA_NONCELEN))) + __attribute__((__bounded__(__minbytes__, 3, CHACHA_CTRLEN))); +static inline void chacha_encrypt_bytes(struct chacha_ctx *x, const u_char *m, + u_char *c, u_int bytes) + __attribute__((__bounded__(__buffer__, 2, 4))) + __attribute__((__bounded__(__buffer__, 3, 4))); + +typedef unsigned char u8; +typedef unsigned int u32; + +typedef struct chacha_ctx chacha_ctx; + +#define U8C(v) (v##U) +#define U32C(v) (v##U) + +#define U8V(v) ((u8)(v) & U8C(0xFF)) +#define U32V(v) ((u32)(v) & U32C(0xFFFFFFFF)) + +#define ROTL32(v, n) \ + (U32V((v) << (n)) | ((v) >> (32 - (n)))) + +#define U8TO32_LITTLE(p) \ + (((u32)((p)[0])) | \ + ((u32)((p)[1]) << 8) | \ + ((u32)((p)[2]) << 16) | \ + ((u32)((p)[3]) << 24)) + +#define U32TO8_LITTLE(p, v) \ + do { \ + (p)[0] = U8V((v)); \ + (p)[1] = U8V((v) >> 8); \ + (p)[2] = U8V((v) >> 16); \ + (p)[3] = U8V((v) >> 24); \ + } while (0) + +#define ROTATE(v,c) (ROTL32(v,c)) +#define XOR(v,w) ((v) ^ (w)) +#define PLUS(v,w) (U32V((v) + (w))) +#define PLUSONE(v) (PLUS((v),1)) + +#define QUARTERROUND(a,b,c,d) \ + a = PLUS(a,b); d = ROTATE(XOR(d,a),16); \ + c = PLUS(c,d); b = ROTATE(XOR(b,c),12); \ + a = PLUS(a,b); d = ROTATE(XOR(d,a), 8); \ + c = PLUS(c,d); b = ROTATE(XOR(b,c), 7); + +static const char sigma[16] = "expand 32-byte k"; +static const char tau[16] = "expand 16-byte k"; + +static 
inline void +chacha_keysetup(chacha_ctx *x, const u8 *k, u32 kbits) +{ + const char *constants; + + x->input[4] = U8TO32_LITTLE(k + 0); + x->input[5] = U8TO32_LITTLE(k + 4); + x->input[6] = U8TO32_LITTLE(k + 8); + x->input[7] = U8TO32_LITTLE(k + 12); + if (kbits == 256) { /* recommended */ + k += 16; + constants = sigma; + } else { /* kbits == 128 */ + constants = tau; + } + x->input[8] = U8TO32_LITTLE(k + 0); + x->input[9] = U8TO32_LITTLE(k + 4); + x->input[10] = U8TO32_LITTLE(k + 8); + x->input[11] = U8TO32_LITTLE(k + 12); + x->input[0] = U8TO32_LITTLE(constants + 0); + x->input[1] = U8TO32_LITTLE(constants + 4); + x->input[2] = U8TO32_LITTLE(constants + 8); + x->input[3] = U8TO32_LITTLE(constants + 12); +} + +static inline void +chacha_ivsetup(chacha_ctx *x, const u8 *iv, const u8 *counter) +{ + x->input[12] = counter == NULL ? 0 : U8TO32_LITTLE(counter + 0); + x->input[13] = counter == NULL ? 0 : U8TO32_LITTLE(counter + 4); + x->input[14] = U8TO32_LITTLE(iv + 0); + x->input[15] = U8TO32_LITTLE(iv + 4); +} + +static inline void +chacha_encrypt_bytes(chacha_ctx *x, const u8 *m, u8 *c, u32 bytes) +{ + u32 x0, x1, x2, x3, x4, x5, x6, x7; + u32 x8, x9, x10, x11, x12, x13, x14, x15; + u32 j0, j1, j2, j3, j4, j5, j6, j7; + u32 j8, j9, j10, j11, j12, j13, j14, j15; + u8 *ctarget = NULL; + u8 tmp[64]; + u_int i; + + if (!bytes) + return; + + j0 = x->input[0]; + j1 = x->input[1]; + j2 = x->input[2]; + j3 = x->input[3]; + j4 = x->input[4]; + j5 = x->input[5]; + j6 = x->input[6]; + j7 = x->input[7]; + j8 = x->input[8]; + j9 = x->input[9]; + j10 = x->input[10]; + j11 = x->input[11]; + j12 = x->input[12]; + j13 = x->input[13]; + j14 = x->input[14]; + j15 = x->input[15]; + + for (;;) { + if (bytes < 64) { + for (i = 0; i < bytes; ++i) + tmp[i] = m[i]; + m = tmp; + ctarget = c; + c = tmp; + } + x0 = j0; + x1 = j1; + x2 = j2; + x3 = j3; + x4 = j4; + x5 = j5; + x6 = j6; + x7 = j7; + x8 = j8; + x9 = j9; + x10 = j10; + x11 = j11; + x12 = j12; + x13 = j13; + x14 = j14; + x15 = j15; + for (i = 20; i > 0; i -= 2) { + QUARTERROUND(x0, x4, x8, x12) + QUARTERROUND(x1, x5, x9, x13) + QUARTERROUND(x2, x6, x10, x14) + QUARTERROUND(x3, x7, x11, x15) + QUARTERROUND(x0, x5, x10, x15) + QUARTERROUND(x1, x6, x11, x12) + QUARTERROUND(x2, x7, x8, x13) + QUARTERROUND(x3, x4, x9, x14) + } + x0 = PLUS(x0, j0); + x1 = PLUS(x1, j1); + x2 = PLUS(x2, j2); + x3 = PLUS(x3, j3); + x4 = PLUS(x4, j4); + x5 = PLUS(x5, j5); + x6 = PLUS(x6, j6); + x7 = PLUS(x7, j7); + x8 = PLUS(x8, j8); + x9 = PLUS(x9, j9); + x10 = PLUS(x10, j10); + x11 = PLUS(x11, j11); + x12 = PLUS(x12, j12); + x13 = PLUS(x13, j13); + x14 = PLUS(x14, j14); + x15 = PLUS(x15, j15); + + if (bytes < 64) { + U32TO8_LITTLE(x->ks + 0, x0); + U32TO8_LITTLE(x->ks + 4, x1); + U32TO8_LITTLE(x->ks + 8, x2); + U32TO8_LITTLE(x->ks + 12, x3); + U32TO8_LITTLE(x->ks + 16, x4); + U32TO8_LITTLE(x->ks + 20, x5); + U32TO8_LITTLE(x->ks + 24, x6); + U32TO8_LITTLE(x->ks + 28, x7); + U32TO8_LITTLE(x->ks + 32, x8); + U32TO8_LITTLE(x->ks + 36, x9); + U32TO8_LITTLE(x->ks + 40, x10); + U32TO8_LITTLE(x->ks + 44, x11); + U32TO8_LITTLE(x->ks + 48, x12); + U32TO8_LITTLE(x->ks + 52, x13); + U32TO8_LITTLE(x->ks + 56, x14); + U32TO8_LITTLE(x->ks + 60, x15); + } + + x0 = XOR(x0, U8TO32_LITTLE(m + 0)); + x1 = XOR(x1, U8TO32_LITTLE(m + 4)); + x2 = XOR(x2, U8TO32_LITTLE(m + 8)); + x3 = XOR(x3, U8TO32_LITTLE(m + 12)); + x4 = XOR(x4, U8TO32_LITTLE(m + 16)); + x5 = XOR(x5, U8TO32_LITTLE(m + 20)); + x6 = XOR(x6, U8TO32_LITTLE(m + 24)); + x7 = XOR(x7, U8TO32_LITTLE(m + 28)); + x8 = XOR(x8, U8TO32_LITTLE(m + 
32)); + x9 = XOR(x9, U8TO32_LITTLE(m + 36)); + x10 = XOR(x10, U8TO32_LITTLE(m + 40)); + x11 = XOR(x11, U8TO32_LITTLE(m + 44)); + x12 = XOR(x12, U8TO32_LITTLE(m + 48)); + x13 = XOR(x13, U8TO32_LITTLE(m + 52)); + x14 = XOR(x14, U8TO32_LITTLE(m + 56)); + x15 = XOR(x15, U8TO32_LITTLE(m + 60)); + + j12 = PLUSONE(j12); + if (!j12) { + j13 = PLUSONE(j13); + /* + * Stopping at 2^70 bytes per nonce is the user's + * responsibility. + */ + } + + U32TO8_LITTLE(c + 0, x0); + U32TO8_LITTLE(c + 4, x1); + U32TO8_LITTLE(c + 8, x2); + U32TO8_LITTLE(c + 12, x3); + U32TO8_LITTLE(c + 16, x4); + U32TO8_LITTLE(c + 20, x5); + U32TO8_LITTLE(c + 24, x6); + U32TO8_LITTLE(c + 28, x7); + U32TO8_LITTLE(c + 32, x8); + U32TO8_LITTLE(c + 36, x9); + U32TO8_LITTLE(c + 40, x10); + U32TO8_LITTLE(c + 44, x11); + U32TO8_LITTLE(c + 48, x12); + U32TO8_LITTLE(c + 52, x13); + U32TO8_LITTLE(c + 56, x14); + U32TO8_LITTLE(c + 60, x15); + + if (bytes <= 64) { + if (bytes < 64) { + for (i = 0; i < bytes; ++i) + ctarget[i] = c[i]; + } + x->input[12] = j12; + x->input[13] = j13; + x->unused = 64 - bytes; + return; + } + bytes -= 64; + c += 64; + m += 64; + } +} diff --git a/code/crypto/chacha/chacha.c b/code/crypto/chacha/chacha.c new file mode 100644 index 0000000..fba03c9 --- /dev/null +++ b/code/crypto/chacha/chacha.c @@ -0,0 +1,77 @@ +/* $OpenBSD: chacha.c,v 1.7 2015/12/09 14:07:55 bcook Exp $ */ +/* + * Copyright (c) 2014 Joel Sing <jsing@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <stdint.h> + +#include <chacha.h> + +#include "chacha-merged.c" + +void +ChaCha_set_key(ChaCha_ctx *ctx, const unsigned char *key, uint32_t keybits) +{ + chacha_keysetup((chacha_ctx *)ctx, key, keybits); + ctx->unused = 0; +} + +void +ChaCha_set_iv(ChaCha_ctx *ctx, const unsigned char *iv, + const unsigned char *counter) +{ + chacha_ivsetup((chacha_ctx *)ctx, iv, counter); + ctx->unused = 0; +} + +void +ChaCha(ChaCha_ctx *ctx, unsigned char *out, const unsigned char *in, size_t len) +{ + unsigned char *k; + int i, l; + + /* Consume remaining keystream, if any exists. */ + if (ctx->unused > 0) { + k = ctx->ks + 64 - ctx->unused; + l = (len > ctx->unused) ? ctx->unused : len; + for (i = 0; i < l; i++) + *(out++) = *(in++) ^ *(k++); + ctx->unused -= l; + len -= l; + } + + chacha_encrypt_bytes((chacha_ctx *)ctx, in, out, (uint32_t)len); +} + +void +CRYPTO_chacha_20(unsigned char *out, const unsigned char *in, size_t len, + const unsigned char key[32], const unsigned char iv[8], uint64_t counter) +{ + struct chacha_ctx ctx; + + /* + * chacha_ivsetup expects the counter to be in u8. Rather than + * converting size_t to u8 and then back again, pass a counter of + * NULL and manually assign it afterwards. 
+ */ + chacha_keysetup(&ctx, key, 256); + chacha_ivsetup(&ctx, iv, NULL); + if (counter != 0) { + ctx.input[12] = (uint32_t)counter; + ctx.input[13] = (uint32_t)(counter >> 32); + } + + chacha_encrypt_bytes(&ctx, in, out, (uint32_t)len); +} diff --git a/code/crypto/compat/Makefile b/code/crypto/compat/Makefile new file mode 100644 index 0000000..2c0fa5c --- /dev/null +++ b/code/crypto/compat/Makefile @@ -0,0 +1,15 @@ +CFLAGS=-I../include -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -O3 $(PIC) +getentropy = getentropy_osx +obj = explicit_bzero.o timingsafe_memcmp.o timingsafe_bcmp.o # arc4random.o arc4random_uniform.o $(getentropy).o + +all: libcompat.a +dep: all + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +libcompat.a: $(obj) + $(AR) rcs $@ $(obj) + +clean: + rm -f *.o *.a diff --git a/code/crypto/compat/arc4random.c b/code/crypto/compat/arc4random.c new file mode 100644 index 0000000..0d3a64f --- /dev/null +++ b/code/crypto/compat/arc4random.c @@ -0,0 +1,198 @@ +/* $OpenBSD: arc4random.c,v 1.54 2015/09/13 08:31:47 guenther Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * ChaCha based random number generator for OpenBSD. + */ + +#include <fcntl.h> +#include <limits.h> +#include <signal.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> +#include <unistd.h> +#include <sys/types.h> +#include <sys/time.h> + +#include <compat.h> + +#define KEYSTREAM_ONLY +#include "chacha_private.h" + +#define minimum(a, b) ((a) < (b) ? (a) : (b)) + +#if defined(__GNUC__) || defined(_MSC_VER) +#define inline __inline +#else /* __GNUC__ || _MSC_VER */ +#define inline +#endif /* !__GNUC__ && !_MSC_VER */ + +#define KEYSZ 32 +#define IVSZ 8 +#define BLOCKSZ 64 +#define RSBUFSZ (16*BLOCKSZ) + +/* Marked MAP_INHERIT_ZERO, so zero'd out in fork children. */ +static struct _rs { + size_t rs_have; /* valid bytes at end of rs_buf */ + size_t rs_count; /* bytes till reseed */ +} *rs; + +/* Maybe be preserved in fork children, if _rs_allocate() decides. 
*/ +static struct _rsx { + chacha_ctx rs_chacha; /* chacha context for random keystream */ + u_char rs_buf[RSBUFSZ]; /* keystream blocks */ +} *rsx; + +static inline int _rs_allocate(struct _rs **, struct _rsx **); +static inline void _rs_forkdetect(void); +#include "arc4random.h" + +static inline void _rs_rekey(u_char *dat, size_t datlen); + +static inline void +_rs_init(u_char *buf, size_t n) +{ + if (n < KEYSZ + IVSZ) + return; + + if (rs == NULL) { + if (_rs_allocate(&rs, &rsx) == -1) + abort(); + } + + chacha_keysetup(&rsx->rs_chacha, buf, KEYSZ * 8, 0); + chacha_ivsetup(&rsx->rs_chacha, buf + KEYSZ); +} + +static void +_rs_stir(void) +{ + u_char rnd[KEYSZ + IVSZ]; + + if (getentropy(rnd, sizeof rnd) == -1) + _getentropy_fail(); + + if (!rs) + _rs_init(rnd, sizeof(rnd)); + else + _rs_rekey(rnd, sizeof(rnd)); + explicit_bzero(rnd, sizeof(rnd)); /* discard source seed */ + + /* invalidate rs_buf */ + rs->rs_have = 0; + memset(rsx->rs_buf, 0, sizeof(rsx->rs_buf)); + + rs->rs_count = 1600000; +} + +static inline void +_rs_stir_if_needed(size_t len) +{ + _rs_forkdetect(); + if (!rs || rs->rs_count <= len) + _rs_stir(); + if (rs->rs_count <= len) + rs->rs_count = 0; + else + rs->rs_count -= len; +} + +static inline void +_rs_rekey(u_char *dat, size_t datlen) +{ +#ifndef KEYSTREAM_ONLY + memset(rsx->rs_buf, 0, sizeof(rsx->rs_buf)); +#endif + /* fill rs_buf with the keystream */ + chacha_encrypt_bytes(&rsx->rs_chacha, rsx->rs_buf, + rsx->rs_buf, sizeof(rsx->rs_buf)); + /* mix in optional user provided data */ + if (dat) { + size_t i, m; + + m = minimum(datlen, KEYSZ + IVSZ); + for (i = 0; i < m; i++) + rsx->rs_buf[i] ^= dat[i]; + } + /* immediately reinit for backtracking resistance */ + _rs_init(rsx->rs_buf, KEYSZ + IVSZ); + memset(rsx->rs_buf, 0, KEYSZ + IVSZ); + rs->rs_have = sizeof(rsx->rs_buf) - KEYSZ - IVSZ; +} + +static inline void +_rs_random_buf(void *_buf, size_t n) +{ + u_char *buf = (u_char *)_buf; + u_char *keystream; + size_t m; + + _rs_stir_if_needed(n); + while (n > 0) { + if (rs->rs_have > 0) { + m = minimum(n, rs->rs_have); + keystream = rsx->rs_buf + sizeof(rsx->rs_buf) + - rs->rs_have; + memcpy(buf, keystream, m); + memset(keystream, 0, m); + buf += m; + n -= m; + rs->rs_have -= m; + } + if (rs->rs_have == 0) + _rs_rekey(NULL, 0); + } +} + +static inline void +_rs_random_u32(uint32_t *val) +{ + u_char *keystream; + + _rs_stir_if_needed(sizeof(*val)); + if (rs->rs_have < sizeof(*val)) + _rs_rekey(NULL, 0); + keystream = rsx->rs_buf + sizeof(rsx->rs_buf) - rs->rs_have; + memcpy(val, keystream, sizeof(*val)); + memset(keystream, 0, sizeof(*val)); + rs->rs_have -= sizeof(*val); +} + +uint32_t +arc4random(void) +{ + uint32_t val; + + _ARC4_LOCK(); + _rs_random_u32(&val); + _ARC4_UNLOCK(); + return val; +} + +void +arc4random_buf(void *buf, size_t n) +{ + _ARC4_LOCK(); + _rs_random_buf(buf, n); + _ARC4_UNLOCK(); +} diff --git a/code/crypto/compat/arc4random.h b/code/crypto/compat/arc4random.h new file mode 100644 index 0000000..762aec2 --- /dev/null +++ b/code/crypto/compat/arc4random.h @@ -0,0 +1,35 @@ +#ifndef LIBCRYPTOCOMPAT_ARC4RANDOM_H +#define LIBCRYPTOCOMPAT_ARC4RANDOM_H + +#include <sys/param.h> + +#if defined(_AIX) +#include "arc4random_aix.h" + +#elif defined(__FreeBSD__) +#include "arc4random_freebsd.h" + +#elif defined(__hpux) +#include "arc4random_hpux.h" + +#elif defined(__linux__) +#include "arc4random_linux.h" + +#elif defined(__NetBSD__) +#include "arc4random_netbsd.h" + +#elif defined(__APPLE__) +#include "arc4random_osx.h" + +#elif defined(__sun) +#include 
"arc4random_solaris.h" + +#elif defined(_WIN32) +#include "arc4random_win.h" + +#else +#error "No arc4random hooks defined for this platform." + +#endif + +#endif diff --git a/code/crypto/compat/arc4random_aix.h b/code/crypto/compat/arc4random_aix.h new file mode 100644 index 0000000..3142a1f --- /dev/null +++ b/code/crypto/compat/arc4random_aix.h @@ -0,0 +1,81 @@ +/* $OpenBSD: arc4random_aix.h,v 1.2 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. + */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_freebsd.h b/code/crypto/compat/arc4random_freebsd.h new file mode 100644 index 0000000..3faa5e4 --- /dev/null +++ b/code/crypto/compat/arc4random_freebsd.h @@ -0,0 +1,87 @@ +/* $OpenBSD: arc4random_freebsd.h,v 1.4 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. + */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +/* + * Unfortunately, pthread_atfork() is broken on FreeBSD (at least 9 and 10) if + * a program does not link to -lthr. Callbacks registered with pthread_atfork() + * appear to fail silently. So, it is not always possible to detect a PID + * wraparound. + */ +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_hpux.h b/code/crypto/compat/arc4random_hpux.h new file mode 100644 index 0000000..2a3fe8c --- /dev/null +++ b/code/crypto/compat/arc4random_hpux.h @@ -0,0 +1,81 @@ +/* $OpenBSD: arc4random_hpux.h,v 1.3 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. 
+ */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_linux.h b/code/crypto/compat/arc4random_linux.h new file mode 100644 index 0000000..879f966 --- /dev/null +++ b/code/crypto/compat/arc4random_linux.h @@ -0,0 +1,88 @@ +/* $OpenBSD: arc4random_linux.h,v 1.11 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. 
+ */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +#ifdef __GLIBC__ +extern void *__dso_handle; +extern int __register_atfork(void (*)(void), void(*)(void), void (*)(void), void *); +#define _ARC4_ATFORK(f) __register_atfork(NULL, NULL, (f), __dso_handle) +#else +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) +#endif + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + /* XXX unusual calls to clone() can bypass checks */ + if (_rs_pid == 0 || _rs_pid == 1 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_netbsd.h b/code/crypto/compat/arc4random_netbsd.h new file mode 100644 index 0000000..611997d --- /dev/null +++ b/code/crypto/compat/arc4random_netbsd.h @@ -0,0 +1,87 @@ +/* $OpenBSD: arc4random_netbsd.h,v 1.3 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. + */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +/* + * Unfortunately, pthread_atfork() is broken on FreeBSD (at least 9 and 10) if + * a program does not link to -lthr. Callbacks registered with pthread_atfork() + * appear to fail silently. So, it is not always possible to detect a PID + * wraparound. 
+ */ +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_osx.h b/code/crypto/compat/arc4random_osx.h new file mode 100644 index 0000000..818ae6b --- /dev/null +++ b/code/crypto/compat/arc4random_osx.h @@ -0,0 +1,81 @@ +/* $OpenBSD: arc4random_osx.h,v 1.11 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. 
+ */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_solaris.h b/code/crypto/compat/arc4random_solaris.h new file mode 100644 index 0000000..b1084cd --- /dev/null +++ b/code/crypto/compat/arc4random_solaris.h @@ -0,0 +1,81 @@ +/* $OpenBSD: arc4random_solaris.h,v 1.10 2016/06/30 12:19:51 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. 
+ */ + +#include <sys/mman.h> + +#include <pthread.h> +#include <signal.h> + +static pthread_mutex_t arc4random_mtx = PTHREAD_MUTEX_INITIALIZER; +#define _ARC4_LOCK() pthread_mutex_lock(&arc4random_mtx) +#define _ARC4_UNLOCK() pthread_mutex_unlock(&arc4random_mtx) + +#define _ARC4_ATFORK(f) pthread_atfork(NULL, NULL, (f)) + +static inline void +_getentropy_fail(void) +{ + raise(SIGKILL); +} + +static volatile sig_atomic_t _rs_forked; + +static inline void +_rs_forkhandler(void) +{ + _rs_forked = 1; +} + +static inline void +_rs_forkdetect(void) +{ + static pid_t _rs_pid = 0; + pid_t pid = getpid(); + + if (_rs_pid == 0 || _rs_pid != pid || _rs_forked) { + _rs_pid = pid; + _rs_forked = 0; + if (rs) + memset(rs, 0, sizeof(*rs)); + } +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + if ((*rsp = mmap(NULL, sizeof(**rsp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) + return (-1); + + if ((*rsxp = mmap(NULL, sizeof(**rsxp), PROT_READ|PROT_WRITE, + MAP_ANON|MAP_PRIVATE, -1, 0)) == MAP_FAILED) { + munmap(*rsp, sizeof(**rsp)); + *rsp = NULL; + return (-1); + } + + _ARC4_ATFORK(_rs_forkhandler); + return (0); +} diff --git a/code/crypto/compat/arc4random_uniform.c b/code/crypto/compat/arc4random_uniform.c new file mode 100644 index 0000000..2d22434 --- /dev/null +++ b/code/crypto/compat/arc4random_uniform.c @@ -0,0 +1,56 @@ +/* $OpenBSD: arc4random_uniform.c,v 1.2 2015/09/13 08:31:47 guenther Exp $ */ + +/* + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <sys/types.h> +#include <stdlib.h> + +/* + * Calculate a uniformly distributed random number less than upper_bound + * avoiding "modulo bias". + * + * Uniformity is achieved by generating new random numbers until the one + * returned is outside the range [0, 2**32 % upper_bound). This + * guarantees the selected random number will be inside + * [2**32 % upper_bound, 2**32) which maps back to [0, upper_bound) + * after reduction modulo upper_bound. + */ +uint32_t +arc4random_uniform(uint32_t upper_bound) +{ + uint32_t r, min; + + if (upper_bound < 2) + return 0; + + /* 2**32 % x == (2**32 - x) % x */ + min = -upper_bound % upper_bound; + + /* + * This could theoretically loop forever but each retry has + * p > 0.5 (worst case, usually far better) of selecting a + * number inside the range we need, so it should rarely need + * to re-roll. 
+ */ + for (;;) { + r = arc4random(); + if (r >= min) + break; + } + + return r % upper_bound; +} diff --git a/code/crypto/compat/arc4random_win.h b/code/crypto/compat/arc4random_win.h new file mode 100644 index 0000000..deec8a1 --- /dev/null +++ b/code/crypto/compat/arc4random_win.h @@ -0,0 +1,78 @@ +/* $OpenBSD: arc4random_win.h,v 1.6 2016/06/30 12:17:29 bcook Exp $ */ + +/* + * Copyright (c) 1996, David Mazieres <dm@uun.org> + * Copyright (c) 2008, Damien Miller <djm@openbsd.org> + * Copyright (c) 2013, Markus Friedl <markus@openbsd.org> + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * Stub functions for portability. + */ + +#include <windows.h> + +static volatile HANDLE arc4random_mtx = NULL; + +/* + * Initialize the mutex on the first lock attempt. On collision, each thread + * will attempt to allocate a mutex and compare-and-swap it into place as the + * global mutex. On failure to swap in the global mutex, the mutex is closed. + */ +#define _ARC4_LOCK() { \ + if (!arc4random_mtx) { \ + HANDLE p = CreateMutex(NULL, FALSE, NULL); \ + if (InterlockedCompareExchangePointer((void **)&arc4random_mtx, (void *)p, NULL)) \ + CloseHandle(p); \ + } \ + WaitForSingleObject(arc4random_mtx, INFINITE); \ +} \ + +#define _ARC4_UNLOCK() ReleaseMutex(arc4random_mtx) + +static inline void +_getentropy_fail(void) +{ + TerminateProcess(GetCurrentProcess(), 0); +} + +static inline int +_rs_allocate(struct _rs **rsp, struct _rsx **rsxp) +{ + *rsp = VirtualAlloc(NULL, sizeof(**rsp), + MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE); + if (*rsp == NULL) + return (-1); + + *rsxp = VirtualAlloc(NULL, sizeof(**rsxp), + MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE); + if (*rsxp == NULL) { + VirtualFree(*rsp, 0, MEM_RELEASE); + *rsp = NULL; + return (-1); + } + return (0); +} + +static inline void +_rs_forkhandler(void) +{ +} + +static inline void +_rs_forkdetect(void) +{ +} diff --git a/code/crypto/compat/chacha_private.h b/code/crypto/compat/chacha_private.h new file mode 100644 index 0000000..7c3680f --- /dev/null +++ b/code/crypto/compat/chacha_private.h @@ -0,0 +1,222 @@ +/* +chacha-merged.c version 20080118 +D. J. Bernstein +Public domain. 
+*/ + +/* $OpenBSD: chacha_private.h,v 1.2 2013/10/04 07:02:27 djm Exp $ */ + +typedef unsigned char u8; +typedef unsigned int u32; + +typedef struct +{ + u32 input[16]; /* could be compressed */ +} chacha_ctx; + +#define U8C(v) (v##U) +#define U32C(v) (v##U) + +#define U8V(v) ((u8)(v) & U8C(0xFF)) +#define U32V(v) ((u32)(v) & U32C(0xFFFFFFFF)) + +#define ROTL32(v, n) \ + (U32V((v) << (n)) | ((v) >> (32 - (n)))) + +#define U8TO32_LITTLE(p) \ + (((u32)((p)[0]) ) | \ + ((u32)((p)[1]) << 8) | \ + ((u32)((p)[2]) << 16) | \ + ((u32)((p)[3]) << 24)) + +#define U32TO8_LITTLE(p, v) \ + do { \ + (p)[0] = U8V((v) ); \ + (p)[1] = U8V((v) >> 8); \ + (p)[2] = U8V((v) >> 16); \ + (p)[3] = U8V((v) >> 24); \ + } while (0) + +#define ROTATE(v,c) (ROTL32(v,c)) +#define XOR(v,w) ((v) ^ (w)) +#define PLUS(v,w) (U32V((v) + (w))) +#define PLUSONE(v) (PLUS((v),1)) + +#define QUARTERROUND(a,b,c,d) \ + a = PLUS(a,b); d = ROTATE(XOR(d,a),16); \ + c = PLUS(c,d); b = ROTATE(XOR(b,c),12); \ + a = PLUS(a,b); d = ROTATE(XOR(d,a), 8); \ + c = PLUS(c,d); b = ROTATE(XOR(b,c), 7); + +static const char sigma[16] = "expand 32-byte k"; +static const char tau[16] = "expand 16-byte k"; + +static void +chacha_keysetup(chacha_ctx *x,const u8 *k,u32 kbits,u32 ivbits) +{ + const char *constants; + + x->input[4] = U8TO32_LITTLE(k + 0); + x->input[5] = U8TO32_LITTLE(k + 4); + x->input[6] = U8TO32_LITTLE(k + 8); + x->input[7] = U8TO32_LITTLE(k + 12); + if (kbits == 256) { /* recommended */ + k += 16; + constants = sigma; + } else { /* kbits == 128 */ + constants = tau; + } + x->input[8] = U8TO32_LITTLE(k + 0); + x->input[9] = U8TO32_LITTLE(k + 4); + x->input[10] = U8TO32_LITTLE(k + 8); + x->input[11] = U8TO32_LITTLE(k + 12); + x->input[0] = U8TO32_LITTLE(constants + 0); + x->input[1] = U8TO32_LITTLE(constants + 4); + x->input[2] = U8TO32_LITTLE(constants + 8); + x->input[3] = U8TO32_LITTLE(constants + 12); +} + +static void +chacha_ivsetup(chacha_ctx *x,const u8 *iv) +{ + x->input[12] = 0; + x->input[13] = 0; + x->input[14] = U8TO32_LITTLE(iv + 0); + x->input[15] = U8TO32_LITTLE(iv + 4); +} + +static void +chacha_encrypt_bytes(chacha_ctx *x,const u8 *m,u8 *c,u32 bytes) +{ + u32 x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15; + u32 j0, j1, j2, j3, j4, j5, j6, j7, j8, j9, j10, j11, j12, j13, j14, j15; + u8 *ctarget = NULL; + u8 tmp[64]; + u_int i; + + if (!bytes) return; + + j0 = x->input[0]; + j1 = x->input[1]; + j2 = x->input[2]; + j3 = x->input[3]; + j4 = x->input[4]; + j5 = x->input[5]; + j6 = x->input[6]; + j7 = x->input[7]; + j8 = x->input[8]; + j9 = x->input[9]; + j10 = x->input[10]; + j11 = x->input[11]; + j12 = x->input[12]; + j13 = x->input[13]; + j14 = x->input[14]; + j15 = x->input[15]; + + for (;;) { + if (bytes < 64) { + for (i = 0;i < bytes;++i) tmp[i] = m[i]; + m = tmp; + ctarget = c; + c = tmp; + } + x0 = j0; + x1 = j1; + x2 = j2; + x3 = j3; + x4 = j4; + x5 = j5; + x6 = j6; + x7 = j7; + x8 = j8; + x9 = j9; + x10 = j10; + x11 = j11; + x12 = j12; + x13 = j13; + x14 = j14; + x15 = j15; + for (i = 20;i > 0;i -= 2) { + QUARTERROUND( x0, x4, x8,x12) + QUARTERROUND( x1, x5, x9,x13) + QUARTERROUND( x2, x6,x10,x14) + QUARTERROUND( x3, x7,x11,x15) + QUARTERROUND( x0, x5,x10,x15) + QUARTERROUND( x1, x6,x11,x12) + QUARTERROUND( x2, x7, x8,x13) + QUARTERROUND( x3, x4, x9,x14) + } + x0 = PLUS(x0,j0); + x1 = PLUS(x1,j1); + x2 = PLUS(x2,j2); + x3 = PLUS(x3,j3); + x4 = PLUS(x4,j4); + x5 = PLUS(x5,j5); + x6 = PLUS(x6,j6); + x7 = PLUS(x7,j7); + x8 = PLUS(x8,j8); + x9 = PLUS(x9,j9); + x10 = PLUS(x10,j10); + x11 = 
PLUS(x11,j11); + x12 = PLUS(x12,j12); + x13 = PLUS(x13,j13); + x14 = PLUS(x14,j14); + x15 = PLUS(x15,j15); + +#ifndef KEYSTREAM_ONLY + x0 = XOR(x0,U8TO32_LITTLE(m + 0)); + x1 = XOR(x1,U8TO32_LITTLE(m + 4)); + x2 = XOR(x2,U8TO32_LITTLE(m + 8)); + x3 = XOR(x3,U8TO32_LITTLE(m + 12)); + x4 = XOR(x4,U8TO32_LITTLE(m + 16)); + x5 = XOR(x5,U8TO32_LITTLE(m + 20)); + x6 = XOR(x6,U8TO32_LITTLE(m + 24)); + x7 = XOR(x7,U8TO32_LITTLE(m + 28)); + x8 = XOR(x8,U8TO32_LITTLE(m + 32)); + x9 = XOR(x9,U8TO32_LITTLE(m + 36)); + x10 = XOR(x10,U8TO32_LITTLE(m + 40)); + x11 = XOR(x11,U8TO32_LITTLE(m + 44)); + x12 = XOR(x12,U8TO32_LITTLE(m + 48)); + x13 = XOR(x13,U8TO32_LITTLE(m + 52)); + x14 = XOR(x14,U8TO32_LITTLE(m + 56)); + x15 = XOR(x15,U8TO32_LITTLE(m + 60)); +#endif + + j12 = PLUSONE(j12); + if (!j12) { + j13 = PLUSONE(j13); + /* stopping at 2^70 bytes per nonce is user's responsibility */ + } + + U32TO8_LITTLE(c + 0,x0); + U32TO8_LITTLE(c + 4,x1); + U32TO8_LITTLE(c + 8,x2); + U32TO8_LITTLE(c + 12,x3); + U32TO8_LITTLE(c + 16,x4); + U32TO8_LITTLE(c + 20,x5); + U32TO8_LITTLE(c + 24,x6); + U32TO8_LITTLE(c + 28,x7); + U32TO8_LITTLE(c + 32,x8); + U32TO8_LITTLE(c + 36,x9); + U32TO8_LITTLE(c + 40,x10); + U32TO8_LITTLE(c + 44,x11); + U32TO8_LITTLE(c + 48,x12); + U32TO8_LITTLE(c + 52,x13); + U32TO8_LITTLE(c + 56,x14); + U32TO8_LITTLE(c + 60,x15); + + if (bytes <= 64) { + if (bytes < 64) { + for (i = 0;i < bytes;++i) ctarget[i] = c[i]; + } + x->input[12] = j12; + x->input[13] = j13; + return; + } + bytes -= 64; + c += 64; +#ifndef KEYSTREAM_ONLY + m += 64; +#endif + } +} diff --git a/code/crypto/compat/explicit_bzero.c b/code/crypto/compat/explicit_bzero.c new file mode 100644 index 0000000..5dd0103 --- /dev/null +++ b/code/crypto/compat/explicit_bzero.c @@ -0,0 +1,19 @@ +/* $OpenBSD: explicit_bzero.c,v 1.4 2015/08/31 02:53:57 guenther Exp $ */ +/* + * Public domain. + * Written by Matthew Dempsky. + */ + +#include <string.h> + +__attribute__((weak)) void +__explicit_bzero_hook(void *buf, size_t len) +{ +} + +void +explicit_bzero(void *buf, size_t len) +{ + memset(buf, 0, len); + __explicit_bzero_hook(buf, len); +} diff --git a/code/crypto/compat/getentropy_aix.c b/code/crypto/compat/getentropy_aix.c new file mode 100644 index 0000000..b046412 --- /dev/null +++ b/code/crypto/compat/getentropy_aix.c @@ -0,0 +1,426 @@ +/* $OpenBSD: getentropy_aix.c,v 1.5 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2015 Michael Felt <aixtools@gmail.com> + * Copyright (c) 2014 Theo de Raadt <deraadt@openbsd.org> + * Copyright (c) 2014 Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ +/* + * -lperfstat is needed for the psuedo entropy data + */ + +#include <sys/mman.h> +#include <sys/procfs.h> +#include <sys/protosw.h> +#include <sys/resource.h> +#include <sys/socket.h> +#include <sys/stat.h> +#include <sys/statvfs.h> +#include <sys/timers.h> +#include <errno.h> +#include <fcntl.h> +#include <signal.h> +#include <stdio.h> +#include <string.h> +#include <termios.h> + +#include <compat.h> +#include <sha.h> + +#include <libperfstat.h> + +#define REPEAT 5 +#define min(a, b) (((a) < (b)) ? (a) : (b)) + +#define HX(a, b) \ + do { \ + if ((a)) \ + HD(errno); \ + else \ + HD(b); \ + } while (0) + +#define HR(x, l) (SHA512_Update(&ctx, (char *)(x), (l))) +#define HD(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (x))) +#define HF(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (void*))) + +int getentropy(void *buf, size_t len); + +static int gotdata(char *buf, size_t len); +static int getentropy_urandom(void *buf, size_t len, const char *path, + int devfscheck); +static int getentropy_fallback(void *buf, size_t len); + +int +getentropy(void *buf, size_t len) +{ + int ret = -1; + + if (len > 256) { + errno = EIO; + return (-1); + } + + /* + * Try to get entropy with /dev/urandom + */ + ret = getentropy_urandom(buf, len, "/dev/urandom", 0); + if (ret != -1) + return (ret); + + /* + * Entropy collection via /dev/urandom has failed. + * + * No other API exists for collecting entropy, and we have + * no failsafe way to get it on AIX that is not sensitive + * to resource exhaustion. + * + * We have very few options: + * - Even syslog_r is unsafe to call at this low level, so + * there is no way to alert the user or program. + * - Cannot call abort() because some systems have unsafe + * corefiles. + * - Could raise(SIGKILL) resulting in silent program termination. + * - Return EIO, to hint that arc4random's stir function + * should raise(SIGKILL) + * - Do the best under the circumstances.... + * + * This code path exists to bring light to the issue that AIX + * does not provide a failsafe API for entropy collection. + * + * We hope this demonstrates that AIX should consider + * providing a new failsafe API which works in a chroot or + * when file descriptors are exhausted. + */ +#undef FAIL_INSTEAD_OF_TRYING_FALLBACK +#ifdef FAIL_INSTEAD_OF_TRYING_FALLBACK + raise(SIGKILL); +#endif + ret = getentropy_fallback(buf, len); + if (ret != -1) + return (ret); + + errno = EIO; + return (ret); +} + +/* + * Basic sanity checking; wish we could do better. 
+ */ +static int +gotdata(char *buf, size_t len) +{ + char any_set = 0; + size_t i; + + for (i = 0; i < len; ++i) + any_set |= buf[i]; + if (any_set == 0) + return (-1); + return (0); +} + +static int +getentropy_urandom(void *buf, size_t len, const char *path, int devfscheck) +{ + struct stat st; + size_t i; + int fd, flags; + int save_errno = errno; + +start: + + flags = O_RDONLY; +#ifdef O_NOFOLLOW + flags |= O_NOFOLLOW; +#endif +#ifdef O_CLOEXEC + flags |= O_CLOEXEC; +#endif + fd = open(path, flags, 0); + if (fd == -1) { + if (errno == EINTR) + goto start; + goto nodevrandom; + } +#ifndef O_CLOEXEC + fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC); +#endif + + /* Lightly verify that the device node looks sane */ + if (fstat(fd, &st) == -1 || !S_ISCHR(st.st_mode)) { + close(fd); + goto nodevrandom; + } + for (i = 0; i < len; ) { + size_t wanted = len - i; + ssize_t ret = read(fd, (char *)buf + i, wanted); + + if (ret == -1) { + if (errno == EAGAIN || errno == EINTR) + continue; + close(fd); + goto nodevrandom; + } + i += ret; + } + close(fd); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +nodevrandom: + errno = EIO; + return (-1); +} + +static const int cl[] = { + CLOCK_REALTIME, +#ifdef CLOCK_MONOTONIC + CLOCK_MONOTONIC, +#endif +#ifdef CLOCK_MONOTONIC_RAW + CLOCK_MONOTONIC_RAW, +#endif +#ifdef CLOCK_TAI + CLOCK_TAI, +#endif +#ifdef CLOCK_VIRTUAL + CLOCK_VIRTUAL, +#endif +#ifdef CLOCK_UPTIME + CLOCK_UPTIME, +#endif +#ifdef CLOCK_PROCESS_CPUTIME_ID + CLOCK_PROCESS_CPUTIME_ID, +#endif +#ifdef CLOCK_THREAD_CPUTIME_ID + CLOCK_THREAD_CPUTIME_ID, +#endif +}; + +static int +getentropy_fallback(void *buf, size_t len) +{ + uint8_t results[SHA512_DIGEST_LENGTH]; + int save_errno = errno, e, pgs = sysconf(_SC_PAGESIZE), faster = 0, repeat; + static int cnt; + struct timespec ts; + struct timeval tv; + perfstat_cpu_total_t cpustats; +#ifdef _AIX61 + perfstat_cpu_total_wpar_t cpustats_wpar; +#endif + perfstat_partition_total_t lparstats; + perfstat_disk_total_t diskinfo; + perfstat_netinterface_total_t netinfo; + struct rusage ru; + sigset_t sigset; + struct stat st; + SHA512_CTX ctx; + static pid_t lastpid; + pid_t pid; + size_t i, ii, m; + char *p; + + pid = getpid(); + if (lastpid == pid) { + faster = 1; + repeat = 2; + } else { + faster = 0; + lastpid = pid; + repeat = REPEAT; + } + for (i = 0; i < len; ) { + int j; + SHA512_Init(&ctx); + for (j = 0; j < repeat; j++) { + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HX(perfstat_cpu_total(NULL, &cpustats, + sizeof(cpustats), 1) == -1, cpustats); + +#ifdef _AIX61 + HX(perfstat_cpu_total_wpar(NULL, &cpustats_wpar, + sizeof(cpustats_wpar), 1) == -1, cpustats_wpar); +#endif + + HX(perfstat_partition_total(NULL, &lparstats, + sizeof(lparstats), 1) == -1, lparstats); + + HX(perfstat_disk_total(NULL, &diskinfo, + sizeof(diskinfo), 1) == -1, diskinfo); + + HX(perfstat_netinterface_total(NULL, &netinfo, + sizeof(netinfo), 1) == -1, netinfo); + + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); ii++) + HX(clock_gettime(cl[ii], &ts) == -1, ts); + + HX((pid = getpid()) == -1, pid); + HX((pid = getsid(pid)) == -1, pid); + HX((pid = getppid()) == -1, pid); + HX((pid = getpgid(0)) == -1, pid); + HX((e = getpriority(0, 0)) == -1, e); + + if (!faster) { + ts.tv_sec = 0; + ts.tv_nsec = 1; + (void) nanosleep(&ts, NULL); + } + + HX(sigpending(&sigset) == -1, sigset); + HX(sigprocmask(SIG_BLOCK, NULL, &sigset) == -1, + sigset); + + HF(getentropy); /* 
an addr in this library */ + HF(printf); /* an addr in libc */ + p = (char *)&p; + HD(p); /* an addr on stack */ + p = (char *)&errno; + HD(p); /* the addr of errno */ + + if (i == 0) { + struct sockaddr_storage ss; + struct statvfs stvfs; + struct termios tios; + socklen_t ssl; + off_t off; + + /* + * Prime-sized mappings encourage fragmentation; + * thus exposing some address entropy. + */ + struct mm { + size_t npg; + void *p; + } mm[] = { + { 17, MAP_FAILED }, { 3, MAP_FAILED }, + { 11, MAP_FAILED }, { 2, MAP_FAILED }, + { 5, MAP_FAILED }, { 3, MAP_FAILED }, + { 7, MAP_FAILED }, { 1, MAP_FAILED }, + { 57, MAP_FAILED }, { 3, MAP_FAILED }, + { 131, MAP_FAILED }, { 1, MAP_FAILED }, + }; + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + HX(mm[m].p = mmap(NULL, + mm[m].npg * pgs, + PROT_READ|PROT_WRITE, + MAP_PRIVATE|MAP_ANON, -1, + (off_t)0), mm[m].p); + if (mm[m].p != MAP_FAILED) { + size_t mo; + + /* Touch some memory... */ + p = mm[m].p; + mo = cnt % + (mm[m].npg * pgs - 1); + p[mo] = 1; + cnt += (int)((long)(mm[m].p) + / pgs); + } + + /* Check cnts and times... */ + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); + ii++) { + HX((e = clock_gettime(cl[ii], + &ts)) == -1, ts); + if (e != -1) + cnt += (int)ts.tv_nsec; + } + + HX((e = getrusage(RUSAGE_SELF, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + if (mm[m].p != MAP_FAILED) + munmap(mm[m].p, mm[m].npg * pgs); + mm[m].p = MAP_FAILED; + } + + HX(stat(".", &st) == -1, st); + HX(statvfs(".", &stvfs) == -1, stvfs); + + HX(stat("/", &st) == -1, st); + HX(statvfs("/", &stvfs) == -1, stvfs); + + HX((e = fstat(0, &st)) == -1, st); + if (e == -1) { + if (S_ISREG(st.st_mode) || + S_ISFIFO(st.st_mode) || + S_ISSOCK(st.st_mode)) { + HX(fstatvfs(0, &stvfs) == -1, + stvfs); + HX((off = lseek(0, (off_t)0, + SEEK_CUR)) < 0, off); + } + if (S_ISCHR(st.st_mode)) { + HX(tcgetattr(0, &tios) == -1, + tios); + } else if (S_ISSOCK(st.st_mode)) { + memset(&ss, 0, sizeof ss); + ssl = sizeof(ss); + HX(getpeername(0, + (void *)&ss, &ssl) == -1, + ss); + } + } + + HX((e = getrusage(RUSAGE_CHILDREN, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } else { + /* Subsequent hashes absorb previous result */ + HD(results); + } + + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HD(cnt); + } + SHA512_Final(results, &ctx); + memcpy((char *)buf + i, results, min(sizeof(results), len - i)); + i += min(sizeof(results), len - i); + } + explicit_bzero(&ctx, sizeof ctx); + explicit_bzero(results, sizeof results); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_freebsd.c b/code/crypto/compat/getentropy_freebsd.c new file mode 100644 index 0000000..30cd68e --- /dev/null +++ b/code/crypto/compat/getentropy_freebsd.c @@ -0,0 +1,62 @@ +/* $OpenBSD: getentropy_freebsd.c,v 1.3 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014 Pawel Jakub Dawidek <pjd@FreeBSD.org> + * Copyright (c) 2014 Brent Cook <bcook@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <sys/types.h> +#include <sys/sysctl.h> + +#include <errno.h> +#include <stddef.h> + +/* + * Derived from lib/libc/gen/arc4random.c from FreeBSD. + */ +static size_t +getentropy_sysctl(u_char *buf, size_t size) +{ + int mib[2]; + size_t len, done; + + mib[0] = CTL_KERN; + mib[1] = KERN_ARND; + done = 0; + + do { + len = size; + if (sysctl(mib, 2, buf, &len, NULL, 0) == -1) + return (done); + done += len; + buf += len; + size -= len; + } while (size > 0); + + return (done); +} + +int +getentropy(void *buf, size_t len) +{ + if (len <= 256 && getentropy_sysctl(buf, len) == len) + return (0); + + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_hpux.c b/code/crypto/compat/getentropy_hpux.c new file mode 100644 index 0000000..a74e53c --- /dev/null +++ b/code/crypto/compat/getentropy_hpux.c @@ -0,0 +1,421 @@ +/* $OpenBSD: getentropy_hpux.c,v 1.5 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014 Theo de Raadt <deraadt@openbsd.org> + * Copyright (c) 2014 Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <sys/types.h> +#include <sys/param.h> +#include <sys/ioctl.h> +#include <sys/resource.h> +#include <sys/syscall.h> +#include <sys/statvfs.h> +#include <sys/socket.h> +#include <sys/mount.h> +#include <sys/mman.h> +#include <sys/stat.h> +#include <sys/time.h> +#include <stdlib.h> +#include <stdint.h> +#include <stdio.h> +#include <termios.h> +#include <fcntl.h> +#include <signal.h> +#include <string.h> +#include <errno.h> +#include <unistd.h> +#include <time.h> + +#include <compat.h> +#include <sha.h> + +#include <sys/vfs.h> + +#include <sys/pstat.h> + +#define REPEAT 5 +#define min(a, b) (((a) < (b)) ? 
(a) : (b)) + +#define HX(a, b) \ + do { \ + if ((a)) \ + HD(errno); \ + else \ + HD(b); \ + } while (0) + +#define HR(x, l) (SHA512_Update(&ctx, (char *)(x), (l))) +#define HD(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (x))) +#define HF(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (void*))) + +int getentropy(void *buf, size_t len); + +static int gotdata(char *buf, size_t len); +static int getentropy_urandom(void *buf, size_t len, const char *path, + int devfscheck); +static int getentropy_fallback(void *buf, size_t len); + +int +getentropy(void *buf, size_t len) +{ + int ret = -1; + + if (len > 256) { + errno = EIO; + return (-1); + } + + /* + * Try to get entropy with /dev/urandom + */ + ret = getentropy_urandom(buf, len, "/dev/urandom", 0); + if (ret != -1) + return (ret); + + /* + * Entropy collection via /dev/urandom has failed. + * + * No other API exists for collecting entropy, and we have + * no failsafe way to get it on hpux that is not sensitive + * to resource exhaustion. + * + * We have very few options: + * - Even syslog_r is unsafe to call at this low level, so + * there is no way to alert the user or program. + * - Cannot call abort() because some systems have unsafe + * corefiles. + * - Could raise(SIGKILL) resulting in silent program termination. + * - Return EIO, to hint that arc4random's stir function + * should raise(SIGKILL) + * - Do the best under the circumstances.... + * + * This code path exists to bring light to the issue that hpux + * does not provide a failsafe API for entropy collection. + * + * We hope this demonstrates that hpux should consider + * providing a new failsafe API which works in a chroot or + * when file descriptors are exhausted. + */ +#undef FAIL_INSTEAD_OF_TRYING_FALLBACK +#ifdef FAIL_INSTEAD_OF_TRYING_FALLBACK + raise(SIGKILL); +#endif + ret = getentropy_fallback(buf, len); + if (ret != -1) + return (ret); + + errno = EIO; + return (ret); +} + +/* + * Basic sanity checking; wish we could do better. 
+ */ +static int +gotdata(char *buf, size_t len) +{ + char any_set = 0; + size_t i; + + for (i = 0; i < len; ++i) + any_set |= buf[i]; + if (any_set == 0) + return (-1); + return (0); +} + +static int +getentropy_urandom(void *buf, size_t len, const char *path, int devfscheck) +{ + struct stat st; + size_t i; + int fd, flags; + int save_errno = errno; + +start: + + flags = O_RDONLY; +#ifdef O_NOFOLLOW + flags |= O_NOFOLLOW; +#endif +#ifdef O_CLOEXEC + flags |= O_CLOEXEC; +#endif + fd = open(path, flags, 0); + if (fd == -1) { + if (errno == EINTR) + goto start; + goto nodevrandom; + } +#ifndef O_CLOEXEC + fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC); +#endif + + /* Lightly verify that the device node looks sane */ + if (fstat(fd, &st) == -1 || !S_ISCHR(st.st_mode)) { + close(fd); + goto nodevrandom; + } + for (i = 0; i < len; ) { + size_t wanted = len - i; + ssize_t ret = read(fd, (char *)buf + i, wanted); + + if (ret == -1) { + if (errno == EAGAIN || errno == EINTR) + continue; + close(fd); + goto nodevrandom; + } + i += ret; + } + close(fd); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +nodevrandom: + errno = EIO; + return (-1); +} + +static const int cl[] = { + CLOCK_REALTIME, +#ifdef CLOCK_MONOTONIC + CLOCK_MONOTONIC, +#endif +#ifdef CLOCK_MONOTONIC_RAW + CLOCK_MONOTONIC_RAW, +#endif +#ifdef CLOCK_TAI + CLOCK_TAI, +#endif +#ifdef CLOCK_VIRTUAL + CLOCK_VIRTUAL, +#endif +#ifdef CLOCK_UPTIME + CLOCK_UPTIME, +#endif +#ifdef CLOCK_PROCESS_CPUTIME_ID + CLOCK_PROCESS_CPUTIME_ID, +#endif +#ifdef CLOCK_THREAD_CPUTIME_ID + CLOCK_THREAD_CPUTIME_ID, +#endif +}; + +static int +getentropy_fallback(void *buf, size_t len) +{ + uint8_t results[SHA512_DIGEST_LENGTH]; + int save_errno = errno, e, pgs = sysconf(_SC_PAGESIZE), faster = 0, repeat; + static int cnt; + struct timespec ts; + struct timeval tv; + struct pst_vminfo pvi; + struct pst_vm_status pvs; + struct pst_dynamic pdy; + struct rusage ru; + sigset_t sigset; + struct stat st; + SHA512_CTX ctx; + static pid_t lastpid; + pid_t pid; + size_t i, ii, m; + char *p; + + pid = getpid(); + if (lastpid == pid) { + faster = 1; + repeat = 2; + } else { + faster = 0; + lastpid = pid; + repeat = REPEAT; + } + for (i = 0; i < len; ) { + int j; + SHA512_Init(&ctx); + for (j = 0; j < repeat; j++) { + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HX(pstat_getvminfo(&pvi, sizeof(pvi), 1, 0) != 1, pvi); + HX(pstat_getprocvm(&pvs, sizeof(pvs), 0, 0) != 1, pvs); + + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); ii++) + HX(clock_gettime(cl[ii], &ts) == -1, ts); + + HX((pid = getpid()) == -1, pid); + HX((pid = getsid(pid)) == -1, pid); + HX((pid = getppid()) == -1, pid); + HX((pid = getpgid(0)) == -1, pid); + HX((e = getpriority(0, 0)) == -1, e); + + if(pstat_getdynamic(&pdy, sizeof(pdy), 1, 0) != 1) { + HD(errno); + } else { + HD(pdy.psd_avg_1_min); + HD(pdy.psd_avg_5_min); + HD(pdy.psd_avg_15_min); + } + + if (!faster) { + ts.tv_sec = 0; + ts.tv_nsec = 1; + (void) nanosleep(&ts, NULL); + } + + HX(sigpending(&sigset) == -1, sigset); + HX(sigprocmask(SIG_BLOCK, NULL, &sigset) == -1, + sigset); + + HF(getentropy); /* an addr in this library */ + HF(printf); /* an addr in libc */ + p = (char *)&p; + HD(p); /* an addr on stack */ + p = (char *)&errno; + HD(p); /* the addr of errno */ + + if (i == 0) { + struct sockaddr_storage ss; + struct statvfs stvfs; + struct termios tios; + socklen_t ssl; + off_t off; + + /* + * Prime-sized mappings encourage 
fragmentation; + * thus exposing some address entropy. + */ + struct mm { + size_t npg; + void *p; + } mm[] = { + { 17, MAP_FAILED }, { 3, MAP_FAILED }, + { 11, MAP_FAILED }, { 2, MAP_FAILED }, + { 5, MAP_FAILED }, { 3, MAP_FAILED }, + { 7, MAP_FAILED }, { 1, MAP_FAILED }, + { 57, MAP_FAILED }, { 3, MAP_FAILED }, + { 131, MAP_FAILED }, { 1, MAP_FAILED }, + }; + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + HX(mm[m].p = mmap(NULL, + mm[m].npg * pgs, + PROT_READ|PROT_WRITE, + MAP_PRIVATE|MAP_ANON, -1, + (off_t)0), mm[m].p); + if (mm[m].p != MAP_FAILED) { + size_t mo; + + /* Touch some memory... */ + p = mm[m].p; + mo = cnt % + (mm[m].npg * pgs - 1); + p[mo] = 1; + cnt += (int)((long)(mm[m].p) + / pgs); + } + + /* Check cnts and times... */ + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); + ii++) { + HX((e = clock_gettime(cl[ii], + &ts)) == -1, ts); + if (e != -1) + cnt += (int)ts.tv_nsec; + } + + HX((e = getrusage(RUSAGE_SELF, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + if (mm[m].p != MAP_FAILED) + munmap(mm[m].p, mm[m].npg * pgs); + mm[m].p = MAP_FAILED; + } + + HX(stat(".", &st) == -1, st); + HX(statvfs(".", &stvfs) == -1, stvfs); + + HX(stat("/", &st) == -1, st); + HX(statvfs("/", &stvfs) == -1, stvfs); + + HX((e = fstat(0, &st)) == -1, st); + if (e == -1) { + if (S_ISREG(st.st_mode) || + S_ISFIFO(st.st_mode) || + S_ISSOCK(st.st_mode)) { + HX(fstatvfs(0, &stvfs) == -1, + stvfs); + HX((off = lseek(0, (off_t)0, + SEEK_CUR)) < 0, off); + } + if (S_ISCHR(st.st_mode)) { + HX(tcgetattr(0, &tios) == -1, + tios); + } else if (S_ISSOCK(st.st_mode)) { + memset(&ss, 0, sizeof ss); + ssl = sizeof(ss); + HX(getpeername(0, + (void *)&ss, &ssl) == -1, + ss); + } + } + + HX((e = getrusage(RUSAGE_CHILDREN, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } else { + /* Subsequent hashes absorb previous result */ + HD(results); + } + + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HD(cnt); + } + SHA512_Final(results, &ctx); + memcpy((char *)buf + i, results, min(sizeof(results), len - i)); + i += min(sizeof(results), len - i); + } + explicit_bzero(&ctx, sizeof ctx); + explicit_bzero(results, sizeof results); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_linux.c b/code/crypto/compat/getentropy_linux.c new file mode 100644 index 0000000..37362e9 --- /dev/null +++ b/code/crypto/compat/getentropy_linux.c @@ -0,0 +1,549 @@ +/* $OpenBSD: getentropy_linux.c,v 1.43 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014 Theo de Raadt <deraadt@openbsd.org> + * Copyright (c) 2014 Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#define _POSIX_C_SOURCE 199309L +#define _GNU_SOURCE 1 +#include <sys/types.h> +#include <sys/param.h> +#include <sys/ioctl.h> +#include <sys/resource.h> +#include <sys/syscall.h> +#ifdef SYS__sysctl +#include <linux/sysctl.h> +#endif +#include <sys/statvfs.h> +#include <sys/socket.h> +#include <sys/mount.h> +#include <sys/mman.h> +#include <sys/stat.h> +#include <sys/time.h> +#include <stdlib.h> +#include <stdint.h> +#include <stdio.h> +#include <link.h> +#include <termios.h> +#include <fcntl.h> +#include <signal.h> +#include <string.h> +#include <errno.h> +#include <unistd.h> +#include <time.h> + +#include <compat.h> +#include <sha.h> + +#include <linux/types.h> +#include <linux/random.h> +#ifdef HAVE_GETAUXVAL +#include <sys/auxv.h> +#endif +#include <sys/vfs.h> + +#define REPEAT 5 +#define min(a, b) (((a) < (b)) ? (a) : (b)) + +#define HX(a, b) \ + do { \ + if ((a)) \ + HD(errno); \ + else \ + HD(b); \ + } while (0) + +#define HR(x, l) (SHA512_Update(&ctx, (char *)(x), (l))) +#define HD(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (x))) +#define HF(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (void*))) + +int getentropy(void *buf, size_t len); + +static int gotdata(char *buf, size_t len); +#ifdef SYS_getrandom +static int getentropy_getrandom(void *buf, size_t len); +#endif +static int getentropy_urandom(void *buf, size_t len); +#ifdef SYS__sysctl +static int getentropy_sysctl(void *buf, size_t len); +#endif +static int getentropy_fallback(void *buf, size_t len); +static int getentropy_phdr(struct dl_phdr_info *info, size_t size, void *data); + +int +getentropy(void *buf, size_t len) +{ + int ret = -1; + + if (len > 256) { + errno = EIO; + return (-1); + } + +#ifdef SYS_getrandom + /* + * Try descriptor-less getrandom() + */ + ret = getentropy_getrandom(buf, len); + if (ret != -1) + return (ret); + if (errno != ENOSYS) + return (-1); +#endif + + /* + * Try to get entropy with /dev/urandom + * + * This can fail if the process is inside a chroot or if file + * descriptors are exhausted. + */ + ret = getentropy_urandom(buf, len); + if (ret != -1) + return (ret); + +#ifdef SYS__sysctl + /* + * Try to use sysctl CTL_KERN, KERN_RANDOM, RANDOM_UUID. + * sysctl is a failsafe API, so it guarantees a result. This + * should work inside a chroot, or when file descriptors are + * exhausted. + * + * However this can fail if the Linux kernel removes support + * for sysctl. Starting in 2007, there have been efforts to + * deprecate the sysctl API/ABI, and push callers towards use + * of the chroot-unavailable fd-using /proc mechanism -- + * essentially the same problems as /dev/urandom. + * + * Numerous setbacks have been encountered in their deprecation + * schedule, so as of June 2014 the kernel ABI still exists on + * most Linux architectures. The sysctl() stub in libc is missing + * on some systems. There are also reports that some kernels + * spew messages to the console. + */ + ret = getentropy_sysctl(buf, len); + if (ret != -1) + return (ret); +#endif /* SYS__sysctl */ + + /* + * Entropy collection via /dev/urandom and sysctl have failed. 
+ * + * No other API exists for collecting entropy. See the large + * comment block above. + * + * We have very few options: + * - Even syslog_r is unsafe to call at this low level, so + * there is no way to alert the user or program. + * - Cannot call abort() because some systems have unsafe + * corefiles. + * - Could raise(SIGKILL) resulting in silent program termination. + * - Return EIO, to hint that arc4random's stir function + * should raise(SIGKILL) + * - Do the best under the circumstances.... + * + * This code path exists to bring light to the issue that Linux + * does not provide a failsafe API for entropy collection. + * + * We hope this demonstrates that Linux should either retain their + * sysctl ABI, or consider providing a new failsafe API which + * works in a chroot or when file descriptors are exhausted. + */ +#undef FAIL_INSTEAD_OF_TRYING_FALLBACK +#ifdef FAIL_INSTEAD_OF_TRYING_FALLBACK + raise(SIGKILL); +#endif + ret = getentropy_fallback(buf, len); + if (ret != -1) + return (ret); + + errno = EIO; + return (ret); +} + +/* + * Basic sanity checking; wish we could do better. + */ +static int +gotdata(char *buf, size_t len) +{ + char any_set = 0; + size_t i; + + for (i = 0; i < len; ++i) + any_set |= buf[i]; + if (any_set == 0) + return (-1); + return (0); +} + +#ifdef SYS_getrandom +static int +getentropy_getrandom(void *buf, size_t len) +{ + int pre_errno = errno; + int ret; + if (len > 256) + return (-1); + do { + ret = syscall(SYS_getrandom, buf, len, 0); + } while (ret == -1 && errno == EINTR); + + if (ret != len) + return (-1); + errno = pre_errno; + return (0); +} +#endif + +static int +getentropy_urandom(void *buf, size_t len) +{ + struct stat st; + size_t i; + int fd, cnt, flags; + int save_errno = errno; + +start: + + flags = O_RDONLY; +#ifdef O_NOFOLLOW + flags |= O_NOFOLLOW; +#endif +#ifdef O_CLOEXEC + flags |= O_CLOEXEC; +#endif + fd = open("/dev/urandom", flags, 0); + if (fd == -1) { + if (errno == EINTR) + goto start; + goto nodevrandom; + } +#ifndef O_CLOEXEC + fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC); +#endif + + /* Lightly verify that the device node looks sane */ + if (fstat(fd, &st) == -1 || !S_ISCHR(st.st_mode)) { + close(fd); + goto nodevrandom; + } + if (ioctl(fd, RNDGETENTCNT, &cnt) == -1) { + close(fd); + goto nodevrandom; + } + for (i = 0; i < len; ) { + size_t wanted = len - i; + ssize_t ret = read(fd, (char *)buf + i, wanted); + + if (ret == -1) { + if (errno == EAGAIN || errno == EINTR) + continue; + close(fd); + goto nodevrandom; + } + i += ret; + } + close(fd); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +nodevrandom: + errno = EIO; + return (-1); +} + +#ifdef SYS__sysctl +static int +getentropy_sysctl(void *buf, size_t len) +{ + static int mib[] = { CTL_KERN, KERN_RANDOM, RANDOM_UUID }; + size_t i; + int save_errno = errno; + + for (i = 0; i < len; ) { + size_t chunk = min(len - i, 16); + + /* SYS__sysctl because some systems already removed sysctl() */ + struct __sysctl_args args = { + .name = mib, + .nlen = 3, + .oldval = (char *)buf + i, + .oldlenp = &chunk, + }; + if (syscall(SYS__sysctl, &args) != 0) + goto sysctlfailed; + i += chunk; + } + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +sysctlfailed: + errno = EIO; + return (-1); +} +#endif /* SYS__sysctl */ + +static const int cl[] = { + CLOCK_REALTIME, +#ifdef CLOCK_MONOTONIC + CLOCK_MONOTONIC, +#endif +#ifdef CLOCK_MONOTONIC_RAW + CLOCK_MONOTONIC_RAW, +#endif +#ifdef CLOCK_TAI + CLOCK_TAI, 
+#endif +#ifdef CLOCK_VIRTUAL + CLOCK_VIRTUAL, +#endif +#ifdef CLOCK_UPTIME + CLOCK_UPTIME, +#endif +#ifdef CLOCK_PROCESS_CPUTIME_ID + CLOCK_PROCESS_CPUTIME_ID, +#endif +#ifdef CLOCK_THREAD_CPUTIME_ID + CLOCK_THREAD_CPUTIME_ID, +#endif +}; + +static int +getentropy_phdr(struct dl_phdr_info *info, size_t size, void *data) +{ + SHA512_CTX *ctx = data; + + SHA512_Update(ctx, &info->dlpi_addr, sizeof (info->dlpi_addr)); + return (0); +} + +static int +getentropy_fallback(void *buf, size_t len) +{ + uint8_t results[SHA512_DIGEST_LENGTH]; + int save_errno = errno, e, pgs = getpagesize(), faster = 0, repeat; + static int cnt; + struct timespec ts; + struct timeval tv; + struct rusage ru; + sigset_t sigset; + struct stat st; + SHA512_CTX ctx; + static pid_t lastpid; + pid_t pid; + size_t i, ii, m; + char *p; + + pid = getpid(); + if (lastpid == pid) { + faster = 1; + repeat = 2; + } else { + faster = 0; + lastpid = pid; + repeat = REPEAT; + } + for (i = 0; i < len; ) { + int j; + SHA512_Init(&ctx); + for (j = 0; j < repeat; j++) { + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + dl_iterate_phdr(getentropy_phdr, &ctx); + + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); ii++) + HX(clock_gettime(cl[ii], &ts) == -1, ts); + + HX((pid = getpid()) == -1, pid); + HX((pid = getsid(pid)) == -1, pid); + HX((pid = getppid()) == -1, pid); + HX((pid = getpgid(0)) == -1, pid); + HX((e = getpriority(0, 0)) == -1, e); + + if (!faster) { + ts.tv_sec = 0; + ts.tv_nsec = 1; + (void) nanosleep(&ts, NULL); + } + + HX(sigpending(&sigset) == -1, sigset); + HX(sigprocmask(SIG_BLOCK, NULL, &sigset) == -1, + sigset); + + HF(getentropy); /* an addr in this library */ + HF(printf); /* an addr in libc */ + p = (char *)&p; + HD(p); /* an addr on stack */ + p = (char *)&errno; + HD(p); /* the addr of errno */ + + if (i == 0) { + struct sockaddr_storage ss; + struct statvfs stvfs; + struct termios tios; + struct statfs stfs; + socklen_t ssl; + off_t off; + + /* + * Prime-sized mappings encourage fragmentation; + * thus exposing some address entropy. + */ + struct mm { + size_t npg; + void *p; + } mm[] = { + { 17, MAP_FAILED }, { 3, MAP_FAILED }, + { 11, MAP_FAILED }, { 2, MAP_FAILED }, + { 5, MAP_FAILED }, { 3, MAP_FAILED }, + { 7, MAP_FAILED }, { 1, MAP_FAILED }, + { 57, MAP_FAILED }, { 3, MAP_FAILED }, + { 131, MAP_FAILED }, { 1, MAP_FAILED }, + }; + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + HX(mm[m].p = mmap(NULL, + mm[m].npg * pgs, + PROT_READ|PROT_WRITE, + MAP_PRIVATE|MAP_ANON, -1, + (off_t)0), mm[m].p); + if (mm[m].p != MAP_FAILED) { + size_t mo; + + /* Touch some memory... */ + p = mm[m].p; + mo = cnt % + (mm[m].npg * pgs - 1); + p[mo] = 1; + cnt += (int)((long)(mm[m].p) + / pgs); + } + + /* Check cnts and times... 
*/ + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); + ii++) { + HX((e = clock_gettime(cl[ii], + &ts)) == -1, ts); + if (e != -1) + cnt += (int)ts.tv_nsec; + } + + HX((e = getrusage(RUSAGE_SELF, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + if (mm[m].p != MAP_FAILED) + munmap(mm[m].p, mm[m].npg * pgs); + mm[m].p = MAP_FAILED; + } + + HX(stat(".", &st) == -1, st); + HX(statvfs(".", &stvfs) == -1, stvfs); + HX(statfs(".", &stfs) == -1, stfs); + + HX(stat("/", &st) == -1, st); + HX(statvfs("/", &stvfs) == -1, stvfs); + HX(statfs("/", &stfs) == -1, stfs); + + HX((e = fstat(0, &st)) == -1, st); + if (e == -1) { + if (S_ISREG(st.st_mode) || + S_ISFIFO(st.st_mode) || + S_ISSOCK(st.st_mode)) { + HX(fstatvfs(0, &stvfs) == -1, + stvfs); + HX(fstatfs(0, &stfs) == -1, + stfs); + HX((off = lseek(0, (off_t)0, + SEEK_CUR)) < 0, off); + } + if (S_ISCHR(st.st_mode)) { + HX(tcgetattr(0, &tios) == -1, + tios); + } else if (S_ISSOCK(st.st_mode)) { + memset(&ss, 0, sizeof ss); + ssl = sizeof(ss); + HX(getpeername(0, + (void *)&ss, &ssl) == -1, + ss); + } + } + + HX((e = getrusage(RUSAGE_CHILDREN, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } else { + /* Subsequent hashes absorb previous result */ + HD(results); + } + + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HD(cnt); + } +#ifdef HAVE_GETAUXVAL +#ifdef AT_RANDOM + /* Not as random as you think but we take what we are given */ + p = (char *) getauxval(AT_RANDOM); + if (p) + HR(p, 16); +#endif +#ifdef AT_SYSINFO_EHDR + p = (char *) getauxval(AT_SYSINFO_EHDR); + if (p) + HR(p, pgs); +#endif +#ifdef AT_BASE + p = (char *) getauxval(AT_BASE); + if (p) + HD(p); +#endif +#endif + + SHA512_Final(results, &ctx); + memcpy((char *)buf + i, results, min(sizeof(results), len - i)); + i += min(sizeof(results), len - i); + } + explicit_bzero(&ctx, sizeof ctx); + explicit_bzero(results, sizeof results); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_netbsd.c b/code/crypto/compat/getentropy_netbsd.c new file mode 100644 index 0000000..45d68c9 --- /dev/null +++ b/code/crypto/compat/getentropy_netbsd.c @@ -0,0 +1,64 @@ +/* $OpenBSD: getentropy_netbsd.c,v 1.3 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014 Pawel Jakub Dawidek <pjd@FreeBSD.org> + * Copyright (c) 2014 Brent Cook <bcook@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <sys/types.h> +#include <sys/sysctl.h> + +#include <errno.h> +#include <stddef.h> + +/* + * Derived from lib/libc/gen/arc4random.c from FreeBSD. + */ +static size_t +getentropy_sysctl(u_char *buf, size_t size) +{ + int mib[2]; + size_t len, done; + + mib[0] = CTL_KERN; + mib[1] = KERN_ARND; + done = 0; + + do { + len = size; + if (sysctl(mib, 2, buf, &len, NULL, 0) == -1) + return (done); + done += len; + buf += len; + size -= len; + } while (size > 0); + + return (done); +} + +int +getentropy(void *buf, size_t len) +{ + if (len <= 256 && + getentropy_sysctl(buf, len) == len) { + return (0); + } + + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_osx.c b/code/crypto/compat/getentropy_osx.c new file mode 100644 index 0000000..44a313c --- /dev/null +++ b/code/crypto/compat/getentropy_osx.c @@ -0,0 +1,442 @@ +/* $OpenBSD: getentropy_osx.c,v 1.11 2016/09/03 15:24:09 bcook Exp $ */ + +/* + * Copyright (c) 2014 Theo de Raadt <deraadt@openbsd.org> + * Copyright (c) 2014 Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <TargetConditionals.h> +#include <sys/types.h> +#include <sys/param.h> +#include <sys/ioctl.h> +#include <sys/resource.h> +#include <sys/syscall.h> +#include <sys/sysctl.h> +#include <sys/statvfs.h> +#include <sys/socket.h> +#include <sys/mount.h> +#include <sys/mman.h> +#include <sys/stat.h> +#include <sys/time.h> +#include <stdlib.h> +#include <stdint.h> +#include <stdio.h> +#include <termios.h> +#include <fcntl.h> +#include <signal.h> +#include <string.h> +#include <errno.h> +#include <unistd.h> +#include <time.h> +#include <mach/mach_time.h> +#include <mach/mach_host.h> +#include <mach/host_info.h> +#if TARGET_OS_OSX +#include <sys/socketvar.h> +#include <sys/vmmeter.h> +#endif +#include <netinet/in.h> +#include <netinet/tcp.h> +#if TARGET_OS_OSX +#include <netinet/udp.h> +#include <netinet/ip_var.h> +#include <netinet/tcp_var.h> +#include <netinet/udp_var.h> +#endif +#include <CommonCrypto/CommonDigest.h> +#define SHA512_Update(a, b, c) (CC_SHA512_Update((a), (b), (c))) +#define SHA512_Init(xxx) (CC_SHA512_Init((xxx))) +#define SHA512_Final(xxx, yyy) (CC_SHA512_Final((xxx), (yyy))) +#define SHA512_CTX CC_SHA512_CTX +#define SHA512_DIGEST_LENGTH CC_SHA512_DIGEST_LENGTH + +#include <compat.h> + +#define REPEAT 5 +#define min(a, b) (((a) < (b)) ? 
(a) : (b)) + +#define HX(a, b) \ + do { \ + if ((a)) \ + HD(errno); \ + else \ + HD(b); \ + } while (0) + +#define HR(x, l) (SHA512_Update(&ctx, (char *)(x), (l))) +#define HD(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (x))) +#define HF(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (void*))) + +int getentropy(void *buf, size_t len); + +static int gotdata(char *buf, size_t len); +static int getentropy_urandom(void *buf, size_t len); +static int getentropy_fallback(void *buf, size_t len); + +int +getentropy(void *buf, size_t len) +{ + int ret = -1; + + if (len > 256) { + errno = EIO; + return (-1); + } + + /* + * Try to get entropy with /dev/urandom + * + * This can fail if the process is inside a chroot or if file + * descriptors are exhausted. + */ + ret = getentropy_urandom(buf, len); + if (ret != -1) + return (ret); + + /* + * Entropy collection via /dev/urandom and sysctl have failed. + * + * No other API exists for collecting entropy, and we have + * no failsafe way to get it on OSX that is not sensitive + * to resource exhaustion. + * + * We have very few options: + * - Even syslog_r is unsafe to call at this low level, so + * there is no way to alert the user or program. + * - Cannot call abort() because some systems have unsafe + * corefiles. + * - Could raise(SIGKILL) resulting in silent program termination. + * - Return EIO, to hint that arc4random's stir function + * should raise(SIGKILL) + * - Do the best under the circumstances.... + * + * This code path exists to bring light to the issue that OSX + * does not provide a failsafe API for entropy collection. + * + * We hope this demonstrates that OSX should consider + * providing a new failsafe API which works in a chroot or + * when file descriptors are exhausted. + */ +#undef FAIL_INSTEAD_OF_TRYING_FALLBACK +#ifdef FAIL_INSTEAD_OF_TRYING_FALLBACK + raise(SIGKILL); +#endif + ret = getentropy_fallback(buf, len); + if (ret != -1) + return (ret); + + errno = EIO; + return (ret); +} + +/* + * Basic sanity checking; wish we could do better. 
+ */ +static int +gotdata(char *buf, size_t len) +{ + char any_set = 0; + size_t i; + + for (i = 0; i < len; ++i) + any_set |= buf[i]; + if (any_set == 0) + return (-1); + return (0); +} + +static int +getentropy_urandom(void *buf, size_t len) +{ + struct stat st; + size_t i; + int fd, flags; + int save_errno = errno; + +start: + + flags = O_RDONLY; +#ifdef O_NOFOLLOW + flags |= O_NOFOLLOW; +#endif +#ifdef O_CLOEXEC + flags |= O_CLOEXEC; +#endif + fd = open("/dev/urandom", flags, 0); + if (fd == -1) { + if (errno == EINTR) + goto start; + goto nodevrandom; + } +#ifndef O_CLOEXEC + fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC); +#endif + + /* Lightly verify that the device node looks sane */ + if (fstat(fd, &st) == -1 || !S_ISCHR(st.st_mode)) { + close(fd); + goto nodevrandom; + } + for (i = 0; i < len; ) { + size_t wanted = len - i; + ssize_t ret = read(fd, (char *)buf + i, wanted); + + if (ret == -1) { + if (errno == EAGAIN || errno == EINTR) + continue; + close(fd); + goto nodevrandom; + } + i += ret; + } + close(fd); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +nodevrandom: + errno = EIO; + return (-1); +} + +#if TARGET_OS_OSX +static int tcpmib[] = { CTL_NET, AF_INET, IPPROTO_TCP, TCPCTL_STATS }; +static int udpmib[] = { CTL_NET, AF_INET, IPPROTO_UDP, UDPCTL_STATS }; +static int ipmib[] = { CTL_NET, AF_INET, IPPROTO_IP, IPCTL_STATS }; +#endif +static int kmib[] = { CTL_KERN, KERN_USRSTACK }; +static int hwmib[] = { CTL_HW, HW_USERMEM }; + +static int +getentropy_fallback(void *buf, size_t len) +{ + uint8_t results[SHA512_DIGEST_LENGTH]; + int save_errno = errno, e, pgs = getpagesize(), faster = 0, repeat; + static int cnt; + struct timespec ts; + struct timeval tv; + struct rusage ru; + sigset_t sigset; + struct stat st; + SHA512_CTX ctx; + static pid_t lastpid; + pid_t pid; + size_t i, ii, m; + char *p; +#if TARGET_OS_OSX + struct tcpstat tcpstat; + struct udpstat udpstat; + struct ipstat ipstat; +#endif + u_int64_t mach_time; + unsigned int idata; + void *addr; + + pid = getpid(); + if (lastpid == pid) { + faster = 1; + repeat = 2; + } else { + faster = 0; + lastpid = pid; + repeat = REPEAT; + } + for (i = 0; i < len; ) { + int j; + SHA512_Init(&ctx); + for (j = 0; j < repeat; j++) { + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + mach_time = mach_absolute_time(); + HD(mach_time); + + ii = sizeof(addr); + HX(sysctl(kmib, sizeof(kmib) / sizeof(kmib[0]), + &addr, &ii, NULL, 0) == -1, addr); + + ii = sizeof(idata); + HX(sysctl(hwmib, sizeof(hwmib) / sizeof(hwmib[0]), + &idata, &ii, NULL, 0) == -1, idata); + +#if TARGET_OS_OSX + ii = sizeof(tcpstat); + HX(sysctl(tcpmib, sizeof(tcpmib) / sizeof(tcpmib[0]), + &tcpstat, &ii, NULL, 0) == -1, tcpstat); + + ii = sizeof(udpstat); + HX(sysctl(udpmib, sizeof(udpmib) / sizeof(udpmib[0]), + &udpstat, &ii, NULL, 0) == -1, udpstat); + + ii = sizeof(ipstat); + HX(sysctl(ipmib, sizeof(ipmib) / sizeof(ipmib[0]), + &ipstat, &ii, NULL, 0) == -1, ipstat); +#endif + + HX((pid = getpid()) == -1, pid); + HX((pid = getsid(pid)) == -1, pid); + HX((pid = getppid()) == -1, pid); + HX((pid = getpgid(0)) == -1, pid); + HX((e = getpriority(0, 0)) == -1, e); + + if (!faster) { + ts.tv_sec = 0; + ts.tv_nsec = 1; + (void) nanosleep(&ts, NULL); + } + + HX(sigpending(&sigset) == -1, sigset); + HX(sigprocmask(SIG_BLOCK, NULL, &sigset) == -1, + sigset); + + HF(getentropy); /* an addr in this library */ + HF(printf); /* an addr in libc */ + p = (char 
*)&p; + HD(p); /* an addr on stack */ + p = (char *)&errno; + HD(p); /* the addr of errno */ + + if (i == 0) { + struct sockaddr_storage ss; + struct statvfs stvfs; + struct termios tios; + struct statfs stfs; + socklen_t ssl; + off_t off; + + /* + * Prime-sized mappings encourage fragmentation; + * thus exposing some address entropy. + */ + struct mm { + size_t npg; + void *p; + } mm[] = { + { 17, MAP_FAILED }, { 3, MAP_FAILED }, + { 11, MAP_FAILED }, { 2, MAP_FAILED }, + { 5, MAP_FAILED }, { 3, MAP_FAILED }, + { 7, MAP_FAILED }, { 1, MAP_FAILED }, + { 57, MAP_FAILED }, { 3, MAP_FAILED }, + { 131, MAP_FAILED }, { 1, MAP_FAILED }, + }; + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + HX(mm[m].p = mmap(NULL, + mm[m].npg * pgs, + PROT_READ|PROT_WRITE, + MAP_PRIVATE|MAP_ANON, -1, + (off_t)0), mm[m].p); + if (mm[m].p != MAP_FAILED) { + size_t mo; + + /* Touch some memory... */ + p = mm[m].p; + mo = cnt % + (mm[m].npg * pgs - 1); + p[mo] = 1; + cnt += (int)((long)(mm[m].p) + / pgs); + } + + /* Check cnts and times... */ + mach_time = mach_absolute_time(); + HD(mach_time); + cnt += (int)mach_time; + + HX((e = getrusage(RUSAGE_SELF, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + if (mm[m].p != MAP_FAILED) + munmap(mm[m].p, mm[m].npg * pgs); + mm[m].p = MAP_FAILED; + } + + HX(stat(".", &st) == -1, st); + HX(statvfs(".", &stvfs) == -1, stvfs); + HX(statfs(".", &stfs) == -1, stfs); + + HX(stat("/", &st) == -1, st); + HX(statvfs("/", &stvfs) == -1, stvfs); + HX(statfs("/", &stfs) == -1, stfs); + + HX((e = fstat(0, &st)) == -1, st); + if (e == -1) { + if (S_ISREG(st.st_mode) || + S_ISFIFO(st.st_mode) || + S_ISSOCK(st.st_mode)) { + HX(fstatvfs(0, &stvfs) == -1, + stvfs); + HX(fstatfs(0, &stfs) == -1, + stfs); + HX((off = lseek(0, (off_t)0, + SEEK_CUR)) < 0, off); + } + if (S_ISCHR(st.st_mode)) { + HX(tcgetattr(0, &tios) == -1, + tios); + } else if (S_ISSOCK(st.st_mode)) { + memset(&ss, 0, sizeof ss); + ssl = sizeof(ss); + HX(getpeername(0, + (void *)&ss, &ssl) == -1, + ss); + } + } + + HX((e = getrusage(RUSAGE_CHILDREN, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } else { + /* Subsequent hashes absorb previous result */ + HD(results); + } + + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HD(cnt); + } + + SHA512_Final(results, &ctx); + memcpy((char *)buf + i, results, min(sizeof(results), len - i)); + i += min(sizeof(results), len - i); + } + explicit_bzero(&ctx, sizeof ctx); + explicit_bzero(results, sizeof results); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_solaris.c b/code/crypto/compat/getentropy_solaris.c new file mode 100644 index 0000000..017a2cf --- /dev/null +++ b/code/crypto/compat/getentropy_solaris.c @@ -0,0 +1,447 @@ +/* $OpenBSD: getentropy_solaris.c,v 1.12 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014 Theo de Raadt <deraadt@openbsd.org> + * Copyright (c) 2014 Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <sys/types.h> +#include <sys/param.h> +#include <sys/ioctl.h> +#include <sys/resource.h> +#include <sys/syscall.h> +#include <sys/statvfs.h> +#include <sys/socket.h> +#include <sys/mount.h> +#include <sys/mman.h> +#include <sys/stat.h> +#include <sys/time.h> +#include <stdlib.h> +#include <stdint.h> +#include <stdio.h> +#include <link.h> +#include <termios.h> +#include <fcntl.h> +#include <signal.h> +#include <string.h> +#include <errno.h> +#include <unistd.h> +#include <time.h> +#include <sys/sha2.h> +#define SHA512_Init SHA512Init +#define SHA512_Update SHA512Update +#define SHA512_Final SHA512Final + +#include <sys/vfs.h> +#include <sys/statfs.h> +#include <sys/loadavg.h> + +#include <compat.h> + +#define REPEAT 5 +#define min(a, b) (((a) < (b)) ? (a) : (b)) + +#define HX(a, b) \ + do { \ + if ((a)) \ + HD(errno); \ + else \ + HD(b); \ + } while (0) + +#define HR(x, l) (SHA512_Update(&ctx, (char *)(x), (l))) +#define HD(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (x))) +#define HF(x) (SHA512_Update(&ctx, (char *)&(x), sizeof (void*))) + +int getentropy(void *buf, size_t len); + +static int gotdata(char *buf, size_t len); +static int getentropy_urandom(void *buf, size_t len, const char *path, + int devfscheck); +static int getentropy_fallback(void *buf, size_t len); +static int getentropy_phdr(struct dl_phdr_info *info, size_t size, void *data); + +int +getentropy(void *buf, size_t len) +{ + int ret = -1; + + if (len > 256) { + errno = EIO; + return (-1); + } + + /* + * Try to get entropy with /dev/urandom + * + * Solaris provides /dev/urandom as a symbolic link to + * /devices/pseudo/random@0:urandom which is provided by + * a devfs filesystem. Best practice is to use O_NOFOLLOW, + * so we must try the unpublished name directly. + * + * This can fail if the process is inside a chroot which lacks + * the devfs mount, or if file descriptors are exhausted. + */ + ret = getentropy_urandom(buf, len, + "/devices/pseudo/random@0:urandom", 1); + if (ret != -1) + return (ret); + + /* + * Unfortunately, chroot spaces on Solaris are sometimes setup + * with direct device node of the well-known /dev/urandom name + * (perhaps to avoid dragging all of devfs into the space). + * + * This can fail if the process is inside a chroot or if file + * descriptors are exhausted. + */ + ret = getentropy_urandom(buf, len, "/dev/urandom", 0); + if (ret != -1) + return (ret); + + /* + * Entropy collection via /dev/urandom has failed. + * + * No other API exists for collecting entropy, and we have + * no failsafe way to get it on Solaris that is not sensitive + * to resource exhaustion. + * + * We have very few options: + * - Even syslog_r is unsafe to call at this low level, so + * there is no way to alert the user or program. + * - Cannot call abort() because some systems have unsafe + * corefiles. + * - Could raise(SIGKILL) resulting in silent program termination. 
+ * - Return EIO, to hint that arc4random's stir function + * should raise(SIGKILL) + * - Do the best under the circumstances.... + * + * This code path exists to bring light to the issue that Solaris + * does not provide a failsafe API for entropy collection. + * + * We hope this demonstrates that Solaris should consider + * providing a new failsafe API which works in a chroot or + * when file descriptors are exhausted. + */ +#undef FAIL_INSTEAD_OF_TRYING_FALLBACK +#ifdef FAIL_INSTEAD_OF_TRYING_FALLBACK + raise(SIGKILL); +#endif + ret = getentropy_fallback(buf, len); + if (ret != -1) + return (ret); + + errno = EIO; + return (ret); +} + +/* + * Basic sanity checking; wish we could do better. + */ +static int +gotdata(char *buf, size_t len) +{ + char any_set = 0; + size_t i; + + for (i = 0; i < len; ++i) + any_set |= buf[i]; + if (any_set == 0) + return (-1); + return (0); +} + +static int +getentropy_urandom(void *buf, size_t len, const char *path, int devfscheck) +{ + struct stat st; + size_t i; + int fd, flags; + int save_errno = errno; + +start: + + flags = O_RDONLY; +#ifdef O_NOFOLLOW + flags |= O_NOFOLLOW; +#endif +#ifdef O_CLOEXEC + flags |= O_CLOEXEC; +#endif + fd = open(path, flags, 0); + if (fd == -1) { + if (errno == EINTR) + goto start; + goto nodevrandom; + } +#ifndef O_CLOEXEC + fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC); +#endif + + /* Lightly verify that the device node looks sane */ + if (fstat(fd, &st) == -1 || !S_ISCHR(st.st_mode) || + (devfscheck && (strcmp(st.st_fstype, "devfs") != 0))) { + close(fd); + goto nodevrandom; + } + for (i = 0; i < len; ) { + size_t wanted = len - i; + ssize_t ret = read(fd, (char *)buf + i, wanted); + + if (ret == -1) { + if (errno == EAGAIN || errno == EINTR) + continue; + close(fd); + goto nodevrandom; + } + i += ret; + } + close(fd); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } +nodevrandom: + errno = EIO; + return (-1); +} + +static const int cl[] = { + CLOCK_REALTIME, +#ifdef CLOCK_MONOTONIC + CLOCK_MONOTONIC, +#endif +#ifdef CLOCK_MONOTONIC_RAW + CLOCK_MONOTONIC_RAW, +#endif +#ifdef CLOCK_TAI + CLOCK_TAI, +#endif +#ifdef CLOCK_VIRTUAL + CLOCK_VIRTUAL, +#endif +#ifdef CLOCK_UPTIME + CLOCK_UPTIME, +#endif +#ifdef CLOCK_PROCESS_CPUTIME_ID + CLOCK_PROCESS_CPUTIME_ID, +#endif +#ifdef CLOCK_THREAD_CPUTIME_ID + CLOCK_THREAD_CPUTIME_ID, +#endif +}; + +static int +getentropy_phdr(struct dl_phdr_info *info, size_t size, void *data) +{ + SHA512_CTX *ctx = data; + + SHA512_Update(ctx, &info->dlpi_addr, sizeof (info->dlpi_addr)); + return (0); +} + +static int +getentropy_fallback(void *buf, size_t len) +{ + uint8_t results[SHA512_DIGEST_LENGTH]; + int save_errno = errno, e, pgs = getpagesize(), faster = 0, repeat; + static int cnt; + struct timespec ts; + struct timeval tv; + double loadavg[3]; + struct rusage ru; + sigset_t sigset; + struct stat st; + SHA512_CTX ctx; + static pid_t lastpid; + pid_t pid; + size_t i, ii, m; + char *p; + + pid = getpid(); + if (lastpid == pid) { + faster = 1; + repeat = 2; + } else { + faster = 0; + lastpid = pid; + repeat = REPEAT; + } + for (i = 0; i < len; ) { + int j; + SHA512_Init(&ctx); + for (j = 0; j < repeat; j++) { + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + dl_iterate_phdr(getentropy_phdr, &ctx); + + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); ii++) + HX(clock_gettime(cl[ii], &ts) == -1, ts); + + HX((pid = getpid()) == -1, pid); + HX((pid = getsid(pid)) == -1, pid); + HX((pid 
= getppid()) == -1, pid); + HX((pid = getpgid(0)) == -1, pid); + HX((e = getpriority(0, 0)) == -1, e); + HX((getloadavg(loadavg, 3) == -1), loadavg); + + if (!faster) { + ts.tv_sec = 0; + ts.tv_nsec = 1; + (void) nanosleep(&ts, NULL); + } + + HX(sigpending(&sigset) == -1, sigset); + HX(sigprocmask(SIG_BLOCK, NULL, &sigset) == -1, + sigset); + + HF(getentropy); /* an addr in this library */ + HF(printf); /* an addr in libc */ + p = (char *)&p; + HD(p); /* an addr on stack */ + p = (char *)&errno; + HD(p); /* the addr of errno */ + + if (i == 0) { + struct sockaddr_storage ss; + struct statvfs stvfs; + struct termios tios; + socklen_t ssl; + off_t off; + + /* + * Prime-sized mappings encourage fragmentation; + * thus exposing some address entropy. + */ + struct mm { + size_t npg; + void *p; + } mm[] = { + { 17, MAP_FAILED }, { 3, MAP_FAILED }, + { 11, MAP_FAILED }, { 2, MAP_FAILED }, + { 5, MAP_FAILED }, { 3, MAP_FAILED }, + { 7, MAP_FAILED }, { 1, MAP_FAILED }, + { 57, MAP_FAILED }, { 3, MAP_FAILED }, + { 131, MAP_FAILED }, { 1, MAP_FAILED }, + }; + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + HX(mm[m].p = mmap(NULL, + mm[m].npg * pgs, + PROT_READ|PROT_WRITE, + MAP_PRIVATE|MAP_ANON, -1, + (off_t)0), mm[m].p); + if (mm[m].p != MAP_FAILED) { + size_t mo; + + /* Touch some memory... */ + p = mm[m].p; + mo = cnt % + (mm[m].npg * pgs - 1); + p[mo] = 1; + cnt += (int)((long)(mm[m].p) + / pgs); + } + + /* Check cnts and times... */ + for (ii = 0; ii < sizeof(cl)/sizeof(cl[0]); + ii++) { + HX((e = clock_gettime(cl[ii], + &ts)) == -1, ts); + if (e != -1) + cnt += (int)ts.tv_nsec; + } + + HX((e = getrusage(RUSAGE_SELF, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } + + for (m = 0; m < sizeof mm/sizeof(mm[0]); m++) { + if (mm[m].p != MAP_FAILED) + munmap(mm[m].p, mm[m].npg * pgs); + mm[m].p = MAP_FAILED; + } + + HX(stat(".", &st) == -1, st); + HX(statvfs(".", &stvfs) == -1, stvfs); + + HX(stat("/", &st) == -1, st); + HX(statvfs("/", &stvfs) == -1, stvfs); + + HX((e = fstat(0, &st)) == -1, st); + if (e == -1) { + if (S_ISREG(st.st_mode) || + S_ISFIFO(st.st_mode) || + S_ISSOCK(st.st_mode)) { + HX(fstatvfs(0, &stvfs) == -1, + stvfs); + HX((off = lseek(0, (off_t)0, + SEEK_CUR)) < 0, off); + } + if (S_ISCHR(st.st_mode)) { + HX(tcgetattr(0, &tios) == -1, + tios); + } else if (S_ISSOCK(st.st_mode)) { + memset(&ss, 0, sizeof ss); + ssl = sizeof(ss); + HX(getpeername(0, + (void *)&ss, &ssl) == -1, + ss); + } + } + + HX((e = getrusage(RUSAGE_CHILDREN, + &ru)) == -1, ru); + if (e != -1) { + cnt += (int)ru.ru_utime.tv_sec; + cnt += (int)ru.ru_utime.tv_usec; + } + } else { + /* Subsequent hashes absorb previous result */ + HD(results); + } + + HX((e = gettimeofday(&tv, NULL)) == -1, tv); + if (e != -1) { + cnt += (int)tv.tv_sec; + cnt += (int)tv.tv_usec; + } + + HD(cnt); + } + SHA512_Final(results, &ctx); + memcpy((char *)buf + i, results, min(sizeof(results), len - i)); + i += min(sizeof(results), len - i); + } + explicit_bzero(&ctx, sizeof ctx); + explicit_bzero(results, sizeof results); + if (gotdata(buf, len) == 0) { + errno = save_errno; + return (0); /* satisfied */ + } + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/getentropy_win.c b/code/crypto/compat/getentropy_win.c new file mode 100644 index 0000000..2abeb27 --- /dev/null +++ b/code/crypto/compat/getentropy_win.c @@ -0,0 +1,59 @@ +/* $OpenBSD: getentropy_win.c,v 1.5 2016/08/07 03:27:21 tb Exp $ */ + +/* + * Copyright (c) 2014, Theo de Raadt <deraadt@openbsd.org> 
+ * Copyright (c) 2014, Bob Beck <beck@obtuse.com> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Emulation of getentropy(2) as documented at: + * http://man.openbsd.org/getentropy.2 + */ + +#include <windows.h> +#include <errno.h> +#include <stdint.h> +#include <sys/types.h> +#include <wincrypt.h> +#include <process.h> + +int getentropy(void *buf, size_t len); + +/* + * On Windows, CryptGenRandom is supposed to be a well-seeded + * cryptographically strong random number generator. + */ +int +getentropy(void *buf, size_t len) +{ + HCRYPTPROV provider; + + if (len > 256) { + errno = EIO; + return (-1); + } + + if (CryptAcquireContext(&provider, NULL, NULL, PROV_RSA_FULL, + CRYPT_VERIFYCONTEXT) == 0) + goto fail; + if (CryptGenRandom(provider, len, buf) == 0) { + CryptReleaseContext(provider, 0); + goto fail; + } + CryptReleaseContext(provider, 0); + return (0); + +fail: + errno = EIO; + return (-1); +} diff --git a/code/crypto/compat/timingsafe_bcmp.c b/code/crypto/compat/timingsafe_bcmp.c new file mode 100644 index 0000000..552e844 --- /dev/null +++ b/code/crypto/compat/timingsafe_bcmp.c @@ -0,0 +1,29 @@ +/* $OpenBSD: timingsafe_bcmp.c,v 1.3 2015/08/31 02:53:57 guenther Exp $ */ +/* + * Copyright (c) 2010 Damien Miller. All rights reserved. + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <string.h> + +int +timingsafe_bcmp(const void *b1, const void *b2, size_t n) +{ + const unsigned char *p1 = b1, *p2 = b2; + int ret = 0; + + for (; n > 0; n--) + ret |= *p1++ ^ *p2++; + return (ret != 0); +} diff --git a/code/crypto/compat/timingsafe_memcmp.c b/code/crypto/compat/timingsafe_memcmp.c new file mode 100644 index 0000000..bb210a3 --- /dev/null +++ b/code/crypto/compat/timingsafe_memcmp.c @@ -0,0 +1,46 @@ +/* $OpenBSD: timingsafe_memcmp.c,v 1.2 2015/08/31 02:53:57 guenther Exp $ */ +/* + * Copyright (c) 2014 Google Inc. + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <limits.h> +#include <string.h> + +int +timingsafe_memcmp(const void *b1, const void *b2, size_t len) +{ + const unsigned char *p1 = b1, *p2 = b2; + size_t i; + int res = 0, done = 0; + + for (i = 0; i < len; i++) { + /* lt is -1 if p1[i] < p2[i]; else 0. */ + int lt = (p1[i] - p2[i]) >> CHAR_BIT; + + /* gt is -1 if p1[i] > p2[i]; else 0. */ + int gt = (p2[i] - p1[i]) >> CHAR_BIT; + + /* cmp is 1 if p1[i] > p2[i]; -1 if p1[i] < p2[i]; else 0. */ + int cmp = lt - gt; + + /* set res = cmp if !done. */ + res |= cmp & ~done; + + /* set done if p1[i] != p2[i]. */ + done |= lt | gt; + } + + return (res); +} diff --git a/code/crypto/crypto.c b/code/crypto/crypto.c new file mode 100644 index 0000000..f57e8d8 --- /dev/null +++ b/code/crypto/crypto.c @@ -0,0 +1,95 @@ +#include <core.h> + +#include <string.h> +#include <curve25519.h> + +static int dh_mkpair(ecp_dh_public_t *pub, ecp_dh_private_t *priv, ecp_rng_t *rand_buf) { + int rv = X25519_keypair(*pub, *priv, rand_buf); + if (!rv) return ECP_ERR; + return ECP_OK; +} + +static int dh_shsec(ecp_aead_key_t *shared, ecp_dh_public_t *pub, ecp_dh_private_t *priv) { + int rv = X25519(*shared, *priv, *pub); + if (!rv) return ECP_ERR; + return ECP_OK; +} + +static unsigned char *dh_pub_get_buf(ecp_dh_public_t *p) { + return (unsigned char *)p; +} + +static void dh_pub_to_buf(unsigned char *b, ecp_dh_public_t *p) { + memcpy(b, p, ECP_ECDH_SIZE_KEY); +} + +static void dh_pub_from_buf(ecp_dh_public_t *p, unsigned char *b) { + memcpy(p, b, ECP_ECDH_SIZE_KEY); +} + +static int dh_pub_eq(unsigned char *p1, ecp_dh_public_t *p2) { + return !memcmp(p1, p2, ECP_ECDH_SIZE_KEY); +} + +static unsigned int dh_pub_hash_fn(unsigned char *p) { + return *((unsigned int *)p); +} + +static int dh_pub_hash_eq(unsigned char *p1, unsigned char *p2) { + return !memcmp(p1, p2, ECP_ECDH_SIZE_KEY); +} + +static ssize_t aead_enc(unsigned char *ct, size_t cl, unsigned char *pt, size_t pl, ecp_aead_key_t *k, unsigned char *n) { + size_t ol; + int rv = aead_chacha20_poly1305_seal(ct, &ol, cl, *k, ECP_AEAD_SIZE_TAG, n, ECP_AEAD_SIZE_NONCE, pt, pl, NULL, 0); + if (!rv) return ECP_ERR; + return ol; +} + +static ssize_t aead_dec(unsigned char *pt, size_t pl, unsigned char *ct, size_t cl, ecp_aead_key_t *k, unsigned char *n) { + size_t ol; + int rv = aead_chacha20_poly1305_open(pt, &ol, pl, *k, ECP_AEAD_SIZE_TAG, n, ECP_AEAD_SIZE_NONCE, ct, cl, NULL, 0); + if (!rv) return ECP_ERR; + return ol; +} + +int dsa_mkpair(ecp_dsa_public_t *pub, ecp_dsa_private_t *priv, ecp_rng_t *rand_buf) { + unsigned char key[2*ECP_DSA_SIZE_KEY]; + + int rv = ED25519_keypair(*pub, key, rand_buf); + if (!rv) return ECP_ERR; + memcpy(priv, key, ECP_DSA_SIZE_KEY); + return ECP_OK; +} + +int dsa_sign(unsigned char *sig, unsigned char *m, size_t ml, ecp_dsa_public_t *p, ecp_dsa_private_t *s) { + unsigned char key[2*ECP_DSA_SIZE_KEY]; + memcpy(key, s, ECP_DSA_SIZE_KEY); + memcpy(key+ECP_DSA_SIZE_KEY, p, ECP_DSA_SIZE_KEY); + + int rv = ED25519_sign(sig, m, ml, key); + if (!rv) return ECP_ERR; + 
return ECP_OK; +} + +int dsa_verify(unsigned char *m, size_t ml, unsigned char *sig, ecp_dsa_public_t *p) { + return ED25519_verify(m, ml, sig, *p); +} + +int ecp_crypto_init(ECPCryptoIface *cr) { + cr->init = 1; + cr->dh_mkpair = dh_mkpair; + cr->dh_shsec = dh_shsec; + cr->dh_pub_get_buf = dh_pub_get_buf; + cr->dh_pub_to_buf = dh_pub_to_buf; + cr->dh_pub_from_buf = dh_pub_from_buf; + cr->dh_pub_eq = dh_pub_eq; + cr->dh_pub_hash_fn = dh_pub_hash_fn; + cr->dh_pub_hash_eq = dh_pub_hash_eq; + cr->aead_enc = aead_enc; + cr->aead_dec = aead_dec; + cr->dsa_mkpair = dsa_mkpair; + cr->dsa_sign = dsa_sign; + cr->dsa_verify = dsa_verify; + return ECP_OK; +} diff --git a/code/crypto/crypto.h b/code/crypto/crypto.h new file mode 100644 index 0000000..7bd27d5 --- /dev/null +++ b/code/crypto/crypto.h @@ -0,0 +1,33 @@ +#include <stddef.h> + +#define CURVE25519_SIZE_KEY 32 +#define CHACHA20_SIZE_KEY 32 +#define POLY1305_SIZE_TAG 16 +#define CHACHA20_SIZE_NONCE 8 + +#define ECP_ECDH_SIZE_KEY 32 +#define ECP_AEAD_SIZE_KEY 32 +#define ECP_AEAD_SIZE_TAG 16 +#define ECP_AEAD_SIZE_NONCE 8 + +#define ECP_DSA_SIZE_KEY 32 + +typedef uint8_t ecp_dh_public_t[ECP_ECDH_SIZE_KEY]; +typedef uint8_t ecp_dh_private_t[ECP_ECDH_SIZE_KEY]; +typedef uint8_t ecp_aead_key_t[ECP_AEAD_SIZE_KEY]; +typedef uint8_t ecp_dsa_public_t[ECP_DSA_SIZE_KEY]; +typedef uint8_t ecp_dsa_private_t[ECP_DSA_SIZE_KEY]; + +int +aead_chacha20_poly1305_seal(unsigned char *out, size_t *out_len, + size_t max_out_len, unsigned char key[32], unsigned char tag_len, + const unsigned char *nonce, size_t nonce_len, + const unsigned char *in, size_t in_len, + const unsigned char *ad, size_t ad_len); + +int +aead_chacha20_poly1305_open(unsigned char *out, size_t *out_len, + size_t max_out_len, unsigned char key[32], unsigned char tag_len, + const unsigned char *nonce, size_t nonce_len, + const unsigned char *in, size_t in_len, + const unsigned char *ad, size_t ad_len); diff --git a/code/crypto/curve25519/Makefile b/code/crypto/curve25519/Makefile new file mode 100644 index 0000000..470d31e --- /dev/null +++ b/code/crypto/curve25519/Makefile @@ -0,0 +1,14 @@ +CFLAGS=-I../include -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -DED25519 -O3 $(PIC) +obj = curve25519.o curve25519-generic.o + +all: libcurve25519.a +dep: all + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +libcurve25519.a: $(obj) + $(AR) rcs $@ $(obj) + +clean: + rm -f *.o *.a diff --git a/code/crypto/curve25519/curve25519-generic.c b/code/crypto/curve25519/curve25519-generic.c new file mode 100644 index 0000000..e7373d2 --- /dev/null +++ b/code/crypto/curve25519/curve25519-generic.c @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2015, Google Inc. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * This code is mostly taken from the ref10 version of Ed25519 in SUPERCOP + * 20141124 (http://bench.cr.yp.to/supercop.html). 
That code is released as + * public domain but this file has the ISC license just to keep licencing + * simple. + * + * The field functions are shared by Ed25519 and X25519 where possible. + */ + +#include "curve25519_internal.h" + +void +x25519_scalar_mult(uint8_t out[32], const uint8_t scalar[32], + const uint8_t point[32]) +{ + x25519_scalar_mult_generic(out, scalar, point); +} diff --git a/code/crypto/curve25519/curve25519.c b/code/crypto/curve25519/curve25519.c new file mode 100644 index 0000000..f5f57e6 --- /dev/null +++ b/code/crypto/curve25519/curve25519.c @@ -0,0 +1,4940 @@ +/* + * Copyright (c) 2015, Google Inc. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* + * This code is mostly taken from the ref10 version of Ed25519 in SUPERCOP + * 20141124 (http://bench.cr.yp.to/supercop.html). That code is released as + * public domain but this file has the ISC license just to keep licencing + * simple. + * + * The field functions are shared by Ed25519 and X25519 where possible. + */ + +#include <stdlib.h> +#include <string.h> + +#include <compat.h> +#include <curve25519.h> + +#ifdef ED25519 +#include <sha.h> +#endif + +#include "curve25519_internal.h" + +static const int64_t kBottom25Bits = 0x1ffffffLL; +static const int64_t kBottom26Bits = 0x3ffffffLL; +static const int64_t kTop39Bits = 0xfffffffffe000000LL; +static const int64_t kTop38Bits = 0xfffffffffc000000LL; + +static uint64_t load_3(const uint8_t *in) { + uint64_t result; + result = (uint64_t)in[0]; + result |= ((uint64_t)in[1]) << 8; + result |= ((uint64_t)in[2]) << 16; + return result; +} + +static uint64_t load_4(const uint8_t *in) { + uint64_t result; + result = (uint64_t)in[0]; + result |= ((uint64_t)in[1]) << 8; + result |= ((uint64_t)in[2]) << 16; + result |= ((uint64_t)in[3]) << 24; + return result; +} + +static void fe_frombytes(fe h, const uint8_t *s) { + /* Ignores top bit of h. 
*/ + int64_t h0 = load_4(s); + int64_t h1 = load_3(s + 4) << 6; + int64_t h2 = load_3(s + 7) << 5; + int64_t h3 = load_3(s + 10) << 3; + int64_t h4 = load_3(s + 13) << 2; + int64_t h5 = load_4(s + 16); + int64_t h6 = load_3(s + 20) << 7; + int64_t h7 = load_3(s + 23) << 5; + int64_t h8 = load_3(s + 26) << 4; + int64_t h9 = (load_3(s + 29) & 8388607) << 2; + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + + carry9 = h9 + (1 << 24); h0 += (carry9 >> 25) * 19; h9 -= carry9 & kTop39Bits; + carry1 = h1 + (1 << 24); h2 += carry1 >> 25; h1 -= carry1 & kTop39Bits; + carry3 = h3 + (1 << 24); h4 += carry3 >> 25; h3 -= carry3 & kTop39Bits; + carry5 = h5 + (1 << 24); h6 += carry5 >> 25; h5 -= carry5 & kTop39Bits; + carry7 = h7 + (1 << 24); h8 += carry7 >> 25; h7 -= carry7 & kTop39Bits; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + carry2 = h2 + (1 << 25); h3 += carry2 >> 26; h2 -= carry2 & kTop38Bits; + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + carry6 = h6 + (1 << 25); h7 += carry6 >> 26; h6 -= carry6 & kTop38Bits; + carry8 = h8 + (1 << 25); h9 += carry8 >> 26; h8 -= carry8 & kTop38Bits; + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +/* Preconditions: + * |h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. + * + * Write p=2^255-19; q=floor(h/p). + * Basic claim: q = floor(2^(-255)(h + 19 2^(-25)h9 + 2^(-1))). + * + * Proof: + * Have |h|<=p so |q|<=1 so |19^2 2^(-255) q|<1/4. + * Also have |h-2^230 h9|<2^231 so |19 2^(-255)(h-2^230 h9)|<1/4. + * + * Write y=2^(-1)-19^2 2^(-255)q-19 2^(-255)(h-2^230 h9). + * Then 0<y<1. + * + * Write r=h-pq. + * Have 0<=r<=p-1=2^255-20. + * Thus 0<=r+19(2^-255)r<r+19(2^-255)2^255<=2^255-1. + * + * Write x=r+19(2^-255)r+y. + * Then 0<x<2^255 so floor(2^(-255)x) = 0 so floor(q+2^(-255)x) = q. + * + * Have q+2^(-255)x = 2^(-255)(h + 19 2^(-25) h9 + 2^(-1)) + * so floor(2^(-255)(h + 19 2^(-25) h9 + 2^(-1))) = q. */ +static void fe_tobytes(uint8_t *s, const fe h) { + int32_t h0 = h[0]; + int32_t h1 = h[1]; + int32_t h2 = h[2]; + int32_t h3 = h[3]; + int32_t h4 = h[4]; + int32_t h5 = h[5]; + int32_t h6 = h[6]; + int32_t h7 = h[7]; + int32_t h8 = h[8]; + int32_t h9 = h[9]; + int32_t q; + + q = (19 * h9 + (((int32_t) 1) << 24)) >> 25; + q = (h0 + q) >> 26; + q = (h1 + q) >> 25; + q = (h2 + q) >> 26; + q = (h3 + q) >> 25; + q = (h4 + q) >> 26; + q = (h5 + q) >> 25; + q = (h6 + q) >> 26; + q = (h7 + q) >> 25; + q = (h8 + q) >> 26; + q = (h9 + q) >> 25; + + /* Goal: Output h-(2^255-19)q, which is between 0 and 2^255-20. */ + h0 += 19 * q; + /* Goal: Output h-2^255 q, which is between 0 and 2^255-20. */ + + h1 += h0 >> 26; h0 &= kBottom26Bits; + h2 += h1 >> 25; h1 &= kBottom25Bits; + h3 += h2 >> 26; h2 &= kBottom26Bits; + h4 += h3 >> 25; h3 &= kBottom25Bits; + h5 += h4 >> 26; h4 &= kBottom26Bits; + h6 += h5 >> 25; h5 &= kBottom25Bits; + h7 += h6 >> 26; h6 &= kBottom26Bits; + h8 += h7 >> 25; h7 &= kBottom25Bits; + h9 += h8 >> 26; h8 &= kBottom26Bits; + h9 &= kBottom25Bits; + /* h10 = carry9 */ + + /* Goal: Output h0+...+2^255 h10-2^255 q, which is between 0 and 2^255-20. + * Have h0+...+2^230 h9 between 0 and 2^255-1; + * evidently 2^255 h10-2^255 q = 0. + * Goal: Output h0+...+2^230 h9. 
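fe_frombytes() above unpacks the 32-byte little-endian value into ten limbs of alternating 26- and 25-bit width (the "radix 2^25.5" representation used throughout this file), and fe_tobytes() is its inverse after the final reduction sketched in the comment. A small sketch of the limb layout:

#include <assert.h>
#include <stdio.h>

int main(void)
{
    int offset = 0;
    for (int i = 0; i < 10; i++) {
        int width = (i & 1) ? 25 : 26;   /* even limbs 26 bits, odd limbs 25 bits */
        printf("h%d: bits %d..%d\n", i, offset, offset + width - 1);
        offset += width;
    }
    assert(offset == 255);               /* 5*26 + 5*25 = 255 bits, enough for 2^255-19 */
    return 0;
}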
*/ + + s[0] = h0 >> 0; + s[1] = h0 >> 8; + s[2] = h0 >> 16; + s[3] = (h0 >> 24) | ((uint32_t)(h1) << 2); + s[4] = h1 >> 6; + s[5] = h1 >> 14; + s[6] = (h1 >> 22) | ((uint32_t)(h2) << 3); + s[7] = h2 >> 5; + s[8] = h2 >> 13; + s[9] = (h2 >> 21) | ((uint32_t)(h3) << 5); + s[10] = h3 >> 3; + s[11] = h3 >> 11; + s[12] = (h3 >> 19) | ((uint32_t)(h4) << 6); + s[13] = h4 >> 2; + s[14] = h4 >> 10; + s[15] = h4 >> 18; + s[16] = h5 >> 0; + s[17] = h5 >> 8; + s[18] = h5 >> 16; + s[19] = (h5 >> 24) | ((uint32_t)(h6) << 1); + s[20] = h6 >> 7; + s[21] = h6 >> 15; + s[22] = (h6 >> 23) | ((uint32_t)(h7) << 3); + s[23] = h7 >> 5; + s[24] = h7 >> 13; + s[25] = (h7 >> 21) | ((uint32_t)(h8) << 4); + s[26] = h8 >> 4; + s[27] = h8 >> 12; + s[28] = (h8 >> 20) | ((uint32_t)(h9) << 6); + s[29] = h9 >> 2; + s[30] = h9 >> 10; + s[31] = h9 >> 18; +} + +/* h = f */ +static void fe_copy(fe h, const fe f) { + memmove(h, f, sizeof(int32_t) * 10); +} + +/* h = 0 */ +static void fe_0(fe h) { memset(h, 0, sizeof(int32_t) * 10); } + +/* h = 1 */ +static void fe_1(fe h) { + memset(h, 0, sizeof(int32_t) * 10); + h[0] = 1; +} + +/* h = f + g + * Can overlap h with f or g. + * + * Preconditions: + * |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. + * |g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. + * + * Postconditions: + * |h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ +static void fe_add(fe h, const fe f, const fe g) { + unsigned i; + for (i = 0; i < 10; i++) { + h[i] = f[i] + g[i]; + } +} + +/* h = f - g + * Can overlap h with f or g. + * + * Preconditions: + * |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. + * |g| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. + * + * Postconditions: + * |h| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ +static void fe_sub(fe h, const fe f, const fe g) { + unsigned i; + for (i = 0; i < 10; i++) { + h[i] = f[i] - g[i]; + } +} + +/* h = f * g + * Can overlap h with f or g. + * + * Preconditions: + * |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. + * |g| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. + * + * Postconditions: + * |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc. + * + * Notes on implementation strategy: + * + * Using schoolbook multiplication. + * Karatsuba would save a little in some cost models. + * + * Most multiplications by 2 and 19 are 32-bit precomputations; + * cheaper than 64-bit postcomputations. + * + * There is one remaining multiplication by 19 in the carry chain; + * one *19 precomputation can be merged into this, + * but the resulting data flow is considerably less clean. + * + * There are 12 carries below. + * 10 of them are 2-way parallelizable and vectorizable. + * Can get away with 11 carries, but then data flow is much deeper. + * + * With tighter constraints on inputs can squeeze carries into int32. 
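A short sketch of where the pre-multiplied constants in fe_mul() below come from: limb i carries weight 2^ceil(25.5*i), so the product f_i*g_j misses its target slot's weight by a factor of 2 exactly when i and j are both odd, and any term with i+j >= 10 wraps past 2^255, which is congruent to 19 mod p. That is why, for example, f1g9 is scaled by 38 = 2*19.

#include <assert.h>

static int w(int i) { return (51 * i + 1) / 2; }    /* bit offset of limb i: ceil(25.5*i) */

int main(void)
{
    for (int i = 0; i < 10; i++) {
        for (int j = 0; j < 10; j++) {
            int wrap = (i + j >= 10);                     /* folds back with a factor 19 */
            int slot = wrap ? i + j - 10 : i + j;
            int two  = w(i) + w(j) - (w(slot) + (wrap ? 255 : 0));
            int coeff = (two ? 2 : 1) * (wrap ? 19 : 1);

            assert(two == 0 || two == 1);                 /* only ever an extra factor of 2 */
            if (i == 1 && j == 9) assert(coeff == 38);    /* f1g9_38 */
            if (i == 2 && j == 8) assert(coeff == 19);    /* f2g8_19 */
            if (i == 3 && j == 4) assert(coeff == 1);     /* f3g4 */
        }
    }
    return 0;
}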
*/ +static void fe_mul(fe h, const fe f, const fe g) { + int32_t f0 = f[0]; + int32_t f1 = f[1]; + int32_t f2 = f[2]; + int32_t f3 = f[3]; + int32_t f4 = f[4]; + int32_t f5 = f[5]; + int32_t f6 = f[6]; + int32_t f7 = f[7]; + int32_t f8 = f[8]; + int32_t f9 = f[9]; + int32_t g0 = g[0]; + int32_t g1 = g[1]; + int32_t g2 = g[2]; + int32_t g3 = g[3]; + int32_t g4 = g[4]; + int32_t g5 = g[5]; + int32_t g6 = g[6]; + int32_t g7 = g[7]; + int32_t g8 = g[8]; + int32_t g9 = g[9]; + int32_t g1_19 = 19 * g1; /* 1.959375*2^29 */ + int32_t g2_19 = 19 * g2; /* 1.959375*2^30; still ok */ + int32_t g3_19 = 19 * g3; + int32_t g4_19 = 19 * g4; + int32_t g5_19 = 19 * g5; + int32_t g6_19 = 19 * g6; + int32_t g7_19 = 19 * g7; + int32_t g8_19 = 19 * g8; + int32_t g9_19 = 19 * g9; + int32_t f1_2 = 2 * f1; + int32_t f3_2 = 2 * f3; + int32_t f5_2 = 2 * f5; + int32_t f7_2 = 2 * f7; + int32_t f9_2 = 2 * f9; + int64_t f0g0 = f0 * (int64_t) g0; + int64_t f0g1 = f0 * (int64_t) g1; + int64_t f0g2 = f0 * (int64_t) g2; + int64_t f0g3 = f0 * (int64_t) g3; + int64_t f0g4 = f0 * (int64_t) g4; + int64_t f0g5 = f0 * (int64_t) g5; + int64_t f0g6 = f0 * (int64_t) g6; + int64_t f0g7 = f0 * (int64_t) g7; + int64_t f0g8 = f0 * (int64_t) g8; + int64_t f0g9 = f0 * (int64_t) g9; + int64_t f1g0 = f1 * (int64_t) g0; + int64_t f1g1_2 = f1_2 * (int64_t) g1; + int64_t f1g2 = f1 * (int64_t) g2; + int64_t f1g3_2 = f1_2 * (int64_t) g3; + int64_t f1g4 = f1 * (int64_t) g4; + int64_t f1g5_2 = f1_2 * (int64_t) g5; + int64_t f1g6 = f1 * (int64_t) g6; + int64_t f1g7_2 = f1_2 * (int64_t) g7; + int64_t f1g8 = f1 * (int64_t) g8; + int64_t f1g9_38 = f1_2 * (int64_t) g9_19; + int64_t f2g0 = f2 * (int64_t) g0; + int64_t f2g1 = f2 * (int64_t) g1; + int64_t f2g2 = f2 * (int64_t) g2; + int64_t f2g3 = f2 * (int64_t) g3; + int64_t f2g4 = f2 * (int64_t) g4; + int64_t f2g5 = f2 * (int64_t) g5; + int64_t f2g6 = f2 * (int64_t) g6; + int64_t f2g7 = f2 * (int64_t) g7; + int64_t f2g8_19 = f2 * (int64_t) g8_19; + int64_t f2g9_19 = f2 * (int64_t) g9_19; + int64_t f3g0 = f3 * (int64_t) g0; + int64_t f3g1_2 = f3_2 * (int64_t) g1; + int64_t f3g2 = f3 * (int64_t) g2; + int64_t f3g3_2 = f3_2 * (int64_t) g3; + int64_t f3g4 = f3 * (int64_t) g4; + int64_t f3g5_2 = f3_2 * (int64_t) g5; + int64_t f3g6 = f3 * (int64_t) g6; + int64_t f3g7_38 = f3_2 * (int64_t) g7_19; + int64_t f3g8_19 = f3 * (int64_t) g8_19; + int64_t f3g9_38 = f3_2 * (int64_t) g9_19; + int64_t f4g0 = f4 * (int64_t) g0; + int64_t f4g1 = f4 * (int64_t) g1; + int64_t f4g2 = f4 * (int64_t) g2; + int64_t f4g3 = f4 * (int64_t) g3; + int64_t f4g4 = f4 * (int64_t) g4; + int64_t f4g5 = f4 * (int64_t) g5; + int64_t f4g6_19 = f4 * (int64_t) g6_19; + int64_t f4g7_19 = f4 * (int64_t) g7_19; + int64_t f4g8_19 = f4 * (int64_t) g8_19; + int64_t f4g9_19 = f4 * (int64_t) g9_19; + int64_t f5g0 = f5 * (int64_t) g0; + int64_t f5g1_2 = f5_2 * (int64_t) g1; + int64_t f5g2 = f5 * (int64_t) g2; + int64_t f5g3_2 = f5_2 * (int64_t) g3; + int64_t f5g4 = f5 * (int64_t) g4; + int64_t f5g5_38 = f5_2 * (int64_t) g5_19; + int64_t f5g6_19 = f5 * (int64_t) g6_19; + int64_t f5g7_38 = f5_2 * (int64_t) g7_19; + int64_t f5g8_19 = f5 * (int64_t) g8_19; + int64_t f5g9_38 = f5_2 * (int64_t) g9_19; + int64_t f6g0 = f6 * (int64_t) g0; + int64_t f6g1 = f6 * (int64_t) g1; + int64_t f6g2 = f6 * (int64_t) g2; + int64_t f6g3 = f6 * (int64_t) g3; + int64_t f6g4_19 = f6 * (int64_t) g4_19; + int64_t f6g5_19 = f6 * (int64_t) g5_19; + int64_t f6g6_19 = f6 * (int64_t) g6_19; + int64_t f6g7_19 = f6 * (int64_t) g7_19; + int64_t f6g8_19 = f6 * (int64_t) g8_19; + int64_t 
f6g9_19 = f6 * (int64_t) g9_19; + int64_t f7g0 = f7 * (int64_t) g0; + int64_t f7g1_2 = f7_2 * (int64_t) g1; + int64_t f7g2 = f7 * (int64_t) g2; + int64_t f7g3_38 = f7_2 * (int64_t) g3_19; + int64_t f7g4_19 = f7 * (int64_t) g4_19; + int64_t f7g5_38 = f7_2 * (int64_t) g5_19; + int64_t f7g6_19 = f7 * (int64_t) g6_19; + int64_t f7g7_38 = f7_2 * (int64_t) g7_19; + int64_t f7g8_19 = f7 * (int64_t) g8_19; + int64_t f7g9_38 = f7_2 * (int64_t) g9_19; + int64_t f8g0 = f8 * (int64_t) g0; + int64_t f8g1 = f8 * (int64_t) g1; + int64_t f8g2_19 = f8 * (int64_t) g2_19; + int64_t f8g3_19 = f8 * (int64_t) g3_19; + int64_t f8g4_19 = f8 * (int64_t) g4_19; + int64_t f8g5_19 = f8 * (int64_t) g5_19; + int64_t f8g6_19 = f8 * (int64_t) g6_19; + int64_t f8g7_19 = f8 * (int64_t) g7_19; + int64_t f8g8_19 = f8 * (int64_t) g8_19; + int64_t f8g9_19 = f8 * (int64_t) g9_19; + int64_t f9g0 = f9 * (int64_t) g0; + int64_t f9g1_38 = f9_2 * (int64_t) g1_19; + int64_t f9g2_19 = f9 * (int64_t) g2_19; + int64_t f9g3_38 = f9_2 * (int64_t) g3_19; + int64_t f9g4_19 = f9 * (int64_t) g4_19; + int64_t f9g5_38 = f9_2 * (int64_t) g5_19; + int64_t f9g6_19 = f9 * (int64_t) g6_19; + int64_t f9g7_38 = f9_2 * (int64_t) g7_19; + int64_t f9g8_19 = f9 * (int64_t) g8_19; + int64_t f9g9_38 = f9_2 * (int64_t) g9_19; + int64_t h0 = f0g0+f1g9_38+f2g8_19+f3g7_38+f4g6_19+f5g5_38+f6g4_19+f7g3_38+f8g2_19+f9g1_38; + int64_t h1 = f0g1+f1g0 +f2g9_19+f3g8_19+f4g7_19+f5g6_19+f6g5_19+f7g4_19+f8g3_19+f9g2_19; + int64_t h2 = f0g2+f1g1_2 +f2g0 +f3g9_38+f4g8_19+f5g7_38+f6g6_19+f7g5_38+f8g4_19+f9g3_38; + int64_t h3 = f0g3+f1g2 +f2g1 +f3g0 +f4g9_19+f5g8_19+f6g7_19+f7g6_19+f8g5_19+f9g4_19; + int64_t h4 = f0g4+f1g3_2 +f2g2 +f3g1_2 +f4g0 +f5g9_38+f6g8_19+f7g7_38+f8g6_19+f9g5_38; + int64_t h5 = f0g5+f1g4 +f2g3 +f3g2 +f4g1 +f5g0 +f6g9_19+f7g8_19+f8g7_19+f9g6_19; + int64_t h6 = f0g6+f1g5_2 +f2g4 +f3g3_2 +f4g2 +f5g1_2 +f6g0 +f7g9_38+f8g8_19+f9g7_38; + int64_t h7 = f0g7+f1g6 +f2g5 +f3g4 +f4g3 +f5g2 +f6g1 +f7g0 +f8g9_19+f9g8_19; + int64_t h8 = f0g8+f1g7_2 +f2g6 +f3g5_2 +f4g4 +f5g3_2 +f6g2 +f7g1_2 +f8g0 +f9g9_38; + int64_t h9 = f0g9+f1g8 +f2g7 +f3g6 +f4g5 +f5g4 +f6g3 +f7g2 +f8g1 +f9g0 ; + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + + /* |h0| <= (1.65*1.65*2^52*(1+19+19+19+19)+1.65*1.65*2^50*(38+38+38+38+38)) + * i.e. |h0| <= 1.4*2^60; narrower ranges for h2, h4, h6, h8 + * |h1| <= (1.65*1.65*2^51*(1+1+19+19+19+19+19+19+19+19)) + * i.e. 
|h1| <= 1.7*2^59; narrower ranges for h3, h5, h7, h9 */ + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + /* |h0| <= 2^25 */ + /* |h4| <= 2^25 */ + /* |h1| <= 1.71*2^59 */ + /* |h5| <= 1.71*2^59 */ + + carry1 = h1 + (1 << 24); h2 += carry1 >> 25; h1 -= carry1 & kTop39Bits; + carry5 = h5 + (1 << 24); h6 += carry5 >> 25; h5 -= carry5 & kTop39Bits; + /* |h1| <= 2^24; from now on fits into int32 */ + /* |h5| <= 2^24; from now on fits into int32 */ + /* |h2| <= 1.41*2^60 */ + /* |h6| <= 1.41*2^60 */ + + carry2 = h2 + (1 << 25); h3 += carry2 >> 26; h2 -= carry2 & kTop38Bits; + carry6 = h6 + (1 << 25); h7 += carry6 >> 26; h6 -= carry6 & kTop38Bits; + /* |h2| <= 2^25; from now on fits into int32 unchanged */ + /* |h6| <= 2^25; from now on fits into int32 unchanged */ + /* |h3| <= 1.71*2^59 */ + /* |h7| <= 1.71*2^59 */ + + carry3 = h3 + (1 << 24); h4 += carry3 >> 25; h3 -= carry3 & kTop39Bits; + carry7 = h7 + (1 << 24); h8 += carry7 >> 25; h7 -= carry7 & kTop39Bits; + /* |h3| <= 2^24; from now on fits into int32 unchanged */ + /* |h7| <= 2^24; from now on fits into int32 unchanged */ + /* |h4| <= 1.72*2^34 */ + /* |h8| <= 1.41*2^60 */ + + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + carry8 = h8 + (1 << 25); h9 += carry8 >> 26; h8 -= carry8 & kTop38Bits; + /* |h4| <= 2^25; from now on fits into int32 unchanged */ + /* |h8| <= 2^25; from now on fits into int32 unchanged */ + /* |h5| <= 1.01*2^24 */ + /* |h9| <= 1.71*2^59 */ + + carry9 = h9 + (1 << 24); h0 += (carry9 >> 25) * 19; h9 -= carry9 & kTop39Bits; + /* |h9| <= 2^24; from now on fits into int32 unchanged */ + /* |h0| <= 1.1*2^39 */ + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + /* |h0| <= 2^25; from now on fits into int32 unchanged */ + /* |h1| <= 1.01*2^24 */ + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +/* h = f * f + * Can overlap h with f. + * + * Preconditions: + * |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. + * + * Postconditions: + * |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc. + * + * See fe_mul.c for discussion of implementation strategy. 
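The carry chain just above repeats one idiom: carry = h + (1 << 25); next += carry >> 26; h -= carry & kTop38Bits. It rounds h to the nearest multiple of 2^26 and moves the quotient into the next limb (the 25-bit limbs use 1 << 24, >> 25 and kTop39Bits), and the quotient leaving h9 is multiplied by 19 before being folded into h0, since 2^255 = 19 (mod 2^255 - 19). A standalone check of the idiom, again assuming arithmetic right shift:

#include <assert.h>
#include <stdint.h>

static const int64_t kTop38Bits = 0xfffffffffc000000LL;

int main(void)
{
    int64_t samples[] = { 0, 1, -1, (1LL << 40) + 12345, -((1LL << 40) + 12345) };
    for (unsigned i = 0; i < sizeof(samples) / sizeof(samples[0]); i++) {
        int64_t h = samples[i];
        int64_t carry = h + (1 << 25);
        int64_t moved = carry >> 26;               /* passed on to the next limb */
        int64_t kept  = h - (carry & kTop38Bits);  /* what remains in this limb */

        assert(h == moved * (1LL << 26) + kept);   /* nothing is lost */
        assert(kept >= -(1LL << 25) && kept < (1LL << 25));
    }
    return 0;
}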
*/ +static void fe_sq(fe h, const fe f) { + int32_t f0 = f[0]; + int32_t f1 = f[1]; + int32_t f2 = f[2]; + int32_t f3 = f[3]; + int32_t f4 = f[4]; + int32_t f5 = f[5]; + int32_t f6 = f[6]; + int32_t f7 = f[7]; + int32_t f8 = f[8]; + int32_t f9 = f[9]; + int32_t f0_2 = 2 * f0; + int32_t f1_2 = 2 * f1; + int32_t f2_2 = 2 * f2; + int32_t f3_2 = 2 * f3; + int32_t f4_2 = 2 * f4; + int32_t f5_2 = 2 * f5; + int32_t f6_2 = 2 * f6; + int32_t f7_2 = 2 * f7; + int32_t f5_38 = 38 * f5; /* 1.959375*2^30 */ + int32_t f6_19 = 19 * f6; /* 1.959375*2^30 */ + int32_t f7_38 = 38 * f7; /* 1.959375*2^30 */ + int32_t f8_19 = 19 * f8; /* 1.959375*2^30 */ + int32_t f9_38 = 38 * f9; /* 1.959375*2^30 */ + int64_t f0f0 = f0 * (int64_t) f0; + int64_t f0f1_2 = f0_2 * (int64_t) f1; + int64_t f0f2_2 = f0_2 * (int64_t) f2; + int64_t f0f3_2 = f0_2 * (int64_t) f3; + int64_t f0f4_2 = f0_2 * (int64_t) f4; + int64_t f0f5_2 = f0_2 * (int64_t) f5; + int64_t f0f6_2 = f0_2 * (int64_t) f6; + int64_t f0f7_2 = f0_2 * (int64_t) f7; + int64_t f0f8_2 = f0_2 * (int64_t) f8; + int64_t f0f9_2 = f0_2 * (int64_t) f9; + int64_t f1f1_2 = f1_2 * (int64_t) f1; + int64_t f1f2_2 = f1_2 * (int64_t) f2; + int64_t f1f3_4 = f1_2 * (int64_t) f3_2; + int64_t f1f4_2 = f1_2 * (int64_t) f4; + int64_t f1f5_4 = f1_2 * (int64_t) f5_2; + int64_t f1f6_2 = f1_2 * (int64_t) f6; + int64_t f1f7_4 = f1_2 * (int64_t) f7_2; + int64_t f1f8_2 = f1_2 * (int64_t) f8; + int64_t f1f9_76 = f1_2 * (int64_t) f9_38; + int64_t f2f2 = f2 * (int64_t) f2; + int64_t f2f3_2 = f2_2 * (int64_t) f3; + int64_t f2f4_2 = f2_2 * (int64_t) f4; + int64_t f2f5_2 = f2_2 * (int64_t) f5; + int64_t f2f6_2 = f2_2 * (int64_t) f6; + int64_t f2f7_2 = f2_2 * (int64_t) f7; + int64_t f2f8_38 = f2_2 * (int64_t) f8_19; + int64_t f2f9_38 = f2 * (int64_t) f9_38; + int64_t f3f3_2 = f3_2 * (int64_t) f3; + int64_t f3f4_2 = f3_2 * (int64_t) f4; + int64_t f3f5_4 = f3_2 * (int64_t) f5_2; + int64_t f3f6_2 = f3_2 * (int64_t) f6; + int64_t f3f7_76 = f3_2 * (int64_t) f7_38; + int64_t f3f8_38 = f3_2 * (int64_t) f8_19; + int64_t f3f9_76 = f3_2 * (int64_t) f9_38; + int64_t f4f4 = f4 * (int64_t) f4; + int64_t f4f5_2 = f4_2 * (int64_t) f5; + int64_t f4f6_38 = f4_2 * (int64_t) f6_19; + int64_t f4f7_38 = f4 * (int64_t) f7_38; + int64_t f4f8_38 = f4_2 * (int64_t) f8_19; + int64_t f4f9_38 = f4 * (int64_t) f9_38; + int64_t f5f5_38 = f5 * (int64_t) f5_38; + int64_t f5f6_38 = f5_2 * (int64_t) f6_19; + int64_t f5f7_76 = f5_2 * (int64_t) f7_38; + int64_t f5f8_38 = f5_2 * (int64_t) f8_19; + int64_t f5f9_76 = f5_2 * (int64_t) f9_38; + int64_t f6f6_19 = f6 * (int64_t) f6_19; + int64_t f6f7_38 = f6 * (int64_t) f7_38; + int64_t f6f8_38 = f6_2 * (int64_t) f8_19; + int64_t f6f9_38 = f6 * (int64_t) f9_38; + int64_t f7f7_38 = f7 * (int64_t) f7_38; + int64_t f7f8_38 = f7_2 * (int64_t) f8_19; + int64_t f7f9_76 = f7_2 * (int64_t) f9_38; + int64_t f8f8_19 = f8 * (int64_t) f8_19; + int64_t f8f9_38 = f8 * (int64_t) f9_38; + int64_t f9f9_38 = f9 * (int64_t) f9_38; + int64_t h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; + int64_t h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; + int64_t h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; + int64_t h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; + int64_t h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; + int64_t h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; + int64_t h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; + int64_t h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; + int64_t h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; + int64_t h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; + int64_t 
carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + + carry1 = h1 + (1 << 24); h2 += carry1 >> 25; h1 -= carry1 & kTop39Bits; + carry5 = h5 + (1 << 24); h6 += carry5 >> 25; h5 -= carry5 & kTop39Bits; + + carry2 = h2 + (1 << 25); h3 += carry2 >> 26; h2 -= carry2 & kTop38Bits; + carry6 = h6 + (1 << 25); h7 += carry6 >> 26; h6 -= carry6 & kTop38Bits; + + carry3 = h3 + (1 << 24); h4 += carry3 >> 25; h3 -= carry3 & kTop39Bits; + carry7 = h7 + (1 << 24); h8 += carry7 >> 25; h7 -= carry7 & kTop39Bits; + + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + carry8 = h8 + (1 << 25); h9 += carry8 >> 26; h8 -= carry8 & kTop38Bits; + + carry9 = h9 + (1 << 24); h0 += (carry9 >> 25) * 19; h9 -= carry9 & kTop39Bits; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +static void fe_invert(fe out, const fe z) { + fe t0; + fe t1; + fe t2; + fe t3; + int i; + + fe_sq(t0, z); + for (i = 1; i < 1; ++i) { + fe_sq(t0, t0); + } + fe_sq(t1, t0); + for (i = 1; i < 2; ++i) { + fe_sq(t1, t1); + } + fe_mul(t1, z, t1); + fe_mul(t0, t0, t1); + fe_sq(t2, t0); + for (i = 1; i < 1; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t1, t2); + fe_sq(t2, t1); + for (i = 1; i < 5; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t2, t1); + fe_sq(t2, t1); + for (i = 1; i < 10; ++i) { + fe_sq(t2, t2); + } + fe_mul(t2, t2, t1); + fe_sq(t3, t2); + for (i = 1; i < 20; ++i) { + fe_sq(t3, t3); + } + fe_mul(t2, t3, t2); + fe_sq(t2, t2); + for (i = 1; i < 10; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t2, t1); + fe_sq(t2, t1); + for (i = 1; i < 50; ++i) { + fe_sq(t2, t2); + } + fe_mul(t2, t2, t1); + fe_sq(t3, t2); + for (i = 1; i < 100; ++i) { + fe_sq(t3, t3); + } + fe_mul(t2, t3, t2); + fe_sq(t2, t2); + for (i = 1; i < 50; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t2, t1); + fe_sq(t1, t1); + for (i = 1; i < 5; ++i) { + fe_sq(t1, t1); + } + fe_mul(out, t1, t0); +} + +/* h = -f + * + * Preconditions: + * |f| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. + * + * Postconditions: + * |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. */ +static void fe_neg(fe h, const fe f) { + unsigned i; + for (i = 0; i < 10; i++) { + h[i] = -f[i]; + } +} + +/* Replace (f,g) with (g,g) if b == 1; + * replace (f,g) with (f,g) if b == 0. + * + * Preconditions: b in {0,1}. */ +static void fe_cmov(fe f, const fe g, unsigned b) { + b = 0-b; + unsigned i; + for (i = 0; i < 10; i++) { + int32_t x = f[i] ^ g[i]; + x &= b; + f[i] ^= x; + } +} + +/* return 0 if f == 0 + * return 1 if f != 0 + * + * Preconditions: + * |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ +static int fe_isnonzero(const fe f) { + uint8_t s[32]; + fe_tobytes(s, f); + + static const uint8_t zero[32] = {0}; + return timingsafe_memcmp(s, zero, sizeof(zero)) != 0; +} + +/* return 1 if f is in {1,3,5,...,q-2} + * return 0 if f is in {0,2,4,...,q-1} + * + * Preconditions: + * |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. */ +static int fe_isnegative(const fe f) { + uint8_t s[32]; + fe_tobytes(s, f); + return s[0] & 1; +} + +/* h = 2 * f * f + * Can overlap h with f. 
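fe_invert() above raises z to the power p-2 with p = 2^255 - 19, i.e. it relies on Fermat's little theorem (z^(p-2) = z^(-1) mod p), and it does so through a fixed ladder of squarings and multiplications so the work is independent of the value being inverted. A hedged sketch of the principle on a toy prime (the real code never leaves the fe limb representation):

#include <assert.h>
#include <stdint.h>

static uint64_t modpow(uint64_t a, uint64_t e, uint64_t p)
{
    uint64_t r = 1;
    a %= p;
    while (e) {
        if (e & 1) r = (r * a) % p;   /* p < 2^32 keeps every product below 2^64 */
        a = (a * a) % p;
        e >>= 1;
    }
    return r;
}

int main(void)
{
    const uint64_t p = 2147483647;    /* 2^31 - 1, a Mersenne-prime stand-in for 2^255 - 19 */
    for (uint64_t z = 2; z < 50; z++) {
        uint64_t inv = modpow(z, p - 2, p);
        assert((z * inv) % p == 1);   /* Fermat inversion */
    }
    return 0;
}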
+ * + * Preconditions: + * |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc. + * + * Postconditions: + * |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc. + * + * See fe_mul.c for discussion of implementation strategy. */ +static void fe_sq2(fe h, const fe f) { + int32_t f0 = f[0]; + int32_t f1 = f[1]; + int32_t f2 = f[2]; + int32_t f3 = f[3]; + int32_t f4 = f[4]; + int32_t f5 = f[5]; + int32_t f6 = f[6]; + int32_t f7 = f[7]; + int32_t f8 = f[8]; + int32_t f9 = f[9]; + int32_t f0_2 = 2 * f0; + int32_t f1_2 = 2 * f1; + int32_t f2_2 = 2 * f2; + int32_t f3_2 = 2 * f3; + int32_t f4_2 = 2 * f4; + int32_t f5_2 = 2 * f5; + int32_t f6_2 = 2 * f6; + int32_t f7_2 = 2 * f7; + int32_t f5_38 = 38 * f5; /* 1.959375*2^30 */ + int32_t f6_19 = 19 * f6; /* 1.959375*2^30 */ + int32_t f7_38 = 38 * f7; /* 1.959375*2^30 */ + int32_t f8_19 = 19 * f8; /* 1.959375*2^30 */ + int32_t f9_38 = 38 * f9; /* 1.959375*2^30 */ + int64_t f0f0 = f0 * (int64_t) f0; + int64_t f0f1_2 = f0_2 * (int64_t) f1; + int64_t f0f2_2 = f0_2 * (int64_t) f2; + int64_t f0f3_2 = f0_2 * (int64_t) f3; + int64_t f0f4_2 = f0_2 * (int64_t) f4; + int64_t f0f5_2 = f0_2 * (int64_t) f5; + int64_t f0f6_2 = f0_2 * (int64_t) f6; + int64_t f0f7_2 = f0_2 * (int64_t) f7; + int64_t f0f8_2 = f0_2 * (int64_t) f8; + int64_t f0f9_2 = f0_2 * (int64_t) f9; + int64_t f1f1_2 = f1_2 * (int64_t) f1; + int64_t f1f2_2 = f1_2 * (int64_t) f2; + int64_t f1f3_4 = f1_2 * (int64_t) f3_2; + int64_t f1f4_2 = f1_2 * (int64_t) f4; + int64_t f1f5_4 = f1_2 * (int64_t) f5_2; + int64_t f1f6_2 = f1_2 * (int64_t) f6; + int64_t f1f7_4 = f1_2 * (int64_t) f7_2; + int64_t f1f8_2 = f1_2 * (int64_t) f8; + int64_t f1f9_76 = f1_2 * (int64_t) f9_38; + int64_t f2f2 = f2 * (int64_t) f2; + int64_t f2f3_2 = f2_2 * (int64_t) f3; + int64_t f2f4_2 = f2_2 * (int64_t) f4; + int64_t f2f5_2 = f2_2 * (int64_t) f5; + int64_t f2f6_2 = f2_2 * (int64_t) f6; + int64_t f2f7_2 = f2_2 * (int64_t) f7; + int64_t f2f8_38 = f2_2 * (int64_t) f8_19; + int64_t f2f9_38 = f2 * (int64_t) f9_38; + int64_t f3f3_2 = f3_2 * (int64_t) f3; + int64_t f3f4_2 = f3_2 * (int64_t) f4; + int64_t f3f5_4 = f3_2 * (int64_t) f5_2; + int64_t f3f6_2 = f3_2 * (int64_t) f6; + int64_t f3f7_76 = f3_2 * (int64_t) f7_38; + int64_t f3f8_38 = f3_2 * (int64_t) f8_19; + int64_t f3f9_76 = f3_2 * (int64_t) f9_38; + int64_t f4f4 = f4 * (int64_t) f4; + int64_t f4f5_2 = f4_2 * (int64_t) f5; + int64_t f4f6_38 = f4_2 * (int64_t) f6_19; + int64_t f4f7_38 = f4 * (int64_t) f7_38; + int64_t f4f8_38 = f4_2 * (int64_t) f8_19; + int64_t f4f9_38 = f4 * (int64_t) f9_38; + int64_t f5f5_38 = f5 * (int64_t) f5_38; + int64_t f5f6_38 = f5_2 * (int64_t) f6_19; + int64_t f5f7_76 = f5_2 * (int64_t) f7_38; + int64_t f5f8_38 = f5_2 * (int64_t) f8_19; + int64_t f5f9_76 = f5_2 * (int64_t) f9_38; + int64_t f6f6_19 = f6 * (int64_t) f6_19; + int64_t f6f7_38 = f6 * (int64_t) f7_38; + int64_t f6f8_38 = f6_2 * (int64_t) f8_19; + int64_t f6f9_38 = f6 * (int64_t) f9_38; + int64_t f7f7_38 = f7 * (int64_t) f7_38; + int64_t f7f8_38 = f7_2 * (int64_t) f8_19; + int64_t f7f9_76 = f7_2 * (int64_t) f9_38; + int64_t f8f8_19 = f8 * (int64_t) f8_19; + int64_t f8f9_38 = f8 * (int64_t) f9_38; + int64_t f9f9_38 = f9 * (int64_t) f9_38; + int64_t h0 = f0f0 +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38; + int64_t h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38; + int64_t h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19; + int64_t h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38; + int64_t h4 = f0f4_2+f1f3_4 +f2f2 +f5f9_76+f6f8_38+f7f7_38; + int64_t h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38; + 
int64_t h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19; + int64_t h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38; + int64_t h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4 +f9f9_38; + int64_t h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2; + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + + h0 += h0; + h1 += h1; + h2 += h2; + h3 += h3; + h4 += h4; + h5 += h5; + h6 += h6; + h7 += h7; + h8 += h8; + h9 += h9; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + + carry1 = h1 + (1 << 24); h2 += carry1 >> 25; h1 -= carry1 & kTop39Bits; + carry5 = h5 + (1 << 24); h6 += carry5 >> 25; h5 -= carry5 & kTop39Bits; + + carry2 = h2 + (1 << 25); h3 += carry2 >> 26; h2 -= carry2 & kTop38Bits; + carry6 = h6 + (1 << 25); h7 += carry6 >> 26; h6 -= carry6 & kTop38Bits; + + carry3 = h3 + (1 << 24); h4 += carry3 >> 25; h3 -= carry3 & kTop39Bits; + carry7 = h7 + (1 << 24); h8 += carry7 >> 25; h7 -= carry7 & kTop39Bits; + + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + carry8 = h8 + (1 << 25); h9 += carry8 >> 26; h8 -= carry8 & kTop38Bits; + + carry9 = h9 + (1 << 24); h0 += (carry9 >> 25) * 19; h9 -= carry9 & kTop39Bits; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +static void fe_pow22523(fe out, const fe z) { + fe t0; + fe t1; + fe t2; + int i; + + fe_sq(t0, z); + for (i = 1; i < 1; ++i) { + fe_sq(t0, t0); + } + fe_sq(t1, t0); + for (i = 1; i < 2; ++i) { + fe_sq(t1, t1); + } + fe_mul(t1, z, t1); + fe_mul(t0, t0, t1); + fe_sq(t0, t0); + for (i = 1; i < 1; ++i) { + fe_sq(t0, t0); + } + fe_mul(t0, t1, t0); + fe_sq(t1, t0); + for (i = 1; i < 5; ++i) { + fe_sq(t1, t1); + } + fe_mul(t0, t1, t0); + fe_sq(t1, t0); + for (i = 1; i < 10; ++i) { + fe_sq(t1, t1); + } + fe_mul(t1, t1, t0); + fe_sq(t2, t1); + for (i = 1; i < 20; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t2, t1); + fe_sq(t1, t1); + for (i = 1; i < 10; ++i) { + fe_sq(t1, t1); + } + fe_mul(t0, t1, t0); + fe_sq(t1, t0); + for (i = 1; i < 50; ++i) { + fe_sq(t1, t1); + } + fe_mul(t1, t1, t0); + fe_sq(t2, t1); + for (i = 1; i < 100; ++i) { + fe_sq(t2, t2); + } + fe_mul(t1, t2, t1); + fe_sq(t1, t1); + for (i = 1; i < 50; ++i) { + fe_sq(t1, t1); + } + fe_mul(t0, t1, t0); + fe_sq(t0, t0); + for (i = 1; i < 2; ++i) { + fe_sq(t0, t0); + } + fe_mul(out, t0, z); +} + +void x25519_ge_tobytes(uint8_t *s, const ge_p2 *h) { + fe recip; + fe x; + fe y; + + fe_invert(recip, h->Z); + fe_mul(x, h->X, recip); + fe_mul(y, h->Y, recip); + fe_tobytes(s, y); + s[31] ^= fe_isnegative(x) << 7; +} + +#ifdef ED25519 +static void ge_p3_tobytes(uint8_t *s, const ge_p3 *h) { + fe recip; + fe x; + fe y; + + fe_invert(recip, h->Z); + fe_mul(x, h->X, recip); + fe_mul(y, h->Y, recip); + fe_tobytes(s, y); + s[31] ^= fe_isnegative(x) << 7; +} +#endif + +static const fe d = {-10913610, 13857413, -15372611, 6949391, 114729, + -8787816, -6275908, -3247719, -18696448, -12055116}; + +static const fe sqrtm1 = {-32595792, -7943725, 9377950, 3500415, 12389472, + -272473, -25146209, -2005654, 326686, 11406482}; + +int x25519_ge_frombytes_vartime(ge_p3 *h, const uint8_t *s) { + fe u; + fe v; + fe v3; + fe vxx; + fe check; + + fe_frombytes(h->Y, s); + fe_1(h->Z); + fe_sq(u, h->Y); + fe_mul(v, u, 
d); + fe_sub(u, u, h->Z); /* u = y^2-1 */ + fe_add(v, v, h->Z); /* v = dy^2+1 */ + + fe_sq(v3, v); + fe_mul(v3, v3, v); /* v3 = v^3 */ + fe_sq(h->X, v3); + fe_mul(h->X, h->X, v); + fe_mul(h->X, h->X, u); /* x = uv^7 */ + + fe_pow22523(h->X, h->X); /* x = (uv^7)^((q-5)/8) */ + fe_mul(h->X, h->X, v3); + fe_mul(h->X, h->X, u); /* x = uv^3(uv^7)^((q-5)/8) */ + + fe_sq(vxx, h->X); + fe_mul(vxx, vxx, v); + fe_sub(check, vxx, u); /* vx^2-u */ + if (fe_isnonzero(check)) { + fe_add(check, vxx, u); /* vx^2+u */ + if (fe_isnonzero(check)) { + return -1; + } + fe_mul(h->X, h->X, sqrtm1); + } + + if (fe_isnegative(h->X) != (s[31] >> 7)) { + fe_neg(h->X, h->X); + } + + fe_mul(h->T, h->X, h->Y); + return 0; +} + +static void ge_p2_0(ge_p2 *h) { + fe_0(h->X); + fe_1(h->Y); + fe_1(h->Z); +} + +static void ge_p3_0(ge_p3 *h) { + fe_0(h->X); + fe_1(h->Y); + fe_1(h->Z); + fe_0(h->T); +} + +static void ge_cached_0(ge_cached *h) { + fe_1(h->YplusX); + fe_1(h->YminusX); + fe_1(h->Z); + fe_0(h->T2d); +} + +static void ge_precomp_0(ge_precomp *h) { + fe_1(h->yplusx); + fe_1(h->yminusx); + fe_0(h->xy2d); +} + +/* r = p */ +static void ge_p3_to_p2(ge_p2 *r, const ge_p3 *p) { + fe_copy(r->X, p->X); + fe_copy(r->Y, p->Y); + fe_copy(r->Z, p->Z); +} + +static const fe d2 = {-21827239, -5839606, -30745221, 13898782, 229458, + 15978800, -12551817, -6495438, 29715968, 9444199}; + +/* r = p */ +void x25519_ge_p3_to_cached(ge_cached *r, const ge_p3 *p) { + fe_add(r->YplusX, p->Y, p->X); + fe_sub(r->YminusX, p->Y, p->X); + fe_copy(r->Z, p->Z); + fe_mul(r->T2d, p->T, d2); +} + +/* r = p */ +void x25519_ge_p1p1_to_p2(ge_p2 *r, const ge_p1p1 *p) { + fe_mul(r->X, p->X, p->T); + fe_mul(r->Y, p->Y, p->Z); + fe_mul(r->Z, p->Z, p->T); +} + +/* r = p */ +void x25519_ge_p1p1_to_p3(ge_p3 *r, const ge_p1p1 *p) { + fe_mul(r->X, p->X, p->T); + fe_mul(r->Y, p->Y, p->Z); + fe_mul(r->Z, p->Z, p->T); + fe_mul(r->T, p->X, p->Y); +} + +/* r = p */ +static void ge_p1p1_to_cached(ge_cached *r, const ge_p1p1 *p) { + ge_p3 t; + x25519_ge_p1p1_to_p3(&t, p); + x25519_ge_p3_to_cached(r, &t); +} + +/* r = 2 * p */ +static void ge_p2_dbl(ge_p1p1 *r, const ge_p2 *p) { + fe t0; + + fe_sq(r->X, p->X); + fe_sq(r->Z, p->Y); + fe_sq2(r->T, p->Z); + fe_add(r->Y, p->X, p->Y); + fe_sq(t0, r->Y); + fe_add(r->Y, r->Z, r->X); + fe_sub(r->Z, r->Z, r->X); + fe_sub(r->X, t0, r->Y); + fe_sub(r->T, r->T, r->Z); +} + +/* r = 2 * p */ +static void ge_p3_dbl(ge_p1p1 *r, const ge_p3 *p) { + ge_p2 q; + ge_p3_to_p2(&q, p); + ge_p2_dbl(r, &q); +} + +/* r = p + q */ +static void ge_madd(ge_p1p1 *r, const ge_p3 *p, const ge_precomp *q) { + fe t0; + + fe_add(r->X, p->Y, p->X); + fe_sub(r->Y, p->Y, p->X); + fe_mul(r->Z, r->X, q->yplusx); + fe_mul(r->Y, r->Y, q->yminusx); + fe_mul(r->T, q->xy2d, p->T); + fe_add(t0, p->Z, p->Z); + fe_sub(r->X, r->Z, r->Y); + fe_add(r->Y, r->Z, r->Y); + fe_add(r->Z, t0, r->T); + fe_sub(r->T, t0, r->T); +} + +#ifdef ED25519 +/* r = p - q */ +static void ge_msub(ge_p1p1 *r, const ge_p3 *p, const ge_precomp *q) { + fe t0; + + fe_add(r->X, p->Y, p->X); + fe_sub(r->Y, p->Y, p->X); + fe_mul(r->Z, r->X, q->yminusx); + fe_mul(r->Y, r->Y, q->yplusx); + fe_mul(r->T, q->xy2d, p->T); + fe_add(t0, p->Z, p->Z); + fe_sub(r->X, r->Z, r->Y); + fe_add(r->Y, r->Z, r->Y); + fe_sub(r->Z, t0, r->T); + fe_add(r->T, t0, r->T); +} +#endif + +/* r = p + q */ +void x25519_ge_add(ge_p1p1 *r, const ge_p3 *p, const ge_cached *q) { + fe t0; + + fe_add(r->X, p->Y, p->X); + fe_sub(r->Y, p->Y, p->X); + fe_mul(r->Z, r->X, q->YplusX); + fe_mul(r->Y, r->Y, q->YminusX); + fe_mul(r->T, 
q->T2d, p->T); + fe_mul(r->X, p->Z, q->Z); + fe_add(t0, r->X, r->X); + fe_sub(r->X, r->Z, r->Y); + fe_add(r->Y, r->Z, r->Y); + fe_add(r->Z, t0, r->T); + fe_sub(r->T, t0, r->T); +} + +/* r = p - q */ +void x25519_ge_sub(ge_p1p1 *r, const ge_p3 *p, const ge_cached *q) { + fe t0; + + fe_add(r->X, p->Y, p->X); + fe_sub(r->Y, p->Y, p->X); + fe_mul(r->Z, r->X, q->YminusX); + fe_mul(r->Y, r->Y, q->YplusX); + fe_mul(r->T, q->T2d, p->T); + fe_mul(r->X, p->Z, q->Z); + fe_add(t0, r->X, r->X); + fe_sub(r->X, r->Z, r->Y); + fe_add(r->Y, r->Z, r->Y); + fe_sub(r->Z, t0, r->T); + fe_add(r->T, t0, r->T); +} + +static uint8_t equal(signed char b, signed char c) { + uint8_t ub = b; + uint8_t uc = c; + uint8_t x = ub ^ uc; /* 0: yes; 1..255: no */ + uint32_t y = x; /* 0: yes; 1..255: no */ + y -= 1; /* 4294967295: yes; 0..254: no */ + y >>= 31; /* 1: yes; 0: no */ + return y; +} + +static void cmov(ge_precomp *t, const ge_precomp *u, uint8_t b) { + fe_cmov(t->yplusx, u->yplusx, b); + fe_cmov(t->yminusx, u->yminusx, b); + fe_cmov(t->xy2d, u->xy2d, b); +} + +void x25519_ge_scalarmult_small_precomp( + ge_p3 *h, const uint8_t a[32], const uint8_t precomp_table[15 * 2 * 32]) { + /* precomp_table is first expanded into matching |ge_precomp| + * elements. */ + ge_precomp multiples[15]; + + unsigned i; + for (i = 0; i < 15; i++) { + const uint8_t *bytes = &precomp_table[i*(2 * 32)]; + fe x, y; + fe_frombytes(x, bytes); + fe_frombytes(y, bytes + 32); + + ge_precomp *out = &multiples[i]; + fe_add(out->yplusx, y, x); + fe_sub(out->yminusx, y, x); + fe_mul(out->xy2d, x, y); + fe_mul(out->xy2d, out->xy2d, d2); + } + + /* See the comment above |k25519SmallPrecomp| about the structure of the + * precomputed elements. This loop does 64 additions and 64 doublings to + * calculate the result. */ + ge_p3_0(h); + + for (i = 63; i < 64; i--) { + unsigned j; + signed char index = 0; + + for (j = 0; j < 4; j++) { + const uint8_t bit = 1 & (a[(8 * j) + (i / 8)] >> (i & 7)); + index |= (bit << j); + } + + ge_precomp e; + ge_precomp_0(&e); + + for (j = 1; j < 16; j++) { + cmov(&e, &multiples[j-1], equal(index, j)); + } + + ge_cached cached; + ge_p1p1 r; + x25519_ge_p3_to_cached(&cached, h); + x25519_ge_add(&r, h, &cached); + x25519_ge_p1p1_to_p3(h, &r); + + ge_madd(&r, h, &e); + x25519_ge_p1p1_to_p3(h, &r); + } +} + +#if defined(OPENSSL_SMALL) + +/* This block of code replaces the standard base-point table with a much smaller + * one. The standard table is 30,720 bytes while this one is just 960. + * + * This table contains 15 pairs of group elements, (x, y), where each field + * element is serialised with |fe_tobytes|. If |i| is the index of the group + * element then consider i+1 as a four-bit number: (i₀, i₁, i₂, i₃) (where i₀ + * is the most significant bit). The value of the group element is then: + * (i₀×2^192 + i₁×2^128 + i₂×2^64 + i₃)G, where G is the generator. 
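In x25519_ge_scalarmult_small_precomp() above, the 4-bit window for iteration i gathers scalar bits i, i+64, i+128 and i+192 (little-endian bit numbering), which matches the (i0*2^192 + i1*2^128 + i2*2^64 + i3)*G layout described in the table comment. A small check of that bit gathering:

#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint8_t a[32] = {0};
    /* set scalar bits 5, 69 (= 5 + 64) and 197 (= 5 + 192) */
    a[0]  |= 1 << 5;
    a[8]  |= 1 << 5;
    a[24] |= 1 << 5;

    unsigned i = 5, index = 0;
    for (unsigned j = 0; j < 4; j++) {
        uint8_t bit = 1 & (a[(8 * j) + (i / 8)] >> (i & 7));   /* scalar bit i + 64*j */
        index |= (unsigned)bit << j;
    }
    assert(index == 0xB);   /* bits j = 0, 1, 3 set, i.e. scalar bits i, i+64, i+192 */
    return 0;
}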
*/ +static const uint8_t k25519SmallPrecomp[15 * 2 * 32] = { + 0x1a, 0xd5, 0x25, 0x8f, 0x60, 0x2d, 0x56, 0xc9, 0xb2, 0xa7, 0x25, 0x95, + 0x60, 0xc7, 0x2c, 0x69, 0x5c, 0xdc, 0xd6, 0xfd, 0x31, 0xe2, 0xa4, 0xc0, + 0xfe, 0x53, 0x6e, 0xcd, 0xd3, 0x36, 0x69, 0x21, 0x58, 0x66, 0x66, 0x66, + 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, + 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, + 0x66, 0x66, 0x66, 0x66, 0x02, 0xa2, 0xed, 0xf4, 0x8f, 0x6b, 0x0b, 0x3e, + 0xeb, 0x35, 0x1a, 0xd5, 0x7e, 0xdb, 0x78, 0x00, 0x96, 0x8a, 0xa0, 0xb4, + 0xcf, 0x60, 0x4b, 0xd4, 0xd5, 0xf9, 0x2d, 0xbf, 0x88, 0xbd, 0x22, 0x62, + 0x13, 0x53, 0xe4, 0x82, 0x57, 0xfa, 0x1e, 0x8f, 0x06, 0x2b, 0x90, 0xba, + 0x08, 0xb6, 0x10, 0x54, 0x4f, 0x7c, 0x1b, 0x26, 0xed, 0xda, 0x6b, 0xdd, + 0x25, 0xd0, 0x4e, 0xea, 0x42, 0xbb, 0x25, 0x03, 0xa2, 0xfb, 0xcc, 0x61, + 0x67, 0x06, 0x70, 0x1a, 0xc4, 0x78, 0x3a, 0xff, 0x32, 0x62, 0xdd, 0x2c, + 0xab, 0x50, 0x19, 0x3b, 0xf2, 0x9b, 0x7d, 0xb8, 0xfd, 0x4f, 0x29, 0x9c, + 0xa7, 0x91, 0xba, 0x0e, 0x46, 0x5e, 0x51, 0xfe, 0x1d, 0xbf, 0xe5, 0xe5, + 0x9b, 0x95, 0x0d, 0x67, 0xf8, 0xd1, 0xb5, 0x5a, 0xa1, 0x93, 0x2c, 0xc3, + 0xde, 0x0e, 0x97, 0x85, 0x2d, 0x7f, 0xea, 0xab, 0x3e, 0x47, 0x30, 0x18, + 0x24, 0xe8, 0xb7, 0x60, 0xae, 0x47, 0x80, 0xfc, 0xe5, 0x23, 0xe7, 0xc2, + 0xc9, 0x85, 0xe6, 0x98, 0xa0, 0x29, 0x4e, 0xe1, 0x84, 0x39, 0x2d, 0x95, + 0x2c, 0xf3, 0x45, 0x3c, 0xff, 0xaf, 0x27, 0x4c, 0x6b, 0xa6, 0xf5, 0x4b, + 0x11, 0xbd, 0xba, 0x5b, 0x9e, 0xc4, 0xa4, 0x51, 0x1e, 0xbe, 0xd0, 0x90, + 0x3a, 0x9c, 0xc2, 0x26, 0xb6, 0x1e, 0xf1, 0x95, 0x7d, 0xc8, 0x6d, 0x52, + 0xe6, 0x99, 0x2c, 0x5f, 0x9a, 0x96, 0x0c, 0x68, 0x29, 0xfd, 0xe2, 0xfb, + 0xe6, 0xbc, 0xec, 0x31, 0x08, 0xec, 0xe6, 0xb0, 0x53, 0x60, 0xc3, 0x8c, + 0xbe, 0xc1, 0xb3, 0x8a, 0x8f, 0xe4, 0x88, 0x2b, 0x55, 0xe5, 0x64, 0x6e, + 0x9b, 0xd0, 0xaf, 0x7b, 0x64, 0x2a, 0x35, 0x25, 0x10, 0x52, 0xc5, 0x9e, + 0x58, 0x11, 0x39, 0x36, 0x45, 0x51, 0xb8, 0x39, 0x93, 0xfc, 0x9d, 0x6a, + 0xbe, 0x58, 0xcb, 0xa4, 0x0f, 0x51, 0x3c, 0x38, 0x05, 0xca, 0xab, 0x43, + 0x63, 0x0e, 0xf3, 0x8b, 0x41, 0xa6, 0xf8, 0x9b, 0x53, 0x70, 0x80, 0x53, + 0x86, 0x5e, 0x8f, 0xe3, 0xc3, 0x0d, 0x18, 0xc8, 0x4b, 0x34, 0x1f, 0xd8, + 0x1d, 0xbc, 0xf2, 0x6d, 0x34, 0x3a, 0xbe, 0xdf, 0xd9, 0xf6, 0xf3, 0x89, + 0xa1, 0xe1, 0x94, 0x9f, 0x5d, 0x4c, 0x5d, 0xe9, 0xa1, 0x49, 0x92, 0xef, + 0x0e, 0x53, 0x81, 0x89, 0x58, 0x87, 0xa6, 0x37, 0xf1, 0xdd, 0x62, 0x60, + 0x63, 0x5a, 0x9d, 0x1b, 0x8c, 0xc6, 0x7d, 0x52, 0xea, 0x70, 0x09, 0x6a, + 0xe1, 0x32, 0xf3, 0x73, 0x21, 0x1f, 0x07, 0x7b, 0x7c, 0x9b, 0x49, 0xd8, + 0xc0, 0xf3, 0x25, 0x72, 0x6f, 0x9d, 0xed, 0x31, 0x67, 0x36, 0x36, 0x54, + 0x40, 0x92, 0x71, 0xe6, 0x11, 0x28, 0x11, 0xad, 0x93, 0x32, 0x85, 0x7b, + 0x3e, 0xb7, 0x3b, 0x49, 0x13, 0x1c, 0x07, 0xb0, 0x2e, 0x93, 0xaa, 0xfd, + 0xfd, 0x28, 0x47, 0x3d, 0x8d, 0xd2, 0xda, 0xc7, 0x44, 0xd6, 0x7a, 0xdb, + 0x26, 0x7d, 0x1d, 0xb8, 0xe1, 0xde, 0x9d, 0x7a, 0x7d, 0x17, 0x7e, 0x1c, + 0x37, 0x04, 0x8d, 0x2d, 0x7c, 0x5e, 0x18, 0x38, 0x1e, 0xaf, 0xc7, 0x1b, + 0x33, 0x48, 0x31, 0x00, 0x59, 0xf6, 0xf2, 0xca, 0x0f, 0x27, 0x1b, 0x63, + 0x12, 0x7e, 0x02, 0x1d, 0x49, 0xc0, 0x5d, 0x79, 0x87, 0xef, 0x5e, 0x7a, + 0x2f, 0x1f, 0x66, 0x55, 0xd8, 0x09, 0xd9, 0x61, 0x38, 0x68, 0xb0, 0x07, + 0xa3, 0xfc, 0xcc, 0x85, 0x10, 0x7f, 0x4c, 0x65, 0x65, 0xb3, 0xfa, 0xfa, + 0xa5, 0x53, 0x6f, 0xdb, 0x74, 0x4c, 0x56, 0x46, 0x03, 0xe2, 0xd5, 0x7a, + 0x29, 0x1c, 0xc6, 0x02, 0xbc, 0x59, 0xf2, 0x04, 0x75, 0x63, 0xc0, 0x84, + 0x2f, 0x60, 0x1c, 0x67, 0x76, 0xfd, 0x63, 0x86, 0xf3, 0xfa, 0xbf, 0xdc, + 0xd2, 0x2d, 
0x90, 0x91, 0xbd, 0x33, 0xa9, 0xe5, 0x66, 0x0c, 0xda, 0x42, + 0x27, 0xca, 0xf4, 0x66, 0xc2, 0xec, 0x92, 0x14, 0x57, 0x06, 0x63, 0xd0, + 0x4d, 0x15, 0x06, 0xeb, 0x69, 0x58, 0x4f, 0x77, 0xc5, 0x8b, 0xc7, 0xf0, + 0x8e, 0xed, 0x64, 0xa0, 0xb3, 0x3c, 0x66, 0x71, 0xc6, 0x2d, 0xda, 0x0a, + 0x0d, 0xfe, 0x70, 0x27, 0x64, 0xf8, 0x27, 0xfa, 0xf6, 0x5f, 0x30, 0xa5, + 0x0d, 0x6c, 0xda, 0xf2, 0x62, 0x5e, 0x78, 0x47, 0xd3, 0x66, 0x00, 0x1c, + 0xfd, 0x56, 0x1f, 0x5d, 0x3f, 0x6f, 0xf4, 0x4c, 0xd8, 0xfd, 0x0e, 0x27, + 0xc9, 0x5c, 0x2b, 0xbc, 0xc0, 0xa4, 0xe7, 0x23, 0x29, 0x02, 0x9f, 0x31, + 0xd6, 0xe9, 0xd7, 0x96, 0xf4, 0xe0, 0x5e, 0x0b, 0x0e, 0x13, 0xee, 0x3c, + 0x09, 0xed, 0xf2, 0x3d, 0x76, 0x91, 0xc3, 0xa4, 0x97, 0xae, 0xd4, 0x87, + 0xd0, 0x5d, 0xf6, 0x18, 0x47, 0x1f, 0x1d, 0x67, 0xf2, 0xcf, 0x63, 0xa0, + 0x91, 0x27, 0xf8, 0x93, 0x45, 0x75, 0x23, 0x3f, 0xd1, 0xf1, 0xad, 0x23, + 0xdd, 0x64, 0x93, 0x96, 0x41, 0x70, 0x7f, 0xf7, 0xf5, 0xa9, 0x89, 0xa2, + 0x34, 0xb0, 0x8d, 0x1b, 0xae, 0x19, 0x15, 0x49, 0x58, 0x23, 0x6d, 0x87, + 0x15, 0x4f, 0x81, 0x76, 0xfb, 0x23, 0xb5, 0xea, 0xcf, 0xac, 0x54, 0x8d, + 0x4e, 0x42, 0x2f, 0xeb, 0x0f, 0x63, 0xdb, 0x68, 0x37, 0xa8, 0xcf, 0x8b, + 0xab, 0xf5, 0xa4, 0x6e, 0x96, 0x2a, 0xb2, 0xd6, 0xbe, 0x9e, 0xbd, 0x0d, + 0xb4, 0x42, 0xa9, 0xcf, 0x01, 0x83, 0x8a, 0x17, 0x47, 0x76, 0xc4, 0xc6, + 0x83, 0x04, 0x95, 0x0b, 0xfc, 0x11, 0xc9, 0x62, 0xb8, 0x0c, 0x76, 0x84, + 0xd9, 0xb9, 0x37, 0xfa, 0xfc, 0x7c, 0xc2, 0x6d, 0x58, 0x3e, 0xb3, 0x04, + 0xbb, 0x8c, 0x8f, 0x48, 0xbc, 0x91, 0x27, 0xcc, 0xf9, 0xb7, 0x22, 0x19, + 0x83, 0x2e, 0x09, 0xb5, 0x72, 0xd9, 0x54, 0x1c, 0x4d, 0xa1, 0xea, 0x0b, + 0xf1, 0xc6, 0x08, 0x72, 0x46, 0x87, 0x7a, 0x6e, 0x80, 0x56, 0x0a, 0x8a, + 0xc0, 0xdd, 0x11, 0x6b, 0xd6, 0xdd, 0x47, 0xdf, 0x10, 0xd9, 0xd8, 0xea, + 0x7c, 0xb0, 0x8f, 0x03, 0x00, 0x2e, 0xc1, 0x8f, 0x44, 0xa8, 0xd3, 0x30, + 0x06, 0x89, 0xa2, 0xf9, 0x34, 0xad, 0xdc, 0x03, 0x85, 0xed, 0x51, 0xa7, + 0x82, 0x9c, 0xe7, 0x5d, 0x52, 0x93, 0x0c, 0x32, 0x9a, 0x5b, 0xe1, 0xaa, + 0xca, 0xb8, 0x02, 0x6d, 0x3a, 0xd4, 0xb1, 0x3a, 0xf0, 0x5f, 0xbe, 0xb5, + 0x0d, 0x10, 0x6b, 0x38, 0x32, 0xac, 0x76, 0x80, 0xbd, 0xca, 0x94, 0x71, + 0x7a, 0xf2, 0xc9, 0x35, 0x2a, 0xde, 0x9f, 0x42, 0x49, 0x18, 0x01, 0xab, + 0xbc, 0xef, 0x7c, 0x64, 0x3f, 0x58, 0x3d, 0x92, 0x59, 0xdb, 0x13, 0xdb, + 0x58, 0x6e, 0x0a, 0xe0, 0xb7, 0x91, 0x4a, 0x08, 0x20, 0xd6, 0x2e, 0x3c, + 0x45, 0xc9, 0x8b, 0x17, 0x79, 0xe7, 0xc7, 0x90, 0x99, 0x3a, 0x18, 0x25, +}; + +void x25519_ge_scalarmult_base(ge_p3 *h, const uint8_t a[32]) { + x25519_ge_scalarmult_small_precomp(h, a, k25519SmallPrecomp); +} + +#else + +/* k25519Precomp[i][j] = (j+1)*256^i*B */ +static const ge_precomp k25519Precomp[32][8] = { + { + { + {25967493, -14356035, 29566456, 3660896, -12694345, 4014787, + 27544626, -11754271, -6079156, 2047605}, + {-12545711, 934262, -2722910, 3049990, -727428, 9406986, 12720692, + 5043384, 19500929, -15469378}, + {-8738181, 4489570, 9688441, -14785194, 10184609, -12363380, + 29287919, 11864899, -24514362, -4438546}, + }, + { + {-12815894, -12976347, -21581243, 11784320, -25355658, -2750717, + -11717903, -3814571, -358445, -10211303}, + {-21703237, 6903825, 27185491, 6451973, -29577724, -9554005, + -15616551, 11189268, -26829678, -5319081}, + {26966642, 11152617, 32442495, 15396054, 14353839, -12752335, + -3128826, -9541118, -15472047, -4166697}, + }, + { + {15636291, -9688557, 24204773, -7912398, 616977, -16685262, + 27787600, -14772189, 28944400, -1550024}, + {16568933, 4717097, -11556148, -1102322, 15682896, -11807043, + 16354577, -11775962, 7689662, 11199574}, + 
{30464156, -5976125, -11779434, -15670865, 23220365, 15915852, + 7512774, 10017326, -17749093, -9920357}, + }, + { + {-17036878, 13921892, 10945806, -6033431, 27105052, -16084379, + -28926210, 15006023, 3284568, -6276540}, + {23599295, -8306047, -11193664, -7687416, 13236774, 10506355, + 7464579, 9656445, 13059162, 10374397}, + {7798556, 16710257, 3033922, 2874086, 28997861, 2835604, 32406664, + -3839045, -641708, -101325}, + }, + { + {10861363, 11473154, 27284546, 1981175, -30064349, 12577861, + 32867885, 14515107, -15438304, 10819380}, + {4708026, 6336745, 20377586, 9066809, -11272109, 6594696, -25653668, + 12483688, -12668491, 5581306}, + {19563160, 16186464, -29386857, 4097519, 10237984, -4348115, + 28542350, 13850243, -23678021, -15815942}, + }, + { + {-15371964, -12862754, 32573250, 4720197, -26436522, 5875511, + -19188627, -15224819, -9818940, -12085777}, + {-8549212, 109983, 15149363, 2178705, 22900618, 4543417, 3044240, + -15689887, 1762328, 14866737}, + {-18199695, -15951423, -10473290, 1707278, -17185920, 3916101, + -28236412, 3959421, 27914454, 4383652}, + }, + { + {5153746, 9909285, 1723747, -2777874, 30523605, 5516873, 19480852, + 5230134, -23952439, -15175766}, + {-30269007, -3463509, 7665486, 10083793, 28475525, 1649722, + 20654025, 16520125, 30598449, 7715701}, + {28881845, 14381568, 9657904, 3680757, -20181635, 7843316, + -31400660, 1370708, 29794553, -1409300}, + }, + { + {14499471, -2729599, -33191113, -4254652, 28494862, 14271267, + 30290735, 10876454, -33154098, 2381726}, + {-7195431, -2655363, -14730155, 462251, -27724326, 3941372, + -6236617, 3696005, -32300832, 15351955}, + {27431194, 8222322, 16448760, -3907995, -18707002, 11938355, + -32961401, -2970515, 29551813, 10109425}, + }, + }, + { + { + {-13657040, -13155431, -31283750, 11777098, 21447386, 6519384, + -2378284, -1627556, 10092783, -4764171}, + {27939166, 14210322, 4677035, 16277044, -22964462, -12398139, + -32508754, 12005538, -17810127, 12803510}, + {17228999, -15661624, -1233527, 300140, -1224870, -11714777, + 30364213, -9038194, 18016357, 4397660}, + }, + { + {-10958843, -7690207, 4776341, -14954238, 27850028, -15602212, + -26619106, 14544525, -17477504, 982639}, + {29253598, 15796703, -2863982, -9908884, 10057023, 3163536, 7332899, + -4120128, -21047696, 9934963}, + {5793303, 16271923, -24131614, -10116404, 29188560, 1206517, + -14747930, 4559895, -30123922, -10897950}, + }, + { + {-27643952, -11493006, 16282657, -11036493, 28414021, -15012264, + 24191034, 4541697, -13338309, 5500568}, + {12650548, -1497113, 9052871, 11355358, -17680037, -8400164, + -17430592, 12264343, 10874051, 13524335}, + {25556948, -3045990, 714651, 2510400, 23394682, -10415330, 33119038, + 5080568, -22528059, 5376628}, + }, + { + {-26088264, -4011052, -17013699, -3537628, -6726793, 1920897, + -22321305, -9447443, 4535768, 1569007}, + {-2255422, 14606630, -21692440, -8039818, 28430649, 8775819, + -30494562, 3044290, 31848280, 12543772}, + {-22028579, 2943893, -31857513, 6777306, 13784462, -4292203, + -27377195, -2062731, 7718482, 14474653}, + }, + { + {2385315, 2454213, -22631320, 46603, -4437935, -15680415, 656965, + -7236665, 24316168, -5253567}, + {13741529, 10911568, -33233417, -8603737, -20177830, -1033297, + 33040651, -13424532, -20729456, 8321686}, + {21060490, -2212744, 15712757, -4336099, 1639040, 10656336, + 23845965, -11874838, -9984458, 608372}, + }, + { + {-13672732, -15087586, -10889693, -7557059, -6036909, 11305547, + 1123968, -6780577, 27229399, 23887}, + {-23244140, -294205, -11744728, 14712571, -29465699, 
-2029617, + 12797024, -6440308, -1633405, 16678954}, + {-29500620, 4770662, -16054387, 14001338, 7830047, 9564805, + -1508144, -4795045, -17169265, 4904953}, + }, + { + {24059557, 14617003, 19037157, -15039908, 19766093, -14906429, + 5169211, 16191880, 2128236, -4326833}, + {-16981152, 4124966, -8540610, -10653797, 30336522, -14105247, + -29806336, 916033, -6882542, -2986532}, + {-22630907, 12419372, -7134229, -7473371, -16478904, 16739175, + 285431, 2763829, 15736322, 4143876}, + }, + { + {2379352, 11839345, -4110402, -5988665, 11274298, 794957, 212801, + -14594663, 23527084, -16458268}, + {33431127, -11130478, -17838966, -15626900, 8909499, 8376530, + -32625340, 4087881, -15188911, -14416214}, + {1767683, 7197987, -13205226, -2022635, -13091350, 448826, 5799055, + 4357868, -4774191, -16323038}, + }, + }, + { + { + {6721966, 13833823, -23523388, -1551314, 26354293, -11863321, + 23365147, -3949732, 7390890, 2759800}, + {4409041, 2052381, 23373853, 10530217, 7676779, -12885954, 21302353, + -4264057, 1244380, -12919645}, + {-4421239, 7169619, 4982368, -2957590, 30256825, -2777540, 14086413, + 9208236, 15886429, 16489664}, + }, + { + {1996075, 10375649, 14346367, 13311202, -6874135, -16438411, + -13693198, 398369, -30606455, -712933}, + {-25307465, 9795880, -2777414, 14878809, -33531835, 14780363, + 13348553, 12076947, -30836462, 5113182}, + {-17770784, 11797796, 31950843, 13929123, -25888302, 12288344, + -30341101, -7336386, 13847711, 5387222}, + }, + { + {-18582163, -3416217, 17824843, -2340966, 22744343, -10442611, + 8763061, 3617786, -19600662, 10370991}, + {20246567, -14369378, 22358229, -543712, 18507283, -10413996, + 14554437, -8746092, 32232924, 16763880}, + {9648505, 10094563, 26416693, 14745928, -30374318, -6472621, + 11094161, 15689506, 3140038, -16510092}, + }, + { + {-16160072, 5472695, 31895588, 4744994, 8823515, 10365685, + -27224800, 9448613, -28774454, 366295}, + {19153450, 11523972, -11096490, -6503142, -24647631, 5420647, + 28344573, 8041113, 719605, 11671788}, + {8678025, 2694440, -6808014, 2517372, 4964326, 11152271, -15432916, + -15266516, 27000813, -10195553}, + }, + { + {-15157904, 7134312, 8639287, -2814877, -7235688, 10421742, 564065, + 5336097, 6750977, -14521026}, + {11836410, -3979488, 26297894, 16080799, 23455045, 15735944, + 1695823, -8819122, 8169720, 16220347}, + {-18115838, 8653647, 17578566, -6092619, -8025777, -16012763, + -11144307, -2627664, -5990708, -14166033}, + }, + { + {-23308498, -10968312, 15213228, -10081214, -30853605, -11050004, + 27884329, 2847284, 2655861, 1738395}, + {-27537433, -14253021, -25336301, -8002780, -9370762, 8129821, + 21651608, -3239336, -19087449, -11005278}, + {1533110, 3437855, 23735889, 459276, 29970501, 11335377, 26030092, + 5821408, 10478196, 8544890}, + }, + { + {32173121, -16129311, 24896207, 3921497, 22579056, -3410854, + 19270449, 12217473, 17789017, -3395995}, + {-30552961, -2228401, -15578829, -10147201, 13243889, 517024, + 15479401, -3853233, 30460520, 1052596}, + {-11614875, 13323618, 32618793, 8175907, -15230173, 12596687, + 27491595, -4612359, 3179268, -9478891}, + }, + { + {31947069, -14366651, -4640583, -15339921, -15125977, -6039709, + -14756777, -16411740, 19072640, -9511060}, + {11685058, 11822410, 3158003, -13952594, 33402194, -4165066, + 5977896, -5215017, 473099, 5040608}, + {-20290863, 8198642, -27410132, 11602123, 1290375, -2799760, + 28326862, 1721092, -19558642, -3131606}, + }, + }, + { + { + {7881532, 10687937, 7578723, 7738378, -18951012, -2553952, 21820786, + 8076149, -27868496, 11538389}, + 
{-19935666, 3899861, 18283497, -6801568, -15728660, -11249211, + 8754525, 7446702, -5676054, 5797016}, + {-11295600, -3793569, -15782110, -7964573, 12708869, -8456199, + 2014099, -9050574, -2369172, -5877341}, + }, + { + {-22472376, -11568741, -27682020, 1146375, 18956691, 16640559, + 1192730, -3714199, 15123619, 10811505}, + {14352098, -3419715, -18942044, 10822655, 32750596, 4699007, -70363, + 15776356, -28886779, -11974553}, + {-28241164, -8072475, -4978962, -5315317, 29416931, 1847569, + -20654173, -16484855, 4714547, -9600655}, + }, + { + {15200332, 8368572, 19679101, 15970074, -31872674, 1959451, + 24611599, -4543832, -11745876, 12340220}, + {12876937, -10480056, 33134381, 6590940, -6307776, 14872440, + 9613953, 8241152, 15370987, 9608631}, + {-4143277, -12014408, 8446281, -391603, 4407738, 13629032, -7724868, + 15866074, -28210621, -8814099}, + }, + { + {26660628, -15677655, 8393734, 358047, -7401291, 992988, -23904233, + 858697, 20571223, 8420556}, + {14620715, 13067227, -15447274, 8264467, 14106269, 15080814, + 33531827, 12516406, -21574435, -12476749}, + {236881, 10476226, 57258, -14677024, 6472998, 2466984, 17258519, + 7256740, 8791136, 15069930}, + }, + { + {1276410, -9371918, 22949635, -16322807, -23493039, -5702186, + 14711875, 4874229, -30663140, -2331391}, + {5855666, 4990204, -13711848, 7294284, -7804282, 1924647, -1423175, + -7912378, -33069337, 9234253}, + {20590503, -9018988, 31529744, -7352666, -2706834, 10650548, + 31559055, -11609587, 18979186, 13396066}, + }, + { + {24474287, 4968103, 22267082, 4407354, 24063882, -8325180, + -18816887, 13594782, 33514650, 7021958}, + {-11566906, -6565505, -21365085, 15928892, -26158305, 4315421, + -25948728, -3916677, -21480480, 12868082}, + {-28635013, 13504661, 19988037, -2132761, 21078225, 6443208, + -21446107, 2244500, -12455797, -8089383}, + }, + { + {-30595528, 13793479, -5852820, 319136, -25723172, -6263899, + 33086546, 8957937, -15233648, 5540521}, + {-11630176, -11503902, -8119500, -7643073, 2620056, 1022908, + -23710744, -1568984, -16128528, -14962807}, + {23152971, 775386, 27395463, 14006635, -9701118, 4649512, 1689819, + 892185, -11513277, -15205948}, + }, + { + {9770129, 9586738, 26496094, 4324120, 1556511, -3550024, 27453819, + 4763127, -19179614, 5867134}, + {-32765025, 1927590, 31726409, -4753295, 23962434, -16019500, + 27846559, 5931263, -29749703, -16108455}, + {27461885, -2977536, 22380810, 1815854, -23033753, -3031938, + 7283490, -15148073, -19526700, 7734629}, + }, + }, + { + { + {-8010264, -9590817, -11120403, 6196038, 29344158, -13430885, + 7585295, -3176626, 18549497, 15302069}, + {-32658337, -6171222, -7672793, -11051681, 6258878, 13504381, + 10458790, -6418461, -8872242, 8424746}, + {24687205, 8613276, -30667046, -3233545, 1863892, -1830544, + 19206234, 7134917, -11284482, -828919}, + }, + { + {11334899, -9218022, 8025293, 12707519, 17523892, -10476071, + 10243738, -14685461, -5066034, 16498837}, + {8911542, 6887158, -9584260, -6958590, 11145641, -9543680, 17303925, + -14124238, 6536641, 10543906}, + {-28946384, 15479763, -17466835, 568876, -1497683, 11223454, + -2669190, -16625574, -27235709, 8876771}, + }, + { + {-25742899, -12566864, -15649966, -846607, -33026686, -796288, + -33481822, 15824474, -604426, -9039817}, + {10330056, 70051, 7957388, -9002667, 9764902, 15609756, 27698697, + -4890037, 1657394, 3084098}, + {10477963, -7470260, 12119566, -13250805, 29016247, -5365589, + 31280319, 14396151, -30233575, 15272409}, + }, + { + {-12288309, 3169463, 28813183, 16658753, 25116432, -5630466, + -25173957, 
-12636138, -25014757, 1950504}, + {-26180358, 9489187, 11053416, -14746161, -31053720, 5825630, + -8384306, -8767532, 15341279, 8373727}, + {28685821, 7759505, -14378516, -12002860, -31971820, 4079242, + 298136, -10232602, -2878207, 15190420}, + }, + { + {-32932876, 13806336, -14337485, -15794431, -24004620, 10940928, + 8669718, 2742393, -26033313, -6875003}, + {-1580388, -11729417, -25979658, -11445023, -17411874, -10912854, + 9291594, -16247779, -12154742, 6048605}, + {-30305315, 14843444, 1539301, 11864366, 20201677, 1900163, + 13934231, 5128323, 11213262, 9168384}, + }, + { + {-26280513, 11007847, 19408960, -940758, -18592965, -4328580, + -5088060, -11105150, 20470157, -16398701}, + {-23136053, 9282192, 14855179, -15390078, -7362815, -14408560, + -22783952, 14461608, 14042978, 5230683}, + {29969567, -2741594, -16711867, -8552442, 9175486, -2468974, + 21556951, 3506042, -5933891, -12449708}, + }, + { + {-3144746, 8744661, 19704003, 4581278, -20430686, 6830683, + -21284170, 8971513, -28539189, 15326563}, + {-19464629, 10110288, -17262528, -3503892, -23500387, 1355669, + -15523050, 15300988, -20514118, 9168260}, + {-5353335, 4488613, -23803248, 16314347, 7780487, -15638939, + -28948358, 9601605, 33087103, -9011387}, + }, + { + {-19443170, -15512900, -20797467, -12445323, -29824447, 10229461, + -27444329, -15000531, -5996870, 15664672}, + {23294591, -16632613, -22650781, -8470978, 27844204, 11461195, + 13099750, -2460356, 18151676, 13417686}, + {-24722913, -4176517, -31150679, 5988919, -26858785, 6685065, + 1661597, -12551441, 15271676, -15452665}, + }, + }, + { + { + {11433042, -13228665, 8239631, -5279517, -1985436, -725718, + -18698764, 2167544, -6921301, -13440182}, + {-31436171, 15575146, 30436815, 12192228, -22463353, 9395379, + -9917708, -8638997, 12215110, 12028277}, + {14098400, 6555944, 23007258, 5757252, -15427832, -12950502, + 30123440, 4617780, -16900089, -655628}, + }, + { + {-4026201, -15240835, 11893168, 13718664, -14809462, 1847385, + -15819999, 10154009, 23973261, -12684474}, + {-26531820, -3695990, -1908898, 2534301, -31870557, -16550355, + 18341390, -11419951, 32013174, -10103539}, + {-25479301, 10876443, -11771086, -14625140, -12369567, 1838104, + 21911214, 6354752, 4425632, -837822}, + }, + { + {-10433389, -14612966, 22229858, -3091047, -13191166, 776729, + -17415375, -12020462, 4725005, 14044970}, + {19268650, -7304421, 1555349, 8692754, -21474059, -9910664, 6347390, + -1411784, -19522291, -16109756}, + {-24864089, 12986008, -10898878, -5558584, -11312371, -148526, + 19541418, 8180106, 9282262, 10282508}, + }, + { + {-26205082, 4428547, -8661196, -13194263, 4098402, -14165257, + 15522535, 8372215, 5542595, -10702683}, + {-10562541, 14895633, 26814552, -16673850, -17480754, -2489360, + -2781891, 6993761, -18093885, 10114655}, + {-20107055, -929418, 31422704, 10427861, -7110749, 6150669, + -29091755, -11529146, 25953725, -106158}, + }, + { + {-4234397, -8039292, -9119125, 3046000, 2101609, -12607294, + 19390020, 6094296, -3315279, 12831125}, + {-15998678, 7578152, 5310217, 14408357, -33548620, -224739, + 31575954, 6326196, 7381791, -2421839}, + {-20902779, 3296811, 24736065, -16328389, 18374254, 7318640, + 6295303, 8082724, -15362489, 12339664}, + }, + { + {27724736, 2291157, 6088201, -14184798, 1792727, 5857634, 13848414, + 15768922, 25091167, 14856294}, + {-18866652, 8331043, 24373479, 8541013, -701998, -9269457, 12927300, + -12695493, -22182473, -9012899}, + {-11423429, -5421590, 11632845, 3405020, 30536730, -11674039, + -27260765, 13866390, 30146206, 9142070}, + }, 
+ { + {3924129, -15307516, -13817122, -10054960, 12291820, -668366, + -27702774, 9326384, -8237858, 4171294}, + {-15921940, 16037937, 6713787, 16606682, -21612135, 2790944, + 26396185, 3731949, 345228, -5462949}, + {-21327538, 13448259, 25284571, 1143661, 20614966, -8849387, + 2031539, -12391231, -16253183, -13582083}, + }, + { + {31016211, -16722429, 26371392, -14451233, -5027349, 14854137, + 17477601, 3842657, 28012650, -16405420}, + {-5075835, 9368966, -8562079, -4600902, -15249953, 6970560, + -9189873, 16292057, -8867157, 3507940}, + {29439664, 3537914, 23333589, 6997794, -17555561, -11018068, + -15209202, -15051267, -9164929, 6580396}, + }, + }, + { + { + {-12185861, -7679788, 16438269, 10826160, -8696817, -6235611, + 17860444, -9273846, -2095802, 9304567}, + {20714564, -4336911, 29088195, 7406487, 11426967, -5095705, + 14792667, -14608617, 5289421, -477127}, + {-16665533, -10650790, -6160345, -13305760, 9192020, -1802462, + 17271490, 12349094, 26939669, -3752294}, + }, + { + {-12889898, 9373458, 31595848, 16374215, 21471720, 13221525, + -27283495, -12348559, -3698806, 117887}, + {22263325, -6560050, 3984570, -11174646, -15114008, -566785, + 28311253, 5358056, -23319780, 541964}, + {16259219, 3261970, 2309254, -15534474, -16885711, -4581916, + 24134070, -16705829, -13337066, -13552195}, + }, + { + {9378160, -13140186, -22845982, -12745264, 28198281, -7244098, + -2399684, -717351, 690426, 14876244}, + {24977353, -314384, -8223969, -13465086, 28432343, -1176353, + -13068804, -12297348, -22380984, 6618999}, + {-1538174, 11685646, 12944378, 13682314, -24389511, -14413193, + 8044829, -13817328, 32239829, -5652762}, + }, + { + {-18603066, 4762990, -926250, 8885304, -28412480, -3187315, 9781647, + -10350059, 32779359, 5095274}, + {-33008130, -5214506, -32264887, -3685216, 9460461, -9327423, + -24601656, 14506724, 21639561, -2630236}, + {-16400943, -13112215, 25239338, 15531969, 3987758, -4499318, + -1289502, -6863535, 17874574, 558605}, + }, + { + {-13600129, 10240081, 9171883, 16131053, -20869254, 9599700, + 33499487, 5080151, 2085892, 5119761}, + {-22205145, -2519528, -16381601, 414691, -25019550, 2170430, + 30634760, -8363614, -31999993, -5759884}, + {-6845704, 15791202, 8550074, -1312654, 29928809, -12092256, + 27534430, -7192145, -22351378, 12961482}, + }, + { + {-24492060, -9570771, 10368194, 11582341, -23397293, -2245287, + 16533930, 8206996, -30194652, -5159638}, + {-11121496, -3382234, 2307366, 6362031, -135455, 8868177, -16835630, + 7031275, 7589640, 8945490}, + {-32152748, 8917967, 6661220, -11677616, -1192060, -15793393, + 7251489, -11182180, 24099109, -14456170}, + }, + { + {5019558, -7907470, 4244127, -14714356, -26933272, 6453165, + -19118182, -13289025, -6231896, -10280736}, + {10853594, 10721687, 26480089, 5861829, -22995819, 1972175, + -1866647, -10557898, -3363451, -6441124}, + {-17002408, 5906790, 221599, -6563147, 7828208, -13248918, 24362661, + -2008168, -13866408, 7421392}, + }, + { + {8139927, -6546497, 32257646, -5890546, 30375719, 1886181, + -21175108, 15441252, 28826358, -4123029}, + {6267086, 9695052, 7709135, -16603597, -32869068, -1886135, + 14795160, -7840124, 13746021, -1742048}, + {28584902, 7787108, -6732942, -15050729, 22846041, -7571236, + -3181936, -363524, 4771362, -8419958}, + }, + }, + { + { + {24949256, 6376279, -27466481, -8174608, -18646154, -9930606, + 33543569, -12141695, 3569627, 11342593}, + {26514989, 4740088, 27912651, 3697550, 19331575, -11472339, 6809886, + 4608608, 7325975, -14801071}, + {-11618399, -14554430, -24321212, 7655128, -1369274, 
5214312, + -27400540, 10258390, -17646694, -8186692}, + }, + { + {11431204, 15823007, 26570245, 14329124, 18029990, 4796082, + -31446179, 15580664, 9280358, -3973687}, + {-160783, -10326257, -22855316, -4304997, -20861367, -13621002, + -32810901, -11181622, -15545091, 4387441}, + {-20799378, 12194512, 3937617, -5805892, -27154820, 9340370, + -24513992, 8548137, 20617071, -7482001}, + }, + { + {-938825, -3930586, -8714311, 16124718, 24603125, -6225393, + -13775352, -11875822, 24345683, 10325460}, + {-19855277, -1568885, -22202708, 8714034, 14007766, 6928528, + 16318175, -1010689, 4766743, 3552007}, + {-21751364, -16730916, 1351763, -803421, -4009670, 3950935, 3217514, + 14481909, 10988822, -3994762}, + }, + { + {15564307, -14311570, 3101243, 5684148, 30446780, -8051356, + 12677127, -6505343, -8295852, 13296005}, + {-9442290, 6624296, -30298964, -11913677, -4670981, -2057379, + 31521204, 9614054, -30000824, 12074674}, + {4771191, -135239, 14290749, -13089852, 27992298, 14998318, + -1413936, -1556716, 29832613, -16391035}, + }, + { + {7064884, -7541174, -19161962, -5067537, -18891269, -2912736, + 25825242, 5293297, -27122660, 13101590}, + {-2298563, 2439670, -7466610, 1719965, -27267541, -16328445, + 32512469, -5317593, -30356070, -4190957}, + {-30006540, 10162316, -33180176, 3981723, -16482138, -13070044, + 14413974, 9515896, 19568978, 9628812}, + }, + { + {33053803, 199357, 15894591, 1583059, 27380243, -4580435, -17838894, + -6106839, -6291786, 3437740}, + {-18978877, 3884493, 19469877, 12726490, 15913552, 13614290, + -22961733, 70104, 7463304, 4176122}, + {-27124001, 10659917, 11482427, -16070381, 12771467, -6635117, + -32719404, -5322751, 24216882, 5944158}, + }, + { + {8894125, 7450974, -2664149, -9765752, -28080517, -12389115, + 19345746, 14680796, 11632993, 5847885}, + {26942781, -2315317, 9129564, -4906607, 26024105, 11769399, + -11518837, 6367194, -9727230, 4782140}, + {19916461, -4828410, -22910704, -11414391, 25606324, -5972441, + 33253853, 8220911, 6358847, -1873857}, + }, + { + {801428, -2081702, 16569428, 11065167, 29875704, 96627, 7908388, + -4480480, -13538503, 1387155}, + {19646058, 5720633, -11416706, 12814209, 11607948, 12749789, + 14147075, 15156355, -21866831, 11835260}, + {19299512, 1155910, 28703737, 14890794, 2925026, 7269399, 26121523, + 15467869, -26560550, 5052483}, + }, + }, + { + { + {-3017432, 10058206, 1980837, 3964243, 22160966, 12322533, -6431123, + -12618185, 12228557, -7003677}, + {32944382, 14922211, -22844894, 5188528, 21913450, -8719943, + 4001465, 13238564, -6114803, 8653815}, + {22865569, -4652735, 27603668, -12545395, 14348958, 8234005, + 24808405, 5719875, 28483275, 2841751}, + }, + { + {-16420968, -1113305, -327719, -12107856, 21886282, -15552774, + -1887966, -315658, 19932058, -12739203}, + {-11656086, 10087521, -8864888, -5536143, -19278573, -3055912, + 3999228, 13239134, -4777469, -13910208}, + {1382174, -11694719, 17266790, 9194690, -13324356, 9720081, + 20403944, 11284705, -14013818, 3093230}, + }, + { + {16650921, -11037932, -1064178, 1570629, -8329746, 7352753, -302424, + 16271225, -24049421, -6691850}, + {-21911077, -5927941, -4611316, -5560156, -31744103, -10785293, + 24123614, 15193618, -21652117, -16739389}, + {-9935934, -4289447, -25279823, 4372842, 2087473, 10399484, + 31870908, 14690798, 17361620, 11864968}, + }, + { + {-11307610, 6210372, 13206574, 5806320, -29017692, -13967200, + -12331205, -7486601, -25578460, -16240689}, + {14668462, -12270235, 26039039, 15305210, 25515617, 4542480, + 10453892, 6577524, 9145645, -6443880}, + 
{5974874, 3053895, -9433049, -10385191, -31865124, 3225009, + -7972642, 3936128, -5652273, -3050304}, + }, + { + {30625386, -4729400, -25555961, -12792866, -20484575, 7695099, + 17097188, -16303496, -27999779, 1803632}, + {-3553091, 9865099, -5228566, 4272701, -5673832, -16689700, + 14911344, 12196514, -21405489, 7047412}, + {20093277, 9920966, -11138194, -5343857, 13161587, 12044805, + -32856851, 4124601, -32343828, -10257566}, + }, + { + {-20788824, 14084654, -13531713, 7842147, 19119038, -13822605, + 4752377, -8714640, -21679658, 2288038}, + {-26819236, -3283715, 29965059, 3039786, -14473765, 2540457, + 29457502, 14625692, -24819617, 12570232}, + {-1063558, -11551823, 16920318, 12494842, 1278292, -5869109, + -21159943, -3498680, -11974704, 4724943}, + }, + { + {17960970, -11775534, -4140968, -9702530, -8876562, -1410617, + -12907383, -8659932, -29576300, 1903856}, + {23134274, -14279132, -10681997, -1611936, 20684485, 15770816, + -12989750, 3190296, 26955097, 14109738}, + {15308788, 5320727, -30113809, -14318877, 22902008, 7767164, + 29425325, -11277562, 31960942, 11934971}, + }, + { + {-27395711, 8435796, 4109644, 12222639, -24627868, 14818669, + 20638173, 4875028, 10491392, 1379718}, + {-13159415, 9197841, 3875503, -8936108, -1383712, -5879801, + 33518459, 16176658, 21432314, 12180697}, + {-11787308, 11500838, 13787581, -13832590, -22430679, 10140205, + 1465425, 12689540, -10301319, -13872883}, + }, + }, + { + { + {5414091, -15386041, -21007664, 9643570, 12834970, 1186149, + -2622916, -1342231, 26128231, 6032912}, + {-26337395, -13766162, 32496025, -13653919, 17847801, -12669156, + 3604025, 8316894, -25875034, -10437358}, + {3296484, 6223048, 24680646, -12246460, -23052020, 5903205, + -8862297, -4639164, 12376617, 3188849}, + }, + { + {29190488, -14659046, 27549113, -1183516, 3520066, -10697301, + 32049515, -7309113, -16109234, -9852307}, + {-14744486, -9309156, 735818, -598978, -20407687, -5057904, + 25246078, -15795669, 18640741, -960977}, + {-6928835, -16430795, 10361374, 5642961, 4910474, 12345252, + -31638386, -494430, 10530747, 1053335}, + }, + { + {-29265967, -14186805, -13538216, -12117373, -19457059, -10655384, + -31462369, -2948985, 24018831, 15026644}, + {-22592535, -3145277, -2289276, 5953843, -13440189, 9425631, + 25310643, 13003497, -2314791, -15145616}, + {-27419985, -603321, -8043984, -1669117, -26092265, 13987819, + -27297622, 187899, -23166419, -2531735}, + }, + { + {-21744398, -13810475, 1844840, 5021428, -10434399, -15911473, + 9716667, 16266922, -5070217, 726099}, + {29370922, -6053998, 7334071, -15342259, 9385287, 2247707, + -13661962, -4839461, 30007388, -15823341}, + {-936379, 16086691, 23751945, -543318, -1167538, -5189036, 9137109, + 730663, 9835848, 4555336}, + }, + { + {-23376435, 1410446, -22253753, -12899614, 30867635, 15826977, + 17693930, 544696, -11985298, 12422646}, + {31117226, -12215734, -13502838, 6561947, -9876867, -12757670, + -5118685, -4096706, 29120153, 13924425}, + {-17400879, -14233209, 19675799, -2734756, -11006962, -5858820, + -9383939, -11317700, 7240931, -237388}, + }, + { + {-31361739, -11346780, -15007447, -5856218, -22453340, -12152771, + 1222336, 4389483, 3293637, -15551743}, + {-16684801, -14444245, 11038544, 11054958, -13801175, -3338533, + -24319580, 7733547, 12796905, -6335822}, + {-8759414, -10817836, -25418864, 10783769, -30615557, -9746811, + -28253339, 3647836, 3222231, -11160462}, + }, + { + {18606113, 1693100, -25448386, -15170272, 4112353, 10045021, + 23603893, -2048234, -7550776, 2484985}, + {9255317, -3131197, -12156162, 
-1004256, 13098013, -9214866, + 16377220, -2102812, -19802075, -3034702}, + {-22729289, 7496160, -5742199, 11329249, 19991973, -3347502, + -31718148, 9936966, -30097688, -10618797}, + }, + { + {21878590, -5001297, 4338336, 13643897, -3036865, 13160960, + 19708896, 5415497, -7360503, -4109293}, + {27736861, 10103576, 12500508, 8502413, -3413016, -9633558, + 10436918, -1550276, -23659143, -8132100}, + {19492550, -12104365, -29681976, -852630, -3208171, 12403437, + 30066266, 8367329, 13243957, 8709688}, + }, + }, + { + { + {12015105, 2801261, 28198131, 10151021, 24818120, -4743133, + -11194191, -5645734, 5150968, 7274186}, + {2831366, -12492146, 1478975, 6122054, 23825128, -12733586, + 31097299, 6083058, 31021603, -9793610}, + {-2529932, -2229646, 445613, 10720828, -13849527, -11505937, + -23507731, 16354465, 15067285, -14147707}, + }, + { + {7840942, 14037873, -33364863, 15934016, -728213, -3642706, + 21403988, 1057586, -19379462, -12403220}, + {915865, -16469274, 15608285, -8789130, -24357026, 6060030, + -17371319, 8410997, -7220461, 16527025}, + {32922597, -556987, 20336074, -16184568, 10903705, -5384487, + 16957574, 52992, 23834301, 6588044}, + }, + { + {32752030, 11232950, 3381995, -8714866, 22652988, -10744103, + 17159699, 16689107, -20314580, -1305992}, + {-4689649, 9166776, -25710296, -10847306, 11576752, 12733943, + 7924251, -2752281, 1976123, -7249027}, + {21251222, 16309901, -2983015, -6783122, 30810597, 12967303, 156041, + -3371252, 12331345, -8237197}, + }, + { + {8651614, -4477032, -16085636, -4996994, 13002507, 2950805, + 29054427, -5106970, 10008136, -4667901}, + {31486080, 15114593, -14261250, 12951354, 14369431, -7387845, + 16347321, -13662089, 8684155, -10532952}, + {19443825, 11385320, 24468943, -9659068, -23919258, 2187569, + -26263207, -6086921, 31316348, 14219878}, + }, + { + {-28594490, 1193785, 32245219, 11392485, 31092169, 15722801, + 27146014, 6992409, 29126555, 9207390}, + {32382935, 1110093, 18477781, 11028262, -27411763, -7548111, + -4980517, 10843782, -7957600, -14435730}, + {2814918, 7836403, 27519878, -7868156, -20894015, -11553689, + -21494559, 8550130, 28346258, 1994730}, + }, + { + {-19578299, 8085545, -14000519, -3948622, 2785838, -16231307, + -19516951, 7174894, 22628102, 8115180}, + {-30405132, 955511, -11133838, -15078069, -32447087, -13278079, + -25651578, 3317160, -9943017, 930272}, + {-15303681, -6833769, 28856490, 1357446, 23421993, 1057177, + 24091212, -1388970, -22765376, -10650715}, + }, + { + {-22751231, -5303997, -12907607, -12768866, -15811511, -7797053, + -14839018, -16554220, -1867018, 8398970}, + {-31969310, 2106403, -4736360, 1362501, 12813763, 16200670, + 22981545, -6291273, 18009408, -15772772}, + {-17220923, -9545221, -27784654, 14166835, 29815394, 7444469, + 29551787, -3727419, 19288549, 1325865}, + }, + { + {15100157, -15835752, -23923978, -1005098, -26450192, 15509408, + 12376730, -3479146, 33166107, -8042750}, + {20909231, 13023121, -9209752, 16251778, -5778415, -8094914, + 12412151, 10018715, 2213263, -13878373}, + {32529814, -11074689, 30361439, -16689753, -9135940, 1513226, + 22922121, 6382134, -5766928, 8371348}, + }, + }, + { + { + {9923462, 11271500, 12616794, 3544722, -29998368, -1721626, + 12891687, -8193132, -26442943, 10486144}, + {-22597207, -7012665, 8587003, -8257861, 4084309, -12970062, 361726, + 2610596, -23921530, -11455195}, + {5408411, -1136691, -4969122, 10561668, 24145918, 14240566, + 31319731, -4235541, 19985175, -3436086}, + }, + { + {-13994457, 16616821, 14549246, 3341099, 32155958, 13648976, + -17577068, 
8849297, 65030, 8370684}, + {-8320926, -12049626, 31204563, 5839400, -20627288, -1057277, + -19442942, 6922164, 12743482, -9800518}, + {-2361371, 12678785, 28815050, 4759974, -23893047, 4884717, + 23783145, 11038569, 18800704, 255233}, + }, + { + {-5269658, -1773886, 13957886, 7990715, 23132995, 728773, 13393847, + 9066957, 19258688, -14753793}, + {-2936654, -10827535, -10432089, 14516793, -3640786, 4372541, + -31934921, 2209390, -1524053, 2055794}, + {580882, 16705327, 5468415, -2683018, -30926419, -14696000, + -7203346, -8994389, -30021019, 7394435}, + }, + { + {23838809, 1822728, -15738443, 15242727, 8318092, -3733104, + -21672180, -3492205, -4821741, 14799921}, + {13345610, 9759151, 3371034, -16137791, 16353039, 8577942, 31129804, + 13496856, -9056018, 7402518}, + {2286874, -4435931, -20042458, -2008336, -13696227, 5038122, + 11006906, -15760352, 8205061, 1607563}, + }, + { + {14414086, -8002132, 3331830, -3208217, 22249151, -5594188, + 18364661, -2906958, 30019587, -9029278}, + {-27688051, 1585953, -10775053, 931069, -29120221, -11002319, + -14410829, 12029093, 9944378, 8024}, + {4368715, -3709630, 29874200, -15022983, -20230386, -11410704, + -16114594, -999085, -8142388, 5640030}, + }, + { + {10299610, 13746483, 11661824, 16234854, 7630238, 5998374, 9809887, + -16694564, 15219798, -14327783}, + {27425505, -5719081, 3055006, 10660664, 23458024, 595578, -15398605, + -1173195, -18342183, 9742717}, + {6744077, 2427284, 26042789, 2720740, -847906, 1118974, 32324614, + 7406442, 12420155, 1994844}, + }, + { + {14012521, -5024720, -18384453, -9578469, -26485342, -3936439, + -13033478, -10909803, 24319929, -6446333}, + {16412690, -4507367, 10772641, 15929391, -17068788, -4658621, + 10555945, -10484049, -30102368, -4739048}, + {22397382, -7767684, -9293161, -12792868, 17166287, -9755136, + -27333065, 6199366, 21880021, -12250760}, + }, + { + {-4283307, 5368523, -31117018, 8163389, -30323063, 3209128, + 16557151, 8890729, 8840445, 4957760}, + {-15447727, 709327, -6919446, -10870178, -29777922, 6522332, + -21720181, 12130072, -14796503, 5005757}, + {-2114751, -14308128, 23019042, 15765735, -25269683, 6002752, + 10183197, -13239326, -16395286, -2176112}, + }, + }, + { + { + {-19025756, 1632005, 13466291, -7995100, -23640451, 16573537, + -32013908, -3057104, 22208662, 2000468}, + {3065073, -1412761, -25598674, -361432, -17683065, -5703415, + -8164212, 11248527, -3691214, -7414184}, + {10379208, -6045554, 8877319, 1473647, -29291284, -12507580, + 16690915, 2553332, -3132688, 16400289}, + }, + { + {15716668, 1254266, -18472690, 7446274, -8448918, 6344164, + -22097271, -7285580, 26894937, 9132066}, + {24158887, 12938817, 11085297, -8177598, -28063478, -4457083, + -30576463, 64452, -6817084, -2692882}, + {13488534, 7794716, 22236231, 5989356, 25426474, -12578208, 2350710, + -3418511, -4688006, 2364226}, + }, + { + {16335052, 9132434, 25640582, 6678888, 1725628, 8517937, -11807024, + -11697457, 15445875, -7798101}, + {29004207, -7867081, 28661402, -640412, -12794003, -7943086, + 31863255, -4135540, -278050, -15759279}, + {-6122061, -14866665, -28614905, 14569919, -10857999, -3591829, + 10343412, -6976290, -29828287, -10815811}, + }, + { + {27081650, 3463984, 14099042, -4517604, 1616303, -6205604, 29542636, + 15372179, 17293797, 960709}, + {20263915, 11434237, -5765435, 11236810, 13505955, -10857102, + -16111345, 6493122, -19384511, 7639714}, + {-2830798, -14839232, 25403038, -8215196, -8317012, -16173699, + 18006287, -16043750, 29994677, -15808121}, + }, + { + {9769828, 5202651, -24157398, -13631392, 
-28051003, -11561624, + -24613141, -13860782, -31184575, 709464}, + {12286395, 13076066, -21775189, -1176622, -25003198, 4057652, + -32018128, -8890874, 16102007, 13205847}, + {13733362, 5599946, 10557076, 3195751, -5557991, 8536970, -25540170, + 8525972, 10151379, 10394400}, + }, + { + {4024660, -16137551, 22436262, 12276534, -9099015, -2686099, + 19698229, 11743039, -33302334, 8934414}, + {-15879800, -4525240, -8580747, -2934061, 14634845, -698278, + -9449077, 3137094, -11536886, 11721158}, + {17555939, -5013938, 8268606, 2331751, -22738815, 9761013, 9319229, + 8835153, -9205489, -1280045}, + }, + { + {-461409, -7830014, 20614118, 16688288, -7514766, -4807119, + 22300304, 505429, 6108462, -6183415}, + {-5070281, 12367917, -30663534, 3234473, 32617080, -8422642, + 29880583, -13483331, -26898490, -7867459}, + {-31975283, 5726539, 26934134, 10237677, -3173717, -605053, + 24199304, 3795095, 7592688, -14992079}, + }, + { + {21594432, -14964228, 17466408, -4077222, 32537084, 2739898, + 6407723, 12018833, -28256052, 4298412}, + {-20650503, -11961496, -27236275, 570498, 3767144, -1717540, + 13891942, -1569194, 13717174, 10805743}, + {-14676630, -15644296, 15287174, 11927123, 24177847, -8175568, + -796431, 14860609, -26938930, -5863836}, + }, + }, + { + { + {12962541, 5311799, -10060768, 11658280, 18855286, -7954201, + 13286263, -12808704, -4381056, 9882022}, + {18512079, 11319350, -20123124, 15090309, 18818594, 5271736, + -22727904, 3666879, -23967430, -3299429}, + {-6789020, -3146043, 16192429, 13241070, 15898607, -14206114, + -10084880, -6661110, -2403099, 5276065}, + }, + { + {30169808, -5317648, 26306206, -11750859, 27814964, 7069267, + 7152851, 3684982, 1449224, 13082861}, + {10342826, 3098505, 2119311, 193222, 25702612, 12233820, 23697382, + 15056736, -21016438, -8202000}, + {-33150110, 3261608, 22745853, 7948688, 19370557, -15177665, + -26171976, 6482814, -10300080, -11060101}, + }, + { + {32869458, -5408545, 25609743, 15678670, -10687769, -15471071, + 26112421, 2521008, -22664288, 6904815}, + {29506923, 4457497, 3377935, -9796444, -30510046, 12935080, 1561737, + 3841096, -29003639, -6657642}, + {10340844, -6630377, -18656632, -2278430, 12621151, -13339055, + 30878497, -11824370, -25584551, 5181966}, + }, + { + {25940115, -12658025, 17324188, -10307374, -8671468, 15029094, + 24396252, -16450922, -2322852, -12388574}, + {-21765684, 9916823, -1300409, 4079498, -1028346, 11909559, 1782390, + 12641087, 20603771, -6561742}, + {-18882287, -11673380, 24849422, 11501709, 13161720, -4768874, + 1925523, 11914390, 4662781, 7820689}, + }, + { + {12241050, -425982, 8132691, 9393934, 32846760, -1599620, 29749456, + 12172924, 16136752, 15264020}, + {-10349955, -14680563, -8211979, 2330220, -17662549, -14545780, + 10658213, 6671822, 19012087, 3772772}, + {3753511, -3421066, 10617074, 2028709, 14841030, -6721664, 28718732, + -15762884, 20527771, 12988982}, + }, + { + {-14822485, -5797269, -3707987, 12689773, -898983, -10914866, + -24183046, -10564943, 3299665, -12424953}, + {-16777703, -15253301, -9642417, 4978983, 3308785, 8755439, 6943197, + 6461331, -25583147, 8991218}, + {-17226263, 1816362, -1673288, -6086439, 31783888, -8175991, + -32948145, 7417950, -30242287, 1507265}, + }, + { + {29692663, 6829891, -10498800, 4334896, 20945975, -11906496, + -28887608, 8209391, 14606362, -10647073}, + {-3481570, 8707081, 32188102, 5672294, 22096700, 1711240, -33020695, + 9761487, 4170404, -2085325}, + {-11587470, 14855945, -4127778, -1531857, -26649089, 15084046, + 22186522, 16002000, -14276837, -8400798}, + }, + 
{ + {-4811456, 13761029, -31703877, -2483919, -3312471, 7869047, + -7113572, -9620092, 13240845, 10965870}, + {-7742563, -8256762, -14768334, -13656260, -23232383, 12387166, + 4498947, 14147411, 29514390, 4302863}, + {-13413405, -12407859, 20757302, -13801832, 14785143, 8976368, + -5061276, -2144373, 17846988, -13971927}, + }, + }, + { + { + {-2244452, -754728, -4597030, -1066309, -6247172, 1455299, + -21647728, -9214789, -5222701, 12650267}, + {-9906797, -16070310, 21134160, 12198166, -27064575, 708126, 387813, + 13770293, -19134326, 10958663}, + {22470984, 12369526, 23446014, -5441109, -21520802, -9698723, + -11772496, -11574455, -25083830, 4271862}, + }, + { + {-25169565, -10053642, -19909332, 15361595, -5984358, 2159192, + 75375, -4278529, -32526221, 8469673}, + {15854970, 4148314, -8893890, 7259002, 11666551, 13824734, + -30531198, 2697372, 24154791, -9460943}, + {15446137, -15806644, 29759747, 14019369, 30811221, -9610191, + -31582008, 12840104, 24913809, 9815020}, + }, + { + {-4709286, -5614269, -31841498, -12288893, -14443537, 10799414, + -9103676, 13438769, 18735128, 9466238}, + {11933045, 9281483, 5081055, -5183824, -2628162, -4905629, -7727821, + -10896103, -22728655, 16199064}, + {14576810, 379472, -26786533, -8317236, -29426508, -10812974, + -102766, 1876699, 30801119, 2164795}, + }, + { + {15995086, 3199873, 13672555, 13712240, -19378835, -4647646, + -13081610, -15496269, -13492807, 1268052}, + {-10290614, -3659039, -3286592, 10948818, 23037027, 3794475, + -3470338, -12600221, -17055369, 3565904}, + {29210088, -9419337, -5919792, -4952785, 10834811, -13327726, + -16512102, -10820713, -27162222, -14030531}, + }, + { + {-13161890, 15508588, 16663704, -8156150, -28349942, 9019123, + -29183421, -3769423, 2244111, -14001979}, + {-5152875, -3800936, -9306475, -6071583, 16243069, 14684434, + -25673088, -16180800, 13491506, 4641841}, + {10813417, 643330, -19188515, -728916, 30292062, -16600078, + 27548447, -7721242, 14476989, -12767431}, + }, + { + {10292079, 9984945, 6481436, 8279905, -7251514, 7032743, 27282937, + -1644259, -27912810, 12651324}, + {-31185513, -813383, 22271204, 11835308, 10201545, 15351028, + 17099662, 3988035, 21721536, -3148940}, + {10202177, -6545839, -31373232, -9574638, -32150642, -8119683, + -12906320, 3852694, 13216206, 14842320}, + }, + { + {-15815640, -10601066, -6538952, -7258995, -6984659, -6581778, + -31500847, 13765824, -27434397, 9900184}, + {14465505, -13833331, -32133984, -14738873, -27443187, 12990492, + 33046193, 15796406, -7051866, -8040114}, + {30924417, -8279620, 6359016, -12816335, 16508377, 9071735, + -25488601, 15413635, 9524356, -7018878}, + }, + { + {12274201, -13175547, 32627641, -1785326, 6736625, 13267305, + 5237659, -5109483, 15663516, 4035784}, + {-2951309, 8903985, 17349946, 601635, -16432815, -4612556, + -13732739, -15889334, -22258478, 4659091}, + {-16916263, -4952973, -30393711, -15158821, 20774812, 15897498, + 5736189, 15026997, -2178256, -13455585}, + }, + }, + { + { + {-8858980, -2219056, 28571666, -10155518, -474467, -10105698, + -3801496, 278095, 23440562, -290208}, + {10226241, -5928702, 15139956, 120818, -14867693, 5218603, 32937275, + 11551483, -16571960, -7442864}, + {17932739, -12437276, -24039557, 10749060, 11316803, 7535897, + 22503767, 5561594, -3646624, 3898661}, + }, + { + {7749907, -969567, -16339731, -16464, -25018111, 15122143, -1573531, + 7152530, 21831162, 1245233}, + {26958459, -14658026, 4314586, 8346991, -5677764, 11960072, + -32589295, -620035, -30402091, -16716212}, + {-12165896, 9166947, 33491384, 
13673479, 29787085, 13096535, + 6280834, 14587357, -22338025, 13987525}, + }, + { + {-24349909, 7778775, 21116000, 15572597, -4833266, -5357778, + -4300898, -5124639, -7469781, -2858068}, + {9681908, -6737123, -31951644, 13591838, -6883821, 386950, 31622781, + 6439245, -14581012, 4091397}, + {-8426427, 1470727, -28109679, -1596990, 3978627, -5123623, + -19622683, 12092163, 29077877, -14741988}, + }, + { + {5269168, -6859726, -13230211, -8020715, 25932563, 1763552, + -5606110, -5505881, -20017847, 2357889}, + {32264008, -15407652, -5387735, -1160093, -2091322, -3946900, + 23104804, -12869908, 5727338, 189038}, + {14609123, -8954470, -6000566, -16622781, -14577387, -7743898, + -26745169, 10942115, -25888931, -14884697}, + }, + { + {20513500, 5557931, -15604613, 7829531, 26413943, -2019404, + -21378968, 7471781, 13913677, -5137875}, + {-25574376, 11967826, 29233242, 12948236, -6754465, 4713227, + -8940970, 14059180, 12878652, 8511905}, + {-25656801, 3393631, -2955415, -7075526, -2250709, 9366908, + -30223418, 6812974, 5568676, -3127656}, + }, + { + {11630004, 12144454, 2116339, 13606037, 27378885, 15676917, + -17408753, -13504373, -14395196, 8070818}, + {27117696, -10007378, -31282771, -5570088, 1127282, 12772488, + -29845906, 10483306, -11552749, -1028714}, + {10637467, -5688064, 5674781, 1072708, -26343588, -6982302, + -1683975, 9177853, -27493162, 15431203}, + }, + { + {20525145, 10892566, -12742472, 12779443, -29493034, 16150075, + -28240519, 14943142, -15056790, -7935931}, + {-30024462, 5626926, -551567, -9981087, 753598, 11981191, 25244767, + -3239766, -3356550, 9594024}, + {-23752644, 2636870, -5163910, -10103818, 585134, 7877383, 11345683, + -6492290, 13352335, -10977084}, + }, + { + {-1931799, -5407458, 3304649, -12884869, 17015806, -4877091, + -29783850, -7752482, -13215537, -319204}, + {20239939, 6607058, 6203985, 3483793, -18386976, -779229, -20723742, + 15077870, -22750759, 14523817}, + {27406042, -6041657, 27423596, -4497394, 4996214, 10002360, + -28842031, -4545494, -30172742, -4805667}, + }, + }, + { + { + {11374242, 12660715, 17861383, -12540833, 10935568, 1099227, + -13886076, -9091740, -27727044, 11358504}, + {-12730809, 10311867, 1510375, 10778093, -2119455, -9145702, + 32676003, 11149336, -26123651, 4985768}, + {-19096303, 341147, -6197485, -239033, 15756973, -8796662, -983043, + 13794114, -19414307, -15621255}, + }, + { + {6490081, 11940286, 25495923, -7726360, 8668373, -8751316, 3367603, + 6970005, -1691065, -9004790}, + {1656497, 13457317, 15370807, 6364910, 13605745, 8362338, -19174622, + -5475723, -16796596, -5031438}, + {-22273315, -13524424, -64685, -4334223, -18605636, -10921968, + -20571065, -7007978, -99853, -10237333}, + }, + { + {17747465, 10039260, 19368299, -4050591, -20630635, -16041286, + 31992683, -15857976, -29260363, -5511971}, + {31932027, -4986141, -19612382, 16366580, 22023614, 88450, 11371999, + -3744247, 4882242, -10626905}, + {29796507, 37186, 19818052, 10115756, -11829032, 3352736, 18551198, + 3272828, -5190932, -4162409}, + }, + { + {12501286, 4044383, -8612957, -13392385, -32430052, 5136599, + -19230378, -3529697, 330070, -3659409}, + {6384877, 2899513, 17807477, 7663917, -2358888, 12363165, 25366522, + -8573892, -271295, 12071499}, + {-8365515, -4042521, 25133448, -4517355, -6211027, 2265927, + -32769618, 1936675, -5159697, 3829363}, + }, + { + {28425966, -5835433, -577090, -4697198, -14217555, 6870930, 7921550, + -6567787, 26333140, 14267664}, + {-11067219, 11871231, 27385719, -10559544, -4585914, -11189312, + 10004786, -8709488, -21761224, 
8930324}, + {-21197785, -16396035, 25654216, -1725397, 12282012, 11008919, + 1541940, 4757911, -26491501, -16408940}, + }, + { + {13537262, -7759490, -20604840, 10961927, -5922820, -13218065, + -13156584, 6217254, -15943699, 13814990}, + {-17422573, 15157790, 18705543, 29619, 24409717, -260476, 27361681, + 9257833, -1956526, -1776914}, + {-25045300, -10191966, 15366585, 15166509, -13105086, 8423556, + -29171540, 12361135, -18685978, 4578290}, + }, + { + {24579768, 3711570, 1342322, -11180126, -27005135, 14124956, + -22544529, 14074919, 21964432, 8235257}, + {-6528613, -2411497, 9442966, -5925588, 12025640, -1487420, + -2981514, -1669206, 13006806, 2355433}, + {-16304899, -13605259, -6632427, -5142349, 16974359, -10911083, + 27202044, 1719366, 1141648, -12796236}, + }, + { + {-12863944, -13219986, -8318266, -11018091, -6810145, -4843894, + 13475066, -3133972, 32674895, 13715045}, + {11423335, -5468059, 32344216, 8962751, 24989809, 9241752, + -13265253, 16086212, -28740881, -15642093}, + {-1409668, 12530728, -6368726, 10847387, 19531186, -14132160, + -11709148, 7791794, -27245943, 4383347}, + }, + }, + { + { + {-28970898, 5271447, -1266009, -9736989, -12455236, 16732599, + -4862407, -4906449, 27193557, 6245191}, + {-15193956, 5362278, -1783893, 2695834, 4960227, 12840725, 23061898, + 3260492, 22510453, 8577507}, + {-12632451, 11257346, -32692994, 13548177, -721004, 10879011, + 31168030, 13952092, -29571492, -3635906}, + }, + { + {3877321, -9572739, 32416692, 5405324, -11004407, -13656635, + 3759769, 11935320, 5611860, 8164018}, + {-16275802, 14667797, 15906460, 12155291, -22111149, -9039718, + 32003002, -8832289, 5773085, -8422109}, + {-23788118, -8254300, 1950875, 8937633, 18686727, 16459170, -905725, + 12376320, 31632953, 190926}, + }, + { + {-24593607, -16138885, -8423991, 13378746, 14162407, 6901328, + -8288749, 4508564, -25341555, -3627528}, + {8884438, -5884009, 6023974, 10104341, -6881569, -4941533, 18722941, + -14786005, -1672488, 827625}, + {-32720583, -16289296, -32503547, 7101210, 13354605, 2659080, + -1800575, -14108036, -24878478, 1541286}, + }, + { + {2901347, -1117687, 3880376, -10059388, -17620940, -3612781, + -21802117, -3567481, 20456845, -1885033}, + {27019610, 12299467, -13658288, -1603234, -12861660, -4861471, + -19540150, -5016058, 29439641, 15138866}, + {21536104, -6626420, -32447818, -10690208, -22408077, 5175814, + -5420040, -16361163, 7779328, 109896}, + }, + { + {30279744, 14648750, -8044871, 6425558, 13639621, -743509, 28698390, + 12180118, 23177719, -554075}, + {26572847, 3405927, -31701700, 12890905, -19265668, 5335866, + -6493768, 2378492, 4439158, -13279347}, + {-22716706, 3489070, -9225266, -332753, 18875722, -1140095, + 14819434, -12731527, -17717757, -5461437}, + }, + { + {-5056483, 16566551, 15953661, 3767752, -10436499, 15627060, + -820954, 2177225, 8550082, -15114165}, + {-18473302, 16596775, -381660, 15663611, 22860960, 15585581, + -27844109, -3582739, -23260460, -8428588}, + {-32480551, 15707275, -8205912, -5652081, 29464558, 2713815, + -22725137, 15860482, -21902570, 1494193}, + }, + { + {-19562091, -14087393, -25583872, -9299552, 13127842, 759709, + 21923482, 16529112, 8742704, 12967017}, + {-28464899, 1553205, 32536856, -10473729, -24691605, -406174, + -8914625, -2933896, -29903758, 15553883}, + {21877909, 3230008, 9881174, 10539357, -4797115, 2841332, 11543572, + 14513274, 19375923, -12647961}, + }, + { + {8832269, -14495485, 13253511, 5137575, 5037871, 4078777, 24880818, + -6222716, 2862653, 9455043}, + {29306751, 5123106, 20245049, -14149889, 
9592566, 8447059, -2077124, + -2990080, 15511449, 4789663}, + {-20679756, 7004547, 8824831, -9434977, -4045704, -3750736, + -5754762, 108893, 23513200, 16652362}, + }, + }, + { + { + {-33256173, 4144782, -4476029, -6579123, 10770039, -7155542, + -6650416, -12936300, -18319198, 10212860}, + {2756081, 8598110, 7383731, -6859892, 22312759, -1105012, 21179801, + 2600940, -9988298, -12506466}, + {-24645692, 13317462, -30449259, -15653928, 21365574, -10869657, + 11344424, 864440, -2499677, -16710063}, + }, + { + {-26432803, 6148329, -17184412, -14474154, 18782929, -275997, + -22561534, 211300, 2719757, 4940997}, + {-1323882, 3911313, -6948744, 14759765, -30027150, 7851207, + 21690126, 8518463, 26699843, 5276295}, + {-13149873, -6429067, 9396249, 365013, 24703301, -10488939, 1321586, + 149635, -15452774, 7159369}, + }, + { + {9987780, -3404759, 17507962, 9505530, 9731535, -2165514, 22356009, + 8312176, 22477218, -8403385}, + {18155857, -16504990, 19744716, 9006923, 15154154, -10538976, + 24256460, -4864995, -22548173, 9334109}, + {2986088, -4911893, 10776628, -3473844, 10620590, -7083203, + -21413845, 14253545, -22587149, 536906}, + }, + { + {4377756, 8115836, 24567078, 15495314, 11625074, 13064599, 7390551, + 10589625, 10838060, -15420424}, + {-19342404, 867880, 9277171, -3218459, -14431572, -1986443, + 19295826, -15796950, 6378260, 699185}, + {7895026, 4057113, -7081772, -13077756, -17886831, -323126, -716039, + 15693155, -5045064, -13373962}, + }, + { + {-7737563, -5869402, -14566319, -7406919, 11385654, 13201616, + 31730678, -10962840, -3918636, -9669325}, + {10188286, -15770834, -7336361, 13427543, 22223443, 14896287, + 30743455, 7116568, -21786507, 5427593}, + {696102, 13206899, 27047647, -10632082, 15285305, -9853179, + 10798490, -4578720, 19236243, 12477404}, + }, + { + {-11229439, 11243796, -17054270, -8040865, -788228, -8167967, + -3897669, 11180504, -23169516, 7733644}, + {17800790, -14036179, -27000429, -11766671, 23887827, 3149671, + 23466177, -10538171, 10322027, 15313801}, + {26246234, 11968874, 32263343, -5468728, 6830755, -13323031, + -15794704, -101982, -24449242, 10890804}, + }, + { + {-31365647, 10271363, -12660625, -6267268, 16690207, -13062544, + -14982212, 16484931, 25180797, -5334884}, + {-586574, 10376444, -32586414, -11286356, 19801893, 10997610, + 2276632, 9482883, 316878, 13820577}, + {-9882808, -4510367, -2115506, 16457136, -11100081, 11674996, + 30756178, -7515054, 30696930, -3712849}, + }, + { + {32988917, -9603412, 12499366, 7910787, -10617257, -11931514, + -7342816, -9985397, -32349517, 7392473}, + {-8855661, 15927861, 9866406, -3649411, -2396914, -16655781, + -30409476, -9134995, 25112947, -2926644}, + {-2504044, -436966, 25621774, -5678772, 15085042, -5479877, + -24884878, -13526194, 5537438, -13914319}, + }, + }, + { + { + {-11225584, 2320285, -9584280, 10149187, -33444663, 5808648, + -14876251, -1729667, 31234590, 6090599}, + {-9633316, 116426, 26083934, 2897444, -6364437, -2688086, 609721, + 15878753, -6970405, -9034768}, + {-27757857, 247744, -15194774, -9002551, 23288161, -10011936, + -23869595, 6503646, 20650474, 1804084}, + }, + { + {-27589786, 15456424, 8972517, 8469608, 15640622, 4439847, 3121995, + -10329713, 27842616, -202328}, + {-15306973, 2839644, 22530074, 10026331, 4602058, 5048462, 28248656, + 5031932, -11375082, 12714369}, + {20807691, -7270825, 29286141, 11421711, -27876523, -13868230, + -21227475, 1035546, -19733229, 12796920}, + }, + { + {12076899, -14301286, -8785001, -11848922, -25012791, 16400684, + -17591495, -12899438, 3480665, 
-15182815}, + {-32361549, 5457597, 28548107, 7833186, 7303070, -11953545, + -24363064, -15921875, -33374054, 2771025}, + {-21389266, 421932, 26597266, 6860826, 22486084, -6737172, + -17137485, -4210226, -24552282, 15673397}, + }, + { + {-20184622, 2338216, 19788685, -9620956, -4001265, -8740893, + -20271184, 4733254, 3727144, -12934448}, + {6120119, 814863, -11794402, -622716, 6812205, -15747771, 2019594, + 7975683, 31123697, -10958981}, + {30069250, -11435332, 30434654, 2958439, 18399564, -976289, + 12296869, 9204260, -16432438, 9648165}, + }, + { + {32705432, -1550977, 30705658, 7451065, -11805606, 9631813, 3305266, + 5248604, -26008332, -11377501}, + {17219865, 2375039, -31570947, -5575615, -19459679, 9219903, 294711, + 15298639, 2662509, -16297073}, + {-1172927, -7558695, -4366770, -4287744, -21346413, -8434326, + 32087529, -1222777, 32247248, -14389861}, + }, + { + {14312628, 1221556, 17395390, -8700143, -4945741, -8684635, + -28197744, -9637817, -16027623, -13378845}, + {-1428825, -9678990, -9235681, 6549687, -7383069, -468664, 23046502, + 9803137, 17597934, 2346211}, + {18510800, 15337574, 26171504, 981392, -22241552, 7827556, + -23491134, -11323352, 3059833, -11782870}, + }, + { + {10141598, 6082907, 17829293, -1947643, 9830092, 13613136, + -25556636, -5544586, -33502212, 3592096}, + {33114168, -15889352, -26525686, -13343397, 33076705, 8716171, + 1151462, 1521897, -982665, -6837803}, + {-32939165, -4255815, 23947181, -324178, -33072974, -12305637, + -16637686, 3891704, 26353178, 693168}, + }, + { + {30374239, 1595580, -16884039, 13186931, 4600344, 406904, 9585294, + -400668, 31375464, 14369965}, + {-14370654, -7772529, 1510301, 6434173, -18784789, -6262728, + 32732230, -13108839, 17901441, 16011505}, + {18171223, -11934626, -12500402, 15197122, -11038147, -15230035, + -19172240, -16046376, 8764035, 12309598}, + }, + }, + { + { + {5975908, -5243188, -19459362, -9681747, -11541277, 14015782, + -23665757, 1228319, 17544096, -10593782}, + {5811932, -1715293, 3442887, -2269310, -18367348, -8359541, + -18044043, -15410127, -5565381, 12348900}, + {-31399660, 11407555, 25755363, 6891399, -3256938, 14872274, + -24849353, 8141295, -10632534, -585479}, + }, + { + {-12675304, 694026, -5076145, 13300344, 14015258, -14451394, + -9698672, -11329050, 30944593, 1130208}, + {8247766, -6710942, -26562381, -7709309, -14401939, -14648910, + 4652152, 2488540, 23550156, -271232}, + {17294316, -3788438, 7026748, 15626851, 22990044, 113481, 2267737, + -5908146, -408818, -137719}, + }, + { + {16091085, -16253926, 18599252, 7340678, 2137637, -1221657, + -3364161, 14550936, 3260525, -7166271}, + {-4910104, -13332887, 18550887, 10864893, -16459325, -7291596, + -23028869, -13204905, -12748722, 2701326}, + {-8574695, 16099415, 4629974, -16340524, -20786213, -6005432, + -10018363, 9276971, 11329923, 1862132}, + }, + { + {14763076, -15903608, -30918270, 3689867, 3511892, 10313526, + -21951088, 12219231, -9037963, -940300}, + {8894987, -3446094, 6150753, 3013931, 301220, 15693451, -31981216, + -2909717, -15438168, 11595570}, + {15214962, 3537601, -26238722, -14058872, 4418657, -15230761, + 13947276, 10730794, -13489462, -4363670}, + }, + { + {-2538306, 7682793, 32759013, 263109, -29984731, -7955452, + -22332124, -10188635, 977108, 699994}, + {-12466472, 4195084, -9211532, 550904, -15565337, 12917920, + 19118110, -439841, -30534533, -14337913}, + {31788461, -14507657, 4799989, 7372237, 8808585, -14747943, 9408237, + -10051775, 12493932, -5409317}, + }, + { + {-25680606, 5260744, -19235809, -6284470, -3695942, 
16566087, + 27218280, 2607121, 29375955, 6024730}, + {842132, -2794693, -4763381, -8722815, 26332018, -12405641, + 11831880, 6985184, -9940361, 2854096}, + {-4847262, -7969331, 2516242, -5847713, 9695691, -7221186, 16512645, + 960770, 12121869, 16648078}, + }, + { + {-15218652, 14667096, -13336229, 2013717, 30598287, -464137, + -31504922, -7882064, 20237806, 2838411}, + {-19288047, 4453152, 15298546, -16178388, 22115043, -15972604, + 12544294, -13470457, 1068881, -12499905}, + {-9558883, -16518835, 33238498, 13506958, 30505848, -1114596, + -8486907, -2630053, 12521378, 4845654}, + }, + { + {-28198521, 10744108, -2958380, 10199664, 7759311, -13088600, + 3409348, -873400, -6482306, -12885870}, + {-23561822, 6230156, -20382013, 10655314, -24040585, -11621172, + 10477734, -1240216, -3113227, 13974498}, + {12966261, 15550616, -32038948, -1615346, 21025980, -629444, + 5642325, 7188737, 18895762, 12629579}, + }, + }, + { + { + {14741879, -14946887, 22177208, -11721237, 1279741, 8058600, + 11758140, 789443, 32195181, 3895677}, + {10758205, 15755439, -4509950, 9243698, -4879422, 6879879, -2204575, + -3566119, -8982069, 4429647}, + {-2453894, 15725973, -20436342, -10410672, -5803908, -11040220, + -7135870, -11642895, 18047436, -15281743}, + }, + { + {-25173001, -11307165, 29759956, 11776784, -22262383, -15820455, + 10993114, -12850837, -17620701, -9408468}, + {21987233, 700364, -24505048, 14972008, -7774265, -5718395, + 32155026, 2581431, -29958985, 8773375}, + {-25568350, 454463, -13211935, 16126715, 25240068, 8594567, + 20656846, 12017935, -7874389, -13920155}, + }, + { + {6028182, 6263078, -31011806, -11301710, -818919, 2461772, + -31841174, -5468042, -1721788, -2776725}, + {-12278994, 16624277, 987579, -5922598, 32908203, 1248608, 7719845, + -4166698, 28408820, 6816612}, + {-10358094, -8237829, 19549651, -12169222, 22082623, 16147817, + 20613181, 13982702, -10339570, 5067943}, + }, + { + {-30505967, -3821767, 12074681, 13582412, -19877972, 2443951, + -19719286, 12746132, 5331210, -10105944}, + {30528811, 3601899, -1957090, 4619785, -27361822, -15436388, + 24180793, -12570394, 27679908, -1648928}, + {9402404, -13957065, 32834043, 10838634, -26580150, -13237195, + 26653274, -8685565, 22611444, -12715406}, + }, + { + {22190590, 1118029, 22736441, 15130463, -30460692, -5991321, + 19189625, -4648942, 4854859, 6622139}, + {-8310738, -2953450, -8262579, -3388049, -10401731, -271929, + 13424426, -3567227, 26404409, 13001963}, + {-31241838, -15415700, -2994250, 8939346, 11562230, -12840670, + -26064365, -11621720, -15405155, 11020693}, + }, + { + {1866042, -7949489, -7898649, -10301010, 12483315, 13477547, + 3175636, -12424163, 28761762, 1406734}, + {-448555, -1777666, 13018551, 3194501, -9580420, -11161737, + 24760585, -4347088, 25577411, -13378680}, + {-24290378, 4759345, -690653, -1852816, 2066747, 10693769, + -29595790, 9884936, -9368926, 4745410}, + }, + { + {-9141284, 6049714, -19531061, -4341411, -31260798, 9944276, + -15462008, -11311852, 10931924, -11931931}, + {-16561513, 14112680, -8012645, 4817318, -8040464, -11414606, + -22853429, 10856641, -20470770, 13434654}, + {22759489, -10073434, -16766264, -1871422, 13637442, -10168091, + 1765144, -12654326, 28445307, -5364710}, + }, + { + {29875063, 12493613, 2795536, -3786330, 1710620, 15181182, + -10195717, -8788675, 9074234, 1167180}, + {-26205683, 11014233, -9842651, -2635485, -26908120, 7532294, + -18716888, -9535498, 3843903, 9367684}, + {-10969595, -6403711, 9591134, 9582310, 11349256, 108879, 16235123, + 8601684, -139197, 4242895}, + }, + }, 
+ { + { + {22092954, -13191123, -2042793, -11968512, 32186753, -11517388, + -6574341, 2470660, -27417366, 16625501}, + {-11057722, 3042016, 13770083, -9257922, 584236, -544855, -7770857, + 2602725, -27351616, 14247413}, + {6314175, -10264892, -32772502, 15957557, -10157730, 168750, + -8618807, 14290061, 27108877, -1180880}, + }, + { + {-8586597, -7170966, 13241782, 10960156, -32991015, -13794596, + 33547976, -11058889, -27148451, 981874}, + {22833440, 9293594, -32649448, -13618667, -9136966, 14756819, + -22928859, -13970780, -10479804, -16197962}, + {-7768587, 3326786, -28111797, 10783824, 19178761, 14905060, + 22680049, 13906969, -15933690, 3797899}, + }, + { + {21721356, -4212746, -12206123, 9310182, -3882239, -13653110, + 23740224, -2709232, 20491983, -8042152}, + {9209270, -15135055, -13256557, -6167798, -731016, 15289673, + 25947805, 15286587, 30997318, -6703063}, + {7392032, 16618386, 23946583, -8039892, -13265164, -1533858, + -14197445, -2321576, 17649998, -250080}, + }, + { + {-9301088, -14193827, 30609526, -3049543, -25175069, -1283752, + -15241566, -9525724, -2233253, 7662146}, + {-17558673, 1763594, -33114336, 15908610, -30040870, -12174295, + 7335080, -8472199, -3174674, 3440183}, + {-19889700, -5977008, -24111293, -9688870, 10799743, -16571957, + 40450, -4431835, 4862400, 1133}, + }, + { + {-32856209, -7873957, -5422389, 14860950, -16319031, 7956142, + 7258061, 311861, -30594991, -7379421}, + {-3773428, -1565936, 28985340, 7499440, 24445838, 9325937, 29727763, + 16527196, 18278453, 15405622}, + {-4381906, 8508652, -19898366, -3674424, -5984453, 15149970, + -13313598, 843523, -21875062, 13626197}, + }, + { + {2281448, -13487055, -10915418, -2609910, 1879358, 16164207, + -10783882, 3953792, 13340839, 15928663}, + {31727126, -7179855, -18437503, -8283652, 2875793, -16390330, + -25269894, -7014826, -23452306, 5964753}, + {4100420, -5959452, -17179337, 6017714, -18705837, 12227141, + -26684835, 11344144, 2538215, -7570755}, + }, + { + {-9433605, 6123113, 11159803, -2156608, 30016280, 14966241, + -20474983, 1485421, -629256, -15958862}, + {-26804558, 4260919, 11851389, 9658551, -32017107, 16367492, + -20205425, -13191288, 11659922, -11115118}, + {26180396, 10015009, -30844224, -8581293, 5418197, 9480663, 2231568, + -10170080, 33100372, -1306171}, + }, + { + {15121113, -5201871, -10389905, 15427821, -27509937, -15992507, + 21670947, 4486675, -5931810, -14466380}, + {16166486, -9483733, -11104130, 6023908, -31926798, -1364923, + 2340060, -16254968, -10735770, -10039824}, + {28042865, -3557089, -12126526, 12259706, -3717498, -6945899, + 6766453, -8689599, 18036436, 5803270}, + }, + }, + { + { + {-817581, 6763912, 11803561, 1585585, 10958447, -2671165, 23855391, + 4598332, -6159431, -14117438}, + {-31031306, -14256194, 17332029, -2383520, 31312682, -5967183, + 696309, 50292, -20095739, 11763584}, + {-594563, -2514283, -32234153, 12643980, 12650761, 14811489, 665117, + -12613632, -19773211, -10713562}, + }, + { + {30464590, -11262872, -4127476, -12734478, 19835327, -7105613, + -24396175, 2075773, -17020157, 992471}, + {18357185, -6994433, 7766382, 16342475, -29324918, 411174, 14578841, + 8080033, -11574335, -10601610}, + {19598397, 10334610, 12555054, 2555664, 18821899, -10339780, + 21873263, 16014234, 26224780, 16452269}, + }, + { + {-30223925, 5145196, 5944548, 16385966, 3976735, 2009897, -11377804, + -7618186, -20533829, 3698650}, + {14187449, 3448569, -10636236, -10810935, -22663880, -3433596, + 7268410, -10890444, 27394301, 12015369}, + {19695761, 16087646, 28032085, 12999827, 
6817792, 11427614, + 20244189, -1312777, -13259127, -3402461}, + }, + { + {30860103, 12735208, -1888245, -4699734, -16974906, 2256940, + -8166013, 12298312, -8550524, -10393462}, + {-5719826, -11245325, -1910649, 15569035, 26642876, -7587760, + -5789354, -15118654, -4976164, 12651793}, + {-2848395, 9953421, 11531313, -5282879, 26895123, -12697089, + -13118820, -16517902, 9768698, -2533218}, + }, + { + {-24719459, 1894651, -287698, -4704085, 15348719, -8156530, + 32767513, 12765450, 4940095, 10678226}, + {18860224, 15980149, -18987240, -1562570, -26233012, -11071856, + -7843882, 13944024, -24372348, 16582019}, + {-15504260, 4970268, -29893044, 4175593, -20993212, -2199756, + -11704054, 15444560, -11003761, 7989037}, + }, + { + {31490452, 5568061, -2412803, 2182383, -32336847, 4531686, + -32078269, 6200206, -19686113, -14800171}, + {-17308668, -15879940, -31522777, -2831, -32887382, 16375549, + 8680158, -16371713, 28550068, -6857132}, + {-28126887, -5688091, 16837845, -1820458, -6850681, 12700016, + -30039981, 4364038, 1155602, 5988841}, + }, + { + {21890435, -13272907, -12624011, 12154349, -7831873, 15300496, + 23148983, -4470481, 24618407, 8283181}, + {-33136107, -10512751, 9975416, 6841041, -31559793, 16356536, + 3070187, -7025928, 1466169, 10740210}, + {-1509399, -15488185, -13503385, -10655916, 32799044, 909394, + -13938903, -5779719, -32164649, -15327040}, + }, + { + {3960823, -14267803, -28026090, -15918051, -19404858, 13146868, + 15567327, 951507, -3260321, -573935}, + {24740841, 5052253, -30094131, 8961361, 25877428, 6165135, + -24368180, 14397372, -7380369, -6144105}, + {-28888365, 3510803, -28103278, -1158478, -11238128, -10631454, + -15441463, -14453128, -1625486, -6494814}, + }, + }, + { + { + {793299, -9230478, 8836302, -6235707, -27360908, -2369593, 33152843, + -4885251, -9906200, -621852}, + {5666233, 525582, 20782575, -8038419, -24538499, 14657740, 16099374, + 1468826, -6171428, -15186581}, + {-4859255, -3779343, -2917758, -6748019, 7778750, 11688288, + -30404353, -9871238, -1558923, -9863646}, + }, + { + {10896332, -7719704, 824275, 472601, -19460308, 3009587, 25248958, + 14783338, -30581476, -15757844}, + {10566929, 12612572, -31944212, 11118703, -12633376, 12362879, + 21752402, 8822496, 24003793, 14264025}, + {27713862, -7355973, -11008240, 9227530, 27050101, 2504721, + 23886875, -13117525, 13958495, -5732453}, + }, + { + {-23481610, 4867226, -27247128, 3900521, 29838369, -8212291, + -31889399, -10041781, 7340521, -15410068}, + {4646514, -8011124, -22766023, -11532654, 23184553, 8566613, + 31366726, -1381061, -15066784, -10375192}, + {-17270517, 12723032, -16993061, 14878794, 21619651, -6197576, + 27584817, 3093888, -8843694, 3849921}, + }, + { + {-9064912, 2103172, 25561640, -15125738, -5239824, 9582958, + 32477045, -9017955, 5002294, -15550259}, + {-12057553, -11177906, 21115585, -13365155, 8808712, -12030708, + 16489530, 13378448, -25845716, 12741426}, + {-5946367, 10645103, -30911586, 15390284, -3286982, -7118677, + 24306472, 15852464, 28834118, -7646072}, + }, + { + {-17335748, -9107057, -24531279, 9434953, -8472084, -583362, + -13090771, 455841, 20461858, 5491305}, + {13669248, -16095482, -12481974, -10203039, -14569770, -11893198, + -24995986, 11293807, -28588204, -9421832}, + {28497928, 6272777, -33022994, 14470570, 8906179, -1225630, + 18504674, -14165166, 29867745, -8795943}, + }, + { + {-16207023, 13517196, -27799630, -13697798, 24009064, -6373891, + -6367600, -13175392, 22853429, -4012011}, + {24191378, 16712145, -13931797, 15217831, 14542237, 1646131, + 
18603514, -11037887, 12876623, -2112447}, + {17902668, 4518229, -411702, -2829247, 26878217, 5258055, -12860753, + 608397, 16031844, 3723494}, + }, + { + {-28632773, 12763728, -20446446, 7577504, 33001348, -13017745, + 17558842, -7872890, 23896954, -4314245}, + {-20005381, -12011952, 31520464, 605201, 2543521, 5991821, -2945064, + 7229064, -9919646, -8826859}, + {28816045, 298879, -28165016, -15920938, 19000928, -1665890, + -12680833, -2949325, -18051778, -2082915}, + }, + { + {16000882, -344896, 3493092, -11447198, -29504595, -13159789, + 12577740, 16041268, -19715240, 7847707}, + {10151868, 10572098, 27312476, 7922682, 14825339, 4723128, + -32855931, -6519018, -10020567, 3852848}, + {-11430470, 15697596, -21121557, -4420647, 5386314, 15063598, + 16514493, -15932110, 29330899, -15076224}, + }, + }, + { + { + {-25499735, -4378794, -15222908, -6901211, 16615731, 2051784, + 3303702, 15490, -27548796, 12314391}, + {15683520, -6003043, 18109120, -9980648, 15337968, -5997823, + -16717435, 15921866, 16103996, -3731215}, + {-23169824, -10781249, 13588192, -1628807, -3798557, -1074929, + -19273607, 5402699, -29815713, -9841101}, + }, + { + {23190676, 2384583, -32714340, 3462154, -29903655, -1529132, + -11266856, 8911517, -25205859, 2739713}, + {21374101, -3554250, -33524649, 9874411, 15377179, 11831242, + -33529904, 6134907, 4931255, 11987849}, + {-7732, -2978858, -16223486, 7277597, 105524, -322051, -31480539, + 13861388, -30076310, 10117930}, + }, + { + {-29501170, -10744872, -26163768, 13051539, -25625564, 5089643, + -6325503, 6704079, 12890019, 15728940}, + {-21972360, -11771379, -951059, -4418840, 14704840, 2695116, 903376, + -10428139, 12885167, 8311031}, + {-17516482, 5352194, 10384213, -13811658, 7506451, 13453191, + 26423267, 4384730, 1888765, -5435404}, + }, + { + {-25817338, -3107312, -13494599, -3182506, 30896459, -13921729, + -32251644, -12707869, -19464434, -3340243}, + {-23607977, -2665774, -526091, 4651136, 5765089, 4618330, 6092245, + 14845197, 17151279, -9854116}, + {-24830458, -12733720, -15165978, 10367250, -29530908, -265356, + 22825805, -7087279, -16866484, 16176525}, + }, + { + {-23583256, 6564961, 20063689, 3798228, -4740178, 7359225, 2006182, + -10363426, -28746253, -10197509}, + {-10626600, -4486402, -13320562, -5125317, 3432136, -6393229, + 23632037, -1940610, 32808310, 1099883}, + {15030977, 5768825, -27451236, -2887299, -6427378, -15361371, + -15277896, -6809350, 2051441, -15225865}, + }, + { + {-3362323, -7239372, 7517890, 9824992, 23555850, 295369, 5148398, + -14154188, -22686354, 16633660}, + {4577086, -16752288, 13249841, -15304328, 19958763, -14537274, + 18559670, -10759549, 8402478, -9864273}, + {-28406330, -1051581, -26790155, -907698, -17212414, -11030789, + 9453451, -14980072, 17983010, 9967138}, + }, + { + {-25762494, 6524722, 26585488, 9969270, 24709298, 1220360, -1677990, + 7806337, 17507396, 3651560}, + {-10420457, -4118111, 14584639, 15971087, -15768321, 8861010, + 26556809, -5574557, -18553322, -11357135}, + {2839101, 14284142, 4029895, 3472686, 14402957, 12689363, -26642121, + 8459447, -5605463, -7621941}, + }, + { + {-4839289, -3535444, 9744961, 2871048, 25113978, 3187018, -25110813, + -849066, 17258084, -7977739}, + {18164541, -10595176, -17154882, -1542417, 19237078, -9745295, + 23357533, -15217008, 26908270, 12150756}, + {-30264870, -7647865, 5112249, -7036672, -1499807, -6974257, 43168, + -5537701, -32302074, 16215819}, + }, + }, + { + { + {-6898905, 9824394, -12304779, -4401089, -31397141, -6276835, + 32574489, 12532905, -7503072, -8675347}, + 
{-27343522, -16515468, -27151524, -10722951, 946346, 16291093, + 254968, 7168080, 21676107, -1943028}, + {21260961, -8424752, -16831886, -11920822, -23677961, 3968121, + -3651949, -6215466, -3556191, -7913075}, + }, + { + {16544754, 13250366, -16804428, 15546242, -4583003, 12757258, + -2462308, -8680336, -18907032, -9662799}, + {-2415239, -15577728, 18312303, 4964443, -15272530, -12653564, + 26820651, 16690659, 25459437, -4564609}, + {-25144690, 11425020, 28423002, -11020557, -6144921, -15826224, + 9142795, -2391602, -6432418, -1644817}, + }, + { + {-23104652, 6253476, 16964147, -3768872, -25113972, -12296437, + -27457225, -16344658, 6335692, 7249989}, + {-30333227, 13979675, 7503222, -12368314, -11956721, -4621693, + -30272269, 2682242, 25993170, -12478523}, + {4364628, 5930691, 32304656, -10044554, -8054781, 15091131, + 22857016, -10598955, 31820368, 15075278}, + }, + { + {31879134, -8918693, 17258761, 90626, -8041836, -4917709, 24162788, + -9650886, -17970238, 12833045}, + {19073683, 14851414, -24403169, -11860168, 7625278, 11091125, + -19619190, 2074449, -9413939, 14905377}, + {24483667, -11935567, -2518866, -11547418, -1553130, 15355506, + -25282080, 9253129, 27628530, -7555480}, + }, + { + {17597607, 8340603, 19355617, 552187, 26198470, -3176583, 4593324, + -9157582, -14110875, 15297016}, + {510886, 14337390, -31785257, 16638632, 6328095, 2713355, -20217417, + -11864220, 8683221, 2921426}, + {18606791, 11874196, 27155355, -5281482, -24031742, 6265446, + -25178240, -1278924, 4674690, 13890525}, + }, + { + {13609624, 13069022, -27372361, -13055908, 24360586, 9592974, + 14977157, 9835105, 4389687, 288396}, + {9922506, -519394, 13613107, 5883594, -18758345, -434263, -12304062, + 8317628, 23388070, 16052080}, + {12720016, 11937594, -31970060, -5028689, 26900120, 8561328, + -20155687, -11632979, -14754271, -10812892}, + }, + { + {15961858, 14150409, 26716931, -665832, -22794328, 13603569, + 11829573, 7467844, -28822128, 929275}, + {11038231, -11582396, -27310482, -7316562, -10498527, -16307831, + -23479533, -9371869, -21393143, 2465074}, + {20017163, -4323226, 27915242, 1529148, 12396362, 15675764, + 13817261, -9658066, 2463391, -4622140}, + }, + { + {-16358878, -12663911, -12065183, 4996454, -1256422, 1073572, + 9583558, 12851107, 4003896, 12673717}, + {-1731589, -15155870, -3262930, 16143082, 19294135, 13385325, + 14741514, -9103726, 7903886, 2348101}, + {24536016, -16515207, 12715592, -3862155, 1511293, 10047386, + -3842346, -7129159, -28377538, 10048127}, + }, + }, + { + { + {-12622226, -6204820, 30718825, 2591312, -10617028, 12192840, + 18873298, -7297090, -32297756, 15221632}, + {-26478122, -11103864, 11546244, -1852483, 9180880, 7656409, + -21343950, 2095755, 29769758, 6593415}, + {-31994208, -2907461, 4176912, 3264766, 12538965, -868111, 26312345, + -6118678, 30958054, 8292160}, + }, + { + {31429822, -13959116, 29173532, 15632448, 12174511, -2760094, + 32808831, 3977186, 26143136, -3148876}, + {22648901, 1402143, -22799984, 13746059, 7936347, 365344, -8668633, + -1674433, -3758243, -2304625}, + {-15491917, 8012313, -2514730, -12702462, -23965846, -10254029, + -1612713, -1535569, -16664475, 8194478}, + }, + { + {27338066, -7507420, -7414224, 10140405, -19026427, -6589889, + 27277191, 8855376, 28572286, 3005164}, + {26287124, 4821776, 25476601, -4145903, -3764513, -15788984, + -18008582, 1182479, -26094821, -13079595}, + {-7171154, 3178080, 23970071, 6201893, -17195577, -4489192, + -21876275, -13982627, 32208683, -1198248}, + }, + { + {-16657702, 2817643, -10286362, 14811298, 
6024667, 13349505, + -27315504, -10497842, -27672585, -11539858}, + {15941029, -9405932, -21367050, 8062055, 31876073, -238629, + -15278393, -1444429, 15397331, -4130193}, + {8934485, -13485467, -23286397, -13423241, -32446090, 14047986, + 31170398, -1441021, -27505566, 15087184}, + }, + { + {-18357243, -2156491, 24524913, -16677868, 15520427, -6360776, + -15502406, 11461896, 16788528, -5868942}, + {-1947386, 16013773, 21750665, 3714552, -17401782, -16055433, + -3770287, -10323320, 31322514, -11615635}, + {21426655, -5650218, -13648287, -5347537, -28812189, -4920970, + -18275391, -14621414, 13040862, -12112948}, + }, + { + {11293895, 12478086, -27136401, 15083750, -29307421, 14748872, + 14555558, -13417103, 1613711, 4896935}, + {-25894883, 15323294, -8489791, -8057900, 25967126, -13425460, + 2825960, -4897045, -23971776, -11267415}, + {-15924766, -5229880, -17443532, 6410664, 3622847, 10243618, + 20615400, 12405433, -23753030, -8436416}, + }, + { + {-7091295, 12556208, -20191352, 9025187, -17072479, 4333801, + 4378436, 2432030, 23097949, -566018}, + {4565804, -16025654, 20084412, -7842817, 1724999, 189254, 24767264, + 10103221, -18512313, 2424778}, + {366633, -11976806, 8173090, -6890119, 30788634, 5745705, -7168678, + 1344109, -3642553, 12412659}, + }, + { + {-24001791, 7690286, 14929416, -168257, -32210835, -13412986, + 24162697, -15326504, -3141501, 11179385}, + {18289522, -14724954, 8056945, 16430056, -21729724, 7842514, + -6001441, -1486897, -18684645, -11443503}, + {476239, 6601091, -6152790, -9723375, 17503545, -4863900, 27672959, + 13403813, 11052904, 5219329}, + }, + }, + { + { + {20678546, -8375738, -32671898, 8849123, -5009758, 14574752, + 31186971, -3973730, 9014762, -8579056}, + {-13644050, -10350239, -15962508, 5075808, -1514661, -11534600, + -33102500, 9160280, 8473550, -3256838}, + {24900749, 14435722, 17209120, -15292541, -22592275, 9878983, + -7689309, -16335821, -24568481, 11788948}, + }, + { + {-3118155, -11395194, -13802089, 14797441, 9652448, -6845904, + -20037437, 10410733, -24568470, -1458691}, + {-15659161, 16736706, -22467150, 10215878, -9097177, 7563911, + 11871841, -12505194, -18513325, 8464118}, + {-23400612, 8348507, -14585951, -861714, -3950205, -6373419, + 14325289, 8628612, 33313881, -8370517}, + }, + { + {-20186973, -4967935, 22367356, 5271547, -1097117, -4788838, + -24805667, -10236854, -8940735, -5818269}, + {-6948785, -1795212, -32625683, -16021179, 32635414, -7374245, + 15989197, -12838188, 28358192, -4253904}, + {-23561781, -2799059, -32351682, -1661963, -9147719, 10429267, + -16637684, 4072016, -5351664, 5596589}, + }, + { + {-28236598, -3390048, 12312896, 6213178, 3117142, 16078565, + 29266239, 2557221, 1768301, 15373193}, + {-7243358, -3246960, -4593467, -7553353, -127927, -912245, -1090902, + -4504991, -24660491, 3442910}, + {-30210571, 5124043, 14181784, 8197961, 18964734, -11939093, + 22597931, 7176455, -18585478, 13365930}, + }, + { + {-7877390, -1499958, 8324673, 4690079, 6261860, 890446, 24538107, + -8570186, -9689599, -3031667}, + {25008904, -10771599, -4305031, -9638010, 16265036, 15721635, + 683793, -11823784, 15723479, -15163481}, + {-9660625, 12374379, -27006999, -7026148, -7724114, -12314514, + 11879682, 5400171, 519526, -1235876}, + }, + { + {22258397, -16332233, -7869817, 14613016, -22520255, -2950923, + -20353881, 7315967, 16648397, 7605640}, + {-8081308, -8464597, -8223311, 9719710, 19259459, -15348212, + 23994942, -5281555, -9468848, 4763278}, + {-21699244, 9220969, -15730624, 1084137, -25476107, -2852390, + 31088447, -7764523, 
-11356529, 728112}, + }, + { + {26047220, -11751471, -6900323, -16521798, 24092068, 9158119, + -4273545, -12555558, -29365436, -5498272}, + {17510331, -322857, 5854289, 8403524, 17133918, -3112612, -28111007, + 12327945, 10750447, 10014012}, + {-10312768, 3936952, 9156313, -8897683, 16498692, -994647, + -27481051, -666732, 3424691, 7540221}, + }, + { + {30322361, -6964110, 11361005, -4143317, 7433304, 4989748, -7071422, + -16317219, -9244265, 15258046}, + {13054562, -2779497, 19155474, 469045, -12482797, 4566042, 5631406, + 2711395, 1062915, -5136345}, + {-19240248, -11254599, -29509029, -7499965, -5835763, 13005411, + -6066489, 12194497, 32960380, 1459310}, + }, + }, + { + { + {19852034, 7027924, 23669353, 10020366, 8586503, -6657907, 394197, + -6101885, 18638003, -11174937}, + {31395534, 15098109, 26581030, 8030562, -16527914, -5007134, + 9012486, -7584354, -6643087, -5442636}, + {-9192165, -2347377, -1997099, 4529534, 25766844, 607986, -13222, + 9677543, -32294889, -6456008}, + }, + { + {-2444496, -149937, 29348902, 8186665, 1873760, 12489863, -30934579, + -7839692, -7852844, -8138429}, + {-15236356, -15433509, 7766470, 746860, 26346930, -10221762, + -27333451, 10754588, -9431476, 5203576}, + {31834314, 14135496, -770007, 5159118, 20917671, -16768096, + -7467973, -7337524, 31809243, 7347066}, + }, + { + {-9606723, -11874240, 20414459, 13033986, 13716524, -11691881, + 19797970, -12211255, 15192876, -2087490}, + {-12663563, -2181719, 1168162, -3804809, 26747877, -14138091, + 10609330, 12694420, 33473243, -13382104}, + {33184999, 11180355, 15832085, -11385430, -1633671, 225884, + 15089336, -11023903, -6135662, 14480053}, + }, + { + {31308717, -5619998, 31030840, -1897099, 15674547, -6582883, + 5496208, 13685227, 27595050, 8737275}, + {-20318852, -15150239, 10933843, -16178022, 8335352, -7546022, + -31008351, -12610604, 26498114, 66511}, + {22644454, -8761729, -16671776, 4884562, -3105614, -13559366, + 30540766, -4286747, -13327787, -7515095}, + }, + { + {-28017847, 9834845, 18617207, -2681312, -3401956, -13307506, + 8205540, 13585437, -17127465, 15115439}, + {23711543, -672915, 31206561, -8362711, 6164647, -9709987, + -33535882, -1426096, 8236921, 16492939}, + {-23910559, -13515526, -26299483, -4503841, 25005590, -7687270, + 19574902, 10071562, 6708380, -6222424}, + }, + { + {2101391, -4930054, 19702731, 2367575, -15427167, 1047675, 5301017, + 9328700, 29955601, -11678310}, + {3096359, 9271816, -21620864, -15521844, -14847996, -7592937, + -25892142, -12635595, -9917575, 6216608}, + {-32615849, 338663, -25195611, 2510422, -29213566, -13820213, + 24822830, -6146567, -26767480, 7525079}, + }, + { + {-23066649, -13985623, 16133487, -7896178, -3389565, 778788, + -910336, -2782495, -19386633, 11994101}, + {21691500, -13624626, -641331, -14367021, 3285881, -3483596, + -25064666, 9718258, -7477437, 13381418}, + {18445390, -4202236, 14979846, 11622458, -1727110, -3582980, + 23111648, -6375247, 28535282, 15779576}, + }, + { + {30098053, 3089662, -9234387, 16662135, -21306940, 11308411, + -14068454, 12021730, 9955285, -16303356}, + {9734894, -14576830, -7473633, -9138735, 2060392, 11313496, + -18426029, 9924399, 20194861, 13380996}, + {-26378102, -7965207, -22167821, 15789297, -18055342, -6168792, + -1984914, 15707771, 26342023, 10146099}, + }, + }, + { + { + {-26016874, -219943, 21339191, -41388, 19745256, -2878700, + -29637280, 2227040, 21612326, -545728}, + {-13077387, 1184228, 23562814, -5970442, -20351244, -6348714, + 25764461, 12243797, -20856566, 11649658}, + {-10031494, 11262626, 27384172, 
2271902, 26947504, -15997771, 39944, + 6114064, 33514190, 2333242}, + }, + { + {-21433588, -12421821, 8119782, 7219913, -21830522, -9016134, + -6679750, -12670638, 24350578, -13450001}, + {-4116307, -11271533, -23886186, 4843615, -30088339, 690623, + -31536088, -10406836, 8317860, 12352766}, + {18200138, -14475911, -33087759, -2696619, -23702521, -9102511, + -23552096, -2287550, 20712163, 6719373}, + }, + { + {26656208, 6075253, -7858556, 1886072, -28344043, 4262326, 11117530, + -3763210, 26224235, -3297458}, + {-17168938, -14854097, -3395676, -16369877, -19954045, 14050420, + 21728352, 9493610, 18620611, -16428628}, + {-13323321, 13325349, 11432106, 5964811, 18609221, 6062965, + -5269471, -9725556, -30701573, -16479657}, + }, + { + {-23860538, -11233159, 26961357, 1640861, -32413112, -16737940, + 12248509, -5240639, 13735342, 1934062}, + {25089769, 6742589, 17081145, -13406266, 21909293, -16067981, + -15136294, -3765346, -21277997, 5473616}, + {31883677, -7961101, 1083432, -11572403, 22828471, 13290673, + -7125085, 12469656, 29111212, -5451014}, + }, + { + {24244947, -15050407, -26262976, 2791540, -14997599, 16666678, + 24367466, 6388839, -10295587, 452383}, + {-25640782, -3417841, 5217916, 16224624, 19987036, -4082269, + -24236251, -5915248, 15766062, 8407814}, + {-20406999, 13990231, 15495425, 16395525, 5377168, 15166495, + -8917023, -4388953, -8067909, 2276718}, + }, + { + {30157918, 12924066, -17712050, 9245753, 19895028, 3368142, + -23827587, 5096219, 22740376, -7303417}, + {2041139, -14256350, 7783687, 13876377, -25946985, -13352459, + 24051124, 13742383, -15637599, 13295222}, + {33338237, -8505733, 12532113, 7977527, 9106186, -1715251, + -17720195, -4612972, -4451357, -14669444}, + }, + { + {-20045281, 5454097, -14346548, 6447146, 28862071, 1883651, + -2469266, -4141880, 7770569, 9620597}, + {23208068, 7979712, 33071466, 8149229, 1758231, -10834995, 30945528, + -1694323, -33502340, -14767970}, + {1439958, -16270480, -1079989, -793782, 4625402, 10647766, -5043801, + 1220118, 30494170, -11440799}, + }, + { + {-5037580, -13028295, -2970559, -3061767, 15640974, -6701666, + -26739026, 926050, -1684339, -13333647}, + {13908495, -3549272, 30919928, -6273825, -21521863, 7989039, + 9021034, 9078865, 3353509, 4033511}, + {-29663431, -15113610, 32259991, -344482, 24295849, -12912123, + 23161163, 8839127, 27485041, 7356032}, + }, + }, + { + { + {9661027, 705443, 11980065, -5370154, -1628543, 14661173, -6346142, + 2625015, 28431036, -16771834}, + {-23839233, -8311415, -25945511, 7480958, -17681669, -8354183, + -22545972, 14150565, 15970762, 4099461}, + {29262576, 16756590, 26350592, -8793563, 8529671, -11208050, + 13617293, -9937143, 11465739, 8317062}, + }, + { + {-25493081, -6962928, 32500200, -9419051, -23038724, -2302222, + 14898637, 3848455, 20969334, -5157516}, + {-20384450, -14347713, -18336405, 13884722, -33039454, 2842114, + -21610826, -3649888, 11177095, 14989547}, + {-24496721, -11716016, 16959896, 2278463, 12066309, 10137771, + 13515641, 2581286, -28487508, 9930240}, + }, + { + {-17751622, -2097826, 16544300, -13009300, -15914807, -14949081, + 18345767, -13403753, 16291481, -5314038}, + {-33229194, 2553288, 32678213, 9875984, 8534129, 6889387, -9676774, + 6957617, 4368891, 9788741}, + {16660756, 7281060, -10830758, 12911820, 20108584, -8101676, + -21722536, -8613148, 16250552, -11111103}, + }, + { + {-19765507, 2390526, -16551031, 14161980, 1905286, 6414907, 4689584, + 10604807, -30190403, 4782747}, + {-1354539, 14736941, -7367442, -13292886, 7710542, -14155590, + -9981571, 4383045, 
22546403, 437323}, + {31665577, -12180464, -16186830, 1491339, -18368625, 3294682, + 27343084, 2786261, -30633590, -14097016}, + }, + { + {-14467279, -683715, -33374107, 7448552, 19294360, 14334329, + -19690631, 2355319, -19284671, -6114373}, + {15121312, -15796162, 6377020, -6031361, -10798111, -12957845, + 18952177, 15496498, -29380133, 11754228}, + {-2637277, -13483075, 8488727, -14303896, 12728761, -1622493, + 7141596, 11724556, 22761615, -10134141}, + }, + { + {16918416, 11729663, -18083579, 3022987, -31015732, -13339659, + -28741185, -12227393, 32851222, 11717399}, + {11166634, 7338049, -6722523, 4531520, -29468672, -7302055, + 31474879, 3483633, -1193175, -4030831}, + {-185635, 9921305, 31456609, -13536438, -12013818, 13348923, + 33142652, 6546660, -19985279, -3948376}, + }, + { + {-32460596, 11266712, -11197107, -7899103, 31703694, 3855903, + -8537131, -12833048, -30772034, -15486313}, + {-18006477, 12709068, 3991746, -6479188, -21491523, -10550425, + -31135347, -16049879, 10928917, 3011958}, + {-6957757, -15594337, 31696059, 334240, 29576716, 14796075, + -30831056, -12805180, 18008031, 10258577}, + }, + { + {-22448644, 15655569, 7018479, -4410003, -30314266, -1201591, + -1853465, 1367120, 25127874, 6671743}, + {29701166, -14373934, -10878120, 9279288, -17568, 13127210, + 21382910, 11042292, 25838796, 4642684}, + {-20430234, 14955537, -24126347, 8124619, -5369288, -5990470, + 30468147, -13900640, 18423289, 4177476}, + }, + }, +}; + +static uint8_t negative(signed char b) { + uint32_t x = b; + x >>= 31; /* 1: yes; 0: no */ + return x; +} + +static void table_select(ge_precomp *t, int pos, signed char b) { + ge_precomp minust; + uint8_t bnegative = negative(b); + uint8_t babs = b - ((uint8_t)((-bnegative) & b) << 1); + + ge_precomp_0(t); + cmov(t, &k25519Precomp[pos][0], equal(babs, 1)); + cmov(t, &k25519Precomp[pos][1], equal(babs, 2)); + cmov(t, &k25519Precomp[pos][2], equal(babs, 3)); + cmov(t, &k25519Precomp[pos][3], equal(babs, 4)); + cmov(t, &k25519Precomp[pos][4], equal(babs, 5)); + cmov(t, &k25519Precomp[pos][5], equal(babs, 6)); + cmov(t, &k25519Precomp[pos][6], equal(babs, 7)); + cmov(t, &k25519Precomp[pos][7], equal(babs, 8)); + fe_copy(minust.yplusx, t->yminusx); + fe_copy(minust.yminusx, t->yplusx); + fe_neg(minust.xy2d, t->xy2d); + cmov(t, &minust, bnegative); +} + +/* h = a * B + * where a = a[0]+256*a[1]+...+256^31 a[31] + * B is the Ed25519 base point (x,4/5) with x positive. 
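 * The scalar is first recoded into 64 signed radix-16 digits e[i] in [-8, 8].
 * The odd-position digits are accumulated first: for each one, table_select()
 * above picks the k25519Precomp entry for |e[i]| in constant time (a cmov over
 * all eight candidates) and conditionally negates it, and ge_madd() adds it in.
 * The partial sum is then multiplied by 16 with four doublings, and the
 * even-position digits are added the same way, giving h = sum_i e[i]*16^i*B.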
+ * + * Preconditions: + * a[31] <= 127 */ +void x25519_ge_scalarmult_base(ge_p3 *h, const uint8_t *a) { + signed char e[64]; + signed char carry; + ge_p1p1 r; + ge_p2 s; + ge_precomp t; + int i; + + for (i = 0; i < 32; ++i) { + e[2 * i + 0] = (a[i] >> 0) & 15; + e[2 * i + 1] = (a[i] >> 4) & 15; + } + /* each e[i] is between 0 and 15 */ + /* e[63] is between 0 and 7 */ + + carry = 0; + for (i = 0; i < 63; ++i) { + e[i] += carry; + carry = e[i] + 8; + carry >>= 4; + e[i] -= carry << 4; + } + e[63] += carry; + /* each e[i] is between -8 and 8 */ + + ge_p3_0(h); + for (i = 1; i < 64; i += 2) { + table_select(&t, i / 2, e[i]); + ge_madd(&r, h, &t); + x25519_ge_p1p1_to_p3(h, &r); + } + + ge_p3_dbl(&r, h); + x25519_ge_p1p1_to_p2(&s, &r); + ge_p2_dbl(&r, &s); + x25519_ge_p1p1_to_p2(&s, &r); + ge_p2_dbl(&r, &s); + x25519_ge_p1p1_to_p2(&s, &r); + ge_p2_dbl(&r, &s); + x25519_ge_p1p1_to_p3(h, &r); + + for (i = 0; i < 64; i += 2) { + table_select(&t, i / 2, e[i]); + ge_madd(&r, h, &t); + x25519_ge_p1p1_to_p3(h, &r); + } +} + +#endif + +static void cmov_cached(ge_cached *t, ge_cached *u, uint8_t b) { + fe_cmov(t->YplusX, u->YplusX, b); + fe_cmov(t->YminusX, u->YminusX, b); + fe_cmov(t->Z, u->Z, b); + fe_cmov(t->T2d, u->T2d, b); +} + +/* r = scalar * A. + * where a = a[0]+256*a[1]+...+256^31 a[31]. */ +void x25519_ge_scalarmult(ge_p2 *r, const uint8_t *scalar, const ge_p3 *A) { + ge_p2 Ai_p2[8]; + ge_cached Ai[16]; + ge_p1p1 t; + + ge_cached_0(&Ai[0]); + x25519_ge_p3_to_cached(&Ai[1], A); + ge_p3_to_p2(&Ai_p2[1], A); + + unsigned i; + for (i = 2; i < 16; i += 2) { + ge_p2_dbl(&t, &Ai_p2[i / 2]); + ge_p1p1_to_cached(&Ai[i], &t); + if (i < 8) { + x25519_ge_p1p1_to_p2(&Ai_p2[i], &t); + } + x25519_ge_add(&t, A, &Ai[i]); + ge_p1p1_to_cached(&Ai[i + 1], &t); + if (i < 7) { + x25519_ge_p1p1_to_p2(&Ai_p2[i + 1], &t); + } + } + + ge_p2_0(r); + ge_p3 u; + + for (i = 0; i < 256; i += 4) { + ge_p2_dbl(&t, r); + x25519_ge_p1p1_to_p2(r, &t); + ge_p2_dbl(&t, r); + x25519_ge_p1p1_to_p2(r, &t); + ge_p2_dbl(&t, r); + x25519_ge_p1p1_to_p2(r, &t); + ge_p2_dbl(&t, r); + x25519_ge_p1p1_to_p3(&u, &t); + + uint8_t index = scalar[31 - i/8]; + index >>= 4 - (i & 4); + index &= 0xf; + + unsigned j; + ge_cached selected; + ge_cached_0(&selected); + for (j = 0; j < 16; j++) { + cmov_cached(&selected, &Ai[j], equal(j, index)); + } + + x25519_ge_add(&t, &u, &selected); + x25519_ge_p1p1_to_p2(r, &t); + } +} + +#ifdef ED25519 +static void slide(signed char *r, const uint8_t *a) { + int i; + int b; + int k; + + for (i = 0; i < 256; ++i) { + r[i] = 1 & (a[i >> 3] >> (i & 7)); + } + + for (i = 0; i < 256; ++i) { + if (r[i]) { + for (b = 1; b <= 6 && i + b < 256; ++b) { + if (r[i + b]) { + if (r[i] + (r[i + b] << b) <= 15) { + r[i] += r[i + b] << b; + r[i + b] = 0; + } else if (r[i] - (r[i + b] << b) >= -15) { + r[i] -= r[i + b] << b; + for (k = i + b; k < 256; ++k) { + if (!r[k]) { + r[k] = 1; + break; + } + r[k] = 0; + } + } else { + break; + } + } + } + } + } +} + +static const ge_precomp Bi[8] = { + { + {25967493, -14356035, 29566456, 3660896, -12694345, 4014787, 27544626, + -11754271, -6079156, 2047605}, + {-12545711, 934262, -2722910, 3049990, -727428, 9406986, 12720692, + 5043384, 19500929, -15469378}, + {-8738181, 4489570, 9688441, -14785194, 10184609, -12363380, 29287919, + 11864899, -24514362, -4438546}, + }, + { + {15636291, -9688557, 24204773, -7912398, 616977, -16685262, 27787600, + -14772189, 28944400, -1550024}, + {16568933, 4717097, -11556148, -1102322, 15682896, -11807043, 16354577, + -11775962, 7689662, 11199574}, + 
{30464156, -5976125, -11779434, -15670865, 23220365, 15915852, 7512774, + 10017326, -17749093, -9920357}, + }, + { + {10861363, 11473154, 27284546, 1981175, -30064349, 12577861, 32867885, + 14515107, -15438304, 10819380}, + {4708026, 6336745, 20377586, 9066809, -11272109, 6594696, -25653668, + 12483688, -12668491, 5581306}, + {19563160, 16186464, -29386857, 4097519, 10237984, -4348115, 28542350, + 13850243, -23678021, -15815942}, + }, + { + {5153746, 9909285, 1723747, -2777874, 30523605, 5516873, 19480852, + 5230134, -23952439, -15175766}, + {-30269007, -3463509, 7665486, 10083793, 28475525, 1649722, 20654025, + 16520125, 30598449, 7715701}, + {28881845, 14381568, 9657904, 3680757, -20181635, 7843316, -31400660, + 1370708, 29794553, -1409300}, + }, + { + {-22518993, -6692182, 14201702, -8745502, -23510406, 8844726, 18474211, + -1361450, -13062696, 13821877}, + {-6455177, -7839871, 3374702, -4740862, -27098617, -10571707, 31655028, + -7212327, 18853322, -14220951}, + {4566830, -12963868, -28974889, -12240689, -7602672, -2830569, -8514358, + -10431137, 2207753, -3209784}, + }, + { + {-25154831, -4185821, 29681144, 7868801, -6854661, -9423865, -12437364, + -663000, -31111463, -16132436}, + {25576264, -2703214, 7349804, -11814844, 16472782, 9300885, 3844789, + 15725684, 171356, 6466918}, + {23103977, 13316479, 9739013, -16149481, 817875, -15038942, 8965339, + -14088058, -30714912, 16193877}, + }, + { + {-33521811, 3180713, -2394130, 14003687, -16903474, -16270840, 17238398, + 4729455, -18074513, 9256800}, + {-25182317, -4174131, 32336398, 5036987, -21236817, 11360617, 22616405, + 9761698, -19827198, 630305}, + {-13720693, 2639453, -24237460, -7406481, 9494427, -5774029, -6554551, + -15960994, -2449256, -14291300}, + }, + { + {-3151181, -5046075, 9282714, 6866145, -31907062, -863023, -18940575, + 15033784, 25105118, -7894876}, + {-24326370, 15950226, -31801215, -14592823, -11662737, -5090925, + 1573892, -2625887, 2198790, -15804619}, + {-3099351, 10324967, -2241613, 7453183, -5446979, -2735503, -13812022, + -16236442, -32461234, -12290683}, + }, +}; + +/* r = a * A + b * B + * where a = a[0]+256*a[1]+...+256^31 a[31]. + * and b = b[0]+256*b[1]+...+256^31 b[31]. + * B is the Ed25519 base point (x,4/5) with x positive. 
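 * Both scalars are first recoded by slide() above into a sparse signed-digit
 * form whose nonzero digits are odd and lie in [-15, 15], so only the odd
 * multiples A, 3A, ..., 15A (the Ai table built inside the function) and
 * B, 3B, ..., 15B (the Bi table above) are ever needed, and most iterations of
 * the main loop are a plain doubling.  As the name says, this routine runs in
 * variable time, which is acceptable for signature verification where every
 * input is public.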
*/ +static void +ge_double_scalarmult_vartime(ge_p2 *r, const uint8_t *a, + const ge_p3 *A, const uint8_t *b) { + signed char aslide[256]; + signed char bslide[256]; + ge_cached Ai[8]; /* A,3A,5A,7A,9A,11A,13A,15A */ + ge_p1p1 t; + ge_p3 u; + ge_p3 A2; + int i; + + slide(aslide, a); + slide(bslide, b); + + x25519_ge_p3_to_cached(&Ai[0], A); + ge_p3_dbl(&t, A); + x25519_ge_p1p1_to_p3(&A2, &t); + x25519_ge_add(&t, &A2, &Ai[0]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[1], &u); + x25519_ge_add(&t, &A2, &Ai[1]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[2], &u); + x25519_ge_add(&t, &A2, &Ai[2]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[3], &u); + x25519_ge_add(&t, &A2, &Ai[3]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[4], &u); + x25519_ge_add(&t, &A2, &Ai[4]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[5], &u); + x25519_ge_add(&t, &A2, &Ai[5]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[6], &u); + x25519_ge_add(&t, &A2, &Ai[6]); + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_p3_to_cached(&Ai[7], &u); + + ge_p2_0(r); + + for (i = 255; i >= 0; --i) { + if (aslide[i] || bslide[i]) { + break; + } + } + + for (; i >= 0; --i) { + ge_p2_dbl(&t, r); + + if (aslide[i] > 0) { + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_add(&t, &u, &Ai[aslide[i] / 2]); + } else if (aslide[i] < 0) { + x25519_ge_p1p1_to_p3(&u, &t); + x25519_ge_sub(&t, &u, &Ai[(-aslide[i]) / 2]); + } + + if (bslide[i] > 0) { + x25519_ge_p1p1_to_p3(&u, &t); + ge_madd(&t, &u, &Bi[bslide[i] / 2]); + } else if (bslide[i] < 0) { + x25519_ge_p1p1_to_p3(&u, &t); + ge_msub(&t, &u, &Bi[(-bslide[i]) / 2]); + } + + x25519_ge_p1p1_to_p2(r, &t); + } +} +#endif + +/* The set of scalars is \Z/l + * where l = 2^252 + 27742317777372353535851937790883648493. */ + +/* Input: + * s[0]+256*s[1]+...+256^63*s[63] = s + * + * Output: + * s[0]+256*s[1]+...+256^31*s[31] = s mod l + * where l = 2^252 + 27742317777372353535851937790883648493. + * Overwrites s in place. 
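 * The 64-byte input is handled as 24 limbs of 21 bits (2097151 = 2^21 - 1).
 * The constants 666643, 470296, 654183, 997805, 136657 and 683901 used
 * throughout the reduction are the signed radix-2^21 digits of
 * -27742317777372353535851937790883648493, i.e. of 2^252 reduced mod l, so
 * each high limb can be folded into the six limbs twelve positions below it,
 * after which the carry chains renormalize everything back to 21-bit limbs.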
*/ +void +x25519_sc_reduce(uint8_t *s) { + int64_t s0 = 2097151 & load_3(s); + int64_t s1 = 2097151 & (load_4(s + 2) >> 5); + int64_t s2 = 2097151 & (load_3(s + 5) >> 2); + int64_t s3 = 2097151 & (load_4(s + 7) >> 7); + int64_t s4 = 2097151 & (load_4(s + 10) >> 4); + int64_t s5 = 2097151 & (load_3(s + 13) >> 1); + int64_t s6 = 2097151 & (load_4(s + 15) >> 6); + int64_t s7 = 2097151 & (load_3(s + 18) >> 3); + int64_t s8 = 2097151 & load_3(s + 21); + int64_t s9 = 2097151 & (load_4(s + 23) >> 5); + int64_t s10 = 2097151 & (load_3(s + 26) >> 2); + int64_t s11 = 2097151 & (load_4(s + 28) >> 7); + int64_t s12 = 2097151 & (load_4(s + 31) >> 4); + int64_t s13 = 2097151 & (load_3(s + 34) >> 1); + int64_t s14 = 2097151 & (load_4(s + 36) >> 6); + int64_t s15 = 2097151 & (load_3(s + 39) >> 3); + int64_t s16 = 2097151 & load_3(s + 42); + int64_t s17 = 2097151 & (load_4(s + 44) >> 5); + int64_t s18 = 2097151 & (load_3(s + 47) >> 2); + int64_t s19 = 2097151 & (load_4(s + 49) >> 7); + int64_t s20 = 2097151 & (load_4(s + 52) >> 4); + int64_t s21 = 2097151 & (load_3(s + 55) >> 1); + int64_t s22 = 2097151 & (load_4(s + 57) >> 6); + int64_t s23 = (load_4(s + 60) >> 3); + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + int64_t carry10; + int64_t carry11; + int64_t carry12; + int64_t carry13; + int64_t carry14; + int64_t carry15; + int64_t carry16; + + s11 += s23 * 666643; + s12 += s23 * 470296; + s13 += s23 * 654183; + s14 -= s23 * 997805; + s15 += s23 * 136657; + s16 -= s23 * 683901; + s23 = 0; + + s10 += s22 * 666643; + s11 += s22 * 470296; + s12 += s22 * 654183; + s13 -= s22 * 997805; + s14 += s22 * 136657; + s15 -= s22 * 683901; + s22 = 0; + + s9 += s21 * 666643; + s10 += s21 * 470296; + s11 += s21 * 654183; + s12 -= s21 * 997805; + s13 += s21 * 136657; + s14 -= s21 * 683901; + s21 = 0; + + s8 += s20 * 666643; + s9 += s20 * 470296; + s10 += s20 * 654183; + s11 -= s20 * 997805; + s12 += s20 * 136657; + s13 -= s20 * 683901; + s20 = 0; + + s7 += s19 * 666643; + s8 += s19 * 470296; + s9 += s19 * 654183; + s10 -= s19 * 997805; + s11 += s19 * 136657; + s12 -= s19 * 683901; + s19 = 0; + + s6 += s18 * 666643; + s7 += s18 * 470296; + s8 += s18 * 654183; + s9 -= s18 * 997805; + s10 += s18 * 136657; + s11 -= s18 * 683901; + s18 = 0; + + carry6 = (s6 + (1 << 20)) >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry8 = (s8 + (1 << 20)) >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry10 = (s10 + (1 << 20)) >> 21; + s11 += carry10; + s10 -= carry10 << 21; + carry12 = (s12 + (1 << 20)) >> 21; + s13 += carry12; + s12 -= carry12 << 21; + carry14 = (s14 + (1 << 20)) >> 21; + s15 += carry14; + s14 -= carry14 << 21; + carry16 = (s16 + (1 << 20)) >> 21; + s17 += carry16; + s16 -= carry16 << 21; + + carry7 = (s7 + (1 << 20)) >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry9 = (s9 + (1 << 20)) >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry11 = (s11 + (1 << 20)) >> 21; + s12 += carry11; + s11 -= carry11 << 21; + carry13 = (s13 + (1 << 20)) >> 21; + s14 += carry13; + s13 -= carry13 << 21; + carry15 = (s15 + (1 << 20)) >> 21; + s16 += carry15; + s15 -= carry15 << 21; + + s5 += s17 * 666643; + s6 += s17 * 470296; + s7 += s17 * 654183; + s8 -= s17 * 997805; + s9 += s17 * 136657; + s10 -= s17 * 683901; + s17 = 0; + + s4 += s16 * 666643; + s5 += s16 * 470296; + s6 += s16 * 654183; + s7 -= s16 * 997805; + s8 += s16 * 136657; + s9 -= s16 * 683901; + s16 = 0; + + s3 += s15 * 666643; + s4 += s15 
* 470296; + s5 += s15 * 654183; + s6 -= s15 * 997805; + s7 += s15 * 136657; + s8 -= s15 * 683901; + s15 = 0; + + s2 += s14 * 666643; + s3 += s14 * 470296; + s4 += s14 * 654183; + s5 -= s14 * 997805; + s6 += s14 * 136657; + s7 -= s14 * 683901; + s14 = 0; + + s1 += s13 * 666643; + s2 += s13 * 470296; + s3 += s13 * 654183; + s4 -= s13 * 997805; + s5 += s13 * 136657; + s6 -= s13 * 683901; + s13 = 0; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = (s0 + (1 << 20)) >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry2 = (s2 + (1 << 20)) >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry4 = (s4 + (1 << 20)) >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry6 = (s6 + (1 << 20)) >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry8 = (s8 + (1 << 20)) >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry10 = (s10 + (1 << 20)) >> 21; + s11 += carry10; + s10 -= carry10 << 21; + + carry1 = (s1 + (1 << 20)) >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry3 = (s3 + (1 << 20)) >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry5 = (s5 + (1 << 20)) >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry7 = (s7 + (1 << 20)) >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry9 = (s9 + (1 << 20)) >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry11 = (s11 + (1 << 20)) >> 21; + s12 += carry11; + s11 -= carry11 << 21; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = s0 >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry1 = s1 >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry2 = s2 >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry3 = s3 >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry4 = s4 >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry5 = s5 >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry6 = s6 >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry7 = s7 >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry8 = s8 >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry9 = s9 >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry10 = s10 >> 21; + s11 += carry10; + s10 -= carry10 << 21; + carry11 = s11 >> 21; + s12 += carry11; + s11 -= carry11 << 21; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = s0 >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry1 = s1 >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry2 = s2 >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry3 = s3 >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry4 = s4 >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry5 = s5 >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry6 = s6 >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry7 = s7 >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry8 = s8 >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry9 = s9 >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry10 = s10 >> 21; + s11 += carry10; + s10 -= carry10 << 21; + + s[0] = s0 >> 0; + s[1] = s0 >> 8; + s[2] = (s0 >> 16) | (s1 << 5); + s[3] = s1 >> 3; + s[4] = s1 >> 11; + s[5] = (s1 >> 19) | (s2 << 2); + s[6] = s2 >> 6; + s[7] = (s2 >> 14) | (s3 << 7); + s[8] = s3 >> 1; + s[9] = s3 >> 9; + s[10] = (s3 >> 17) | (s4 << 4); + s[11] = s4 >> 4; + s[12] = s4 >> 12; + s[13] = (s4 >> 20) | (s5 << 1); + s[14] = s5 >> 7; + s[15] = (s5 >> 15) | (s6 << 6); + s[16] = s6 >> 2; + s[17] = s6 >> 10; + s[18] = (s6 >> 
18) | (s7 << 3); + s[19] = s7 >> 5; + s[20] = s7 >> 13; + s[21] = s8 >> 0; + s[22] = s8 >> 8; + s[23] = (s8 >> 16) | (s9 << 5); + s[24] = s9 >> 3; + s[25] = s9 >> 11; + s[26] = (s9 >> 19) | (s10 << 2); + s[27] = s10 >> 6; + s[28] = (s10 >> 14) | (s11 << 7); + s[29] = s11 >> 1; + s[30] = s11 >> 9; + s[31] = s11 >> 17; +} + +#ifdef ED25519 +/* Input: + * a[0]+256*a[1]+...+256^31*a[31] = a + * b[0]+256*b[1]+...+256^31*b[31] = b + * c[0]+256*c[1]+...+256^31*c[31] = c + * + * Output: + * s[0]+256*s[1]+...+256^31*s[31] = (ab+c) mod l + * where l = 2^252 + 27742317777372353535851937790883648493. */ +static void +sc_muladd(uint8_t *s, const uint8_t *a, const uint8_t *b, + const uint8_t *c) +{ + int64_t a0 = 2097151 & load_3(a); + int64_t a1 = 2097151 & (load_4(a + 2) >> 5); + int64_t a2 = 2097151 & (load_3(a + 5) >> 2); + int64_t a3 = 2097151 & (load_4(a + 7) >> 7); + int64_t a4 = 2097151 & (load_4(a + 10) >> 4); + int64_t a5 = 2097151 & (load_3(a + 13) >> 1); + int64_t a6 = 2097151 & (load_4(a + 15) >> 6); + int64_t a7 = 2097151 & (load_3(a + 18) >> 3); + int64_t a8 = 2097151 & load_3(a + 21); + int64_t a9 = 2097151 & (load_4(a + 23) >> 5); + int64_t a10 = 2097151 & (load_3(a + 26) >> 2); + int64_t a11 = (load_4(a + 28) >> 7); + int64_t b0 = 2097151 & load_3(b); + int64_t b1 = 2097151 & (load_4(b + 2) >> 5); + int64_t b2 = 2097151 & (load_3(b + 5) >> 2); + int64_t b3 = 2097151 & (load_4(b + 7) >> 7); + int64_t b4 = 2097151 & (load_4(b + 10) >> 4); + int64_t b5 = 2097151 & (load_3(b + 13) >> 1); + int64_t b6 = 2097151 & (load_4(b + 15) >> 6); + int64_t b7 = 2097151 & (load_3(b + 18) >> 3); + int64_t b8 = 2097151 & load_3(b + 21); + int64_t b9 = 2097151 & (load_4(b + 23) >> 5); + int64_t b10 = 2097151 & (load_3(b + 26) >> 2); + int64_t b11 = (load_4(b + 28) >> 7); + int64_t c0 = 2097151 & load_3(c); + int64_t c1 = 2097151 & (load_4(c + 2) >> 5); + int64_t c2 = 2097151 & (load_3(c + 5) >> 2); + int64_t c3 = 2097151 & (load_4(c + 7) >> 7); + int64_t c4 = 2097151 & (load_4(c + 10) >> 4); + int64_t c5 = 2097151 & (load_3(c + 13) >> 1); + int64_t c6 = 2097151 & (load_4(c + 15) >> 6); + int64_t c7 = 2097151 & (load_3(c + 18) >> 3); + int64_t c8 = 2097151 & load_3(c + 21); + int64_t c9 = 2097151 & (load_4(c + 23) >> 5); + int64_t c10 = 2097151 & (load_3(c + 26) >> 2); + int64_t c11 = (load_4(c + 28) >> 7); + int64_t s0; + int64_t s1; + int64_t s2; + int64_t s3; + int64_t s4; + int64_t s5; + int64_t s6; + int64_t s7; + int64_t s8; + int64_t s9; + int64_t s10; + int64_t s11; + int64_t s12; + int64_t s13; + int64_t s14; + int64_t s15; + int64_t s16; + int64_t s17; + int64_t s18; + int64_t s19; + int64_t s20; + int64_t s21; + int64_t s22; + int64_t s23; + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + int64_t carry10; + int64_t carry11; + int64_t carry12; + int64_t carry13; + int64_t carry14; + int64_t carry15; + int64_t carry16; + int64_t carry17; + int64_t carry18; + int64_t carry19; + int64_t carry20; + int64_t carry21; + int64_t carry22; + + s0 = c0 + a0 * b0; + s1 = c1 + a0 * b1 + a1 * b0; + s2 = c2 + a0 * b2 + a1 * b1 + a2 * b0; + s3 = c3 + a0 * b3 + a1 * b2 + a2 * b1 + a3 * b0; + s4 = c4 + a0 * b4 + a1 * b3 + a2 * b2 + a3 * b1 + a4 * b0; + s5 = c5 + a0 * b5 + a1 * b4 + a2 * b3 + a3 * b2 + a4 * b1 + a5 * b0; + s6 = c6 + a0 * b6 + a1 * b5 + a2 * b4 + a3 * b3 + a4 * b2 + a5 * b1 + a6 * b0; + s7 = c7 + a0 * b7 + a1 * b6 + a2 * b5 + a3 * b4 + a4 * b3 + a5 * b2 + + a6 * b1 + a7 
* b0; + s8 = c8 + a0 * b8 + a1 * b7 + a2 * b6 + a3 * b5 + a4 * b4 + a5 * b3 + + a6 * b2 + a7 * b1 + a8 * b0; + s9 = c9 + a0 * b9 + a1 * b8 + a2 * b7 + a3 * b6 + a4 * b5 + a5 * b4 + + a6 * b3 + a7 * b2 + a8 * b1 + a9 * b0; + s10 = c10 + a0 * b10 + a1 * b9 + a2 * b8 + a3 * b7 + a4 * b6 + a5 * b5 + + a6 * b4 + a7 * b3 + a8 * b2 + a9 * b1 + a10 * b0; + s11 = c11 + a0 * b11 + a1 * b10 + a2 * b9 + a3 * b8 + a4 * b7 + a5 * b6 + + a6 * b5 + a7 * b4 + a8 * b3 + a9 * b2 + a10 * b1 + a11 * b0; + s12 = a1 * b11 + a2 * b10 + a3 * b9 + a4 * b8 + a5 * b7 + a6 * b6 + a7 * b5 + + a8 * b4 + a9 * b3 + a10 * b2 + a11 * b1; + s13 = a2 * b11 + a3 * b10 + a4 * b9 + a5 * b8 + a6 * b7 + a7 * b6 + a8 * b5 + + a9 * b4 + a10 * b3 + a11 * b2; + s14 = a3 * b11 + a4 * b10 + a5 * b9 + a6 * b8 + a7 * b7 + a8 * b6 + a9 * b5 + + a10 * b4 + a11 * b3; + s15 = a4 * b11 + a5 * b10 + a6 * b9 + a7 * b8 + a8 * b7 + a9 * b6 + a10 * b5 + + a11 * b4; + s16 = a5 * b11 + a6 * b10 + a7 * b9 + a8 * b8 + a9 * b7 + a10 * b6 + a11 * b5; + s17 = a6 * b11 + a7 * b10 + a8 * b9 + a9 * b8 + a10 * b7 + a11 * b6; + s18 = a7 * b11 + a8 * b10 + a9 * b9 + a10 * b8 + a11 * b7; + s19 = a8 * b11 + a9 * b10 + a10 * b9 + a11 * b8; + s20 = a9 * b11 + a10 * b10 + a11 * b9; + s21 = a10 * b11 + a11 * b10; + s22 = a11 * b11; + s23 = 0; + + carry0 = (s0 + (1 << 20)) >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry2 = (s2 + (1 << 20)) >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry4 = (s4 + (1 << 20)) >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry6 = (s6 + (1 << 20)) >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry8 = (s8 + (1 << 20)) >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry10 = (s10 + (1 << 20)) >> 21; + s11 += carry10; + s10 -= carry10 << 21; + carry12 = (s12 + (1 << 20)) >> 21; + s13 += carry12; + s12 -= carry12 << 21; + carry14 = (s14 + (1 << 20)) >> 21; + s15 += carry14; + s14 -= carry14 << 21; + carry16 = (s16 + (1 << 20)) >> 21; + s17 += carry16; + s16 -= carry16 << 21; + carry18 = (s18 + (1 << 20)) >> 21; + s19 += carry18; + s18 -= carry18 << 21; + carry20 = (s20 + (1 << 20)) >> 21; + s21 += carry20; + s20 -= carry20 << 21; + carry22 = (s22 + (1 << 20)) >> 21; + s23 += carry22; + s22 -= carry22 << 21; + + carry1 = (s1 + (1 << 20)) >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry3 = (s3 + (1 << 20)) >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry5 = (s5 + (1 << 20)) >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry7 = (s7 + (1 << 20)) >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry9 = (s9 + (1 << 20)) >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry11 = (s11 + (1 << 20)) >> 21; + s12 += carry11; + s11 -= carry11 << 21; + carry13 = (s13 + (1 << 20)) >> 21; + s14 += carry13; + s13 -= carry13 << 21; + carry15 = (s15 + (1 << 20)) >> 21; + s16 += carry15; + s15 -= carry15 << 21; + carry17 = (s17 + (1 << 20)) >> 21; + s18 += carry17; + s17 -= carry17 << 21; + carry19 = (s19 + (1 << 20)) >> 21; + s20 += carry19; + s19 -= carry19 << 21; + carry21 = (s21 + (1 << 20)) >> 21; + s22 += carry21; + s21 -= carry21 << 21; + + s11 += s23 * 666643; + s12 += s23 * 470296; + s13 += s23 * 654183; + s14 -= s23 * 997805; + s15 += s23 * 136657; + s16 -= s23 * 683901; + s23 = 0; + + s10 += s22 * 666643; + s11 += s22 * 470296; + s12 += s22 * 654183; + s13 -= s22 * 997805; + s14 += s22 * 136657; + s15 -= s22 * 683901; + s22 = 0; + + s9 += s21 * 666643; + s10 += s21 * 470296; + s11 += s21 * 654183; + s12 -= s21 * 997805; + s13 += s21 * 136657; + s14 -= s21 * 683901; + s21 = 0; + + s8 += s20 * 666643; + s9 += s20 * 470296; + s10 += 
s20 * 654183; + s11 -= s20 * 997805; + s12 += s20 * 136657; + s13 -= s20 * 683901; + s20 = 0; + + s7 += s19 * 666643; + s8 += s19 * 470296; + s9 += s19 * 654183; + s10 -= s19 * 997805; + s11 += s19 * 136657; + s12 -= s19 * 683901; + s19 = 0; + + s6 += s18 * 666643; + s7 += s18 * 470296; + s8 += s18 * 654183; + s9 -= s18 * 997805; + s10 += s18 * 136657; + s11 -= s18 * 683901; + s18 = 0; + + carry6 = (s6 + (1 << 20)) >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry8 = (s8 + (1 << 20)) >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry10 = (s10 + (1 << 20)) >> 21; + s11 += carry10; + s10 -= carry10 << 21; + carry12 = (s12 + (1 << 20)) >> 21; + s13 += carry12; + s12 -= carry12 << 21; + carry14 = (s14 + (1 << 20)) >> 21; + s15 += carry14; + s14 -= carry14 << 21; + carry16 = (s16 + (1 << 20)) >> 21; + s17 += carry16; + s16 -= carry16 << 21; + + carry7 = (s7 + (1 << 20)) >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry9 = (s9 + (1 << 20)) >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry11 = (s11 + (1 << 20)) >> 21; + s12 += carry11; + s11 -= carry11 << 21; + carry13 = (s13 + (1 << 20)) >> 21; + s14 += carry13; + s13 -= carry13 << 21; + carry15 = (s15 + (1 << 20)) >> 21; + s16 += carry15; + s15 -= carry15 << 21; + + s5 += s17 * 666643; + s6 += s17 * 470296; + s7 += s17 * 654183; + s8 -= s17 * 997805; + s9 += s17 * 136657; + s10 -= s17 * 683901; + s17 = 0; + + s4 += s16 * 666643; + s5 += s16 * 470296; + s6 += s16 * 654183; + s7 -= s16 * 997805; + s8 += s16 * 136657; + s9 -= s16 * 683901; + s16 = 0; + + s3 += s15 * 666643; + s4 += s15 * 470296; + s5 += s15 * 654183; + s6 -= s15 * 997805; + s7 += s15 * 136657; + s8 -= s15 * 683901; + s15 = 0; + + s2 += s14 * 666643; + s3 += s14 * 470296; + s4 += s14 * 654183; + s5 -= s14 * 997805; + s6 += s14 * 136657; + s7 -= s14 * 683901; + s14 = 0; + + s1 += s13 * 666643; + s2 += s13 * 470296; + s3 += s13 * 654183; + s4 -= s13 * 997805; + s5 += s13 * 136657; + s6 -= s13 * 683901; + s13 = 0; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = (s0 + (1 << 20)) >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry2 = (s2 + (1 << 20)) >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry4 = (s4 + (1 << 20)) >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry6 = (s6 + (1 << 20)) >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry8 = (s8 + (1 << 20)) >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry10 = (s10 + (1 << 20)) >> 21; + s11 += carry10; + s10 -= carry10 << 21; + + carry1 = (s1 + (1 << 20)) >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry3 = (s3 + (1 << 20)) >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry5 = (s5 + (1 << 20)) >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry7 = (s7 + (1 << 20)) >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry9 = (s9 + (1 << 20)) >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry11 = (s11 + (1 << 20)) >> 21; + s12 += carry11; + s11 -= carry11 << 21; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = s0 >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry1 = s1 >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry2 = s2 >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry3 = s3 >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry4 = s4 >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry5 = s5 >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry6 = s6 >> 21; + s7 += carry6; + s6 -= 
carry6 << 21; + carry7 = s7 >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry8 = s8 >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry9 = s9 >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry10 = s10 >> 21; + s11 += carry10; + s10 -= carry10 << 21; + carry11 = s11 >> 21; + s12 += carry11; + s11 -= carry11 << 21; + + s0 += s12 * 666643; + s1 += s12 * 470296; + s2 += s12 * 654183; + s3 -= s12 * 997805; + s4 += s12 * 136657; + s5 -= s12 * 683901; + s12 = 0; + + carry0 = s0 >> 21; + s1 += carry0; + s0 -= carry0 << 21; + carry1 = s1 >> 21; + s2 += carry1; + s1 -= carry1 << 21; + carry2 = s2 >> 21; + s3 += carry2; + s2 -= carry2 << 21; + carry3 = s3 >> 21; + s4 += carry3; + s3 -= carry3 << 21; + carry4 = s4 >> 21; + s5 += carry4; + s4 -= carry4 << 21; + carry5 = s5 >> 21; + s6 += carry5; + s5 -= carry5 << 21; + carry6 = s6 >> 21; + s7 += carry6; + s6 -= carry6 << 21; + carry7 = s7 >> 21; + s8 += carry7; + s7 -= carry7 << 21; + carry8 = s8 >> 21; + s9 += carry8; + s8 -= carry8 << 21; + carry9 = s9 >> 21; + s10 += carry9; + s9 -= carry9 << 21; + carry10 = s10 >> 21; + s11 += carry10; + s10 -= carry10 << 21; + + s[0] = s0 >> 0; + s[1] = s0 >> 8; + s[2] = (s0 >> 16) | (s1 << 5); + s[3] = s1 >> 3; + s[4] = s1 >> 11; + s[5] = (s1 >> 19) | (s2 << 2); + s[6] = s2 >> 6; + s[7] = (s2 >> 14) | (s3 << 7); + s[8] = s3 >> 1; + s[9] = s3 >> 9; + s[10] = (s3 >> 17) | (s4 << 4); + s[11] = s4 >> 4; + s[12] = s4 >> 12; + s[13] = (s4 >> 20) | (s5 << 1); + s[14] = s5 >> 7; + s[15] = (s5 >> 15) | (s6 << 6); + s[16] = s6 >> 2; + s[17] = s6 >> 10; + s[18] = (s6 >> 18) | (s7 << 3); + s[19] = s7 >> 5; + s[20] = s7 >> 13; + s[21] = s8 >> 0; + s[22] = s8 >> 8; + s[23] = (s8 >> 16) | (s9 << 5); + s[24] = s9 >> 3; + s[25] = s9 >> 11; + s[26] = (s9 >> 19) | (s10 << 2); + s[27] = s10 >> 6; + s[28] = (s10 >> 14) | (s11 << 7); + s[29] = s11 >> 1; + s[30] = s11 >> 9; + s[31] = s11 >> 17; +} +#endif + +#ifdef ED25519 +int ED25519_keypair(uint8_t out_public_key[32], uint8_t out_private_key[64], + int (*random_buf)(void *, size_t)) { + uint8_t seed[32]; + int rv = random_buf(seed, 32); + if (rv) return 0; + + uint8_t az[SHA512_DIGEST_LENGTH]; + SHA512(seed, 32, az); + + az[0] &= 248; + az[31] &= 63; + az[31] |= 64; + + ge_p3 A; + x25519_ge_scalarmult_base(&A, az); + ge_p3_tobytes(out_public_key, &A); + + memcpy(out_private_key, seed, 32); + memmove(out_private_key + 32, out_public_key, 32); + return 1; +} + +int ED25519_sign(uint8_t *out_sig, const uint8_t *message, size_t message_len, + const uint8_t private_key[64]) { + uint8_t az[SHA512_DIGEST_LENGTH]; + SHA512(private_key, 32, az); + + az[0] &= 248; + az[31] &= 63; + az[31] |= 64; + + SHA512_CTX hash_ctx; + SHA512_Init(&hash_ctx); + SHA512_Update(&hash_ctx, az + 32, 32); + SHA512_Update(&hash_ctx, message, message_len); + uint8_t nonce[SHA512_DIGEST_LENGTH]; + SHA512_Final(nonce, &hash_ctx); + + x25519_sc_reduce(nonce); + ge_p3 R; + x25519_ge_scalarmult_base(&R, nonce); + ge_p3_tobytes(out_sig, &R); + + SHA512_Init(&hash_ctx); + SHA512_Update(&hash_ctx, out_sig, 32); + SHA512_Update(&hash_ctx, private_key + 32, 32); + SHA512_Update(&hash_ctx, message, message_len); + uint8_t hram[SHA512_DIGEST_LENGTH]; + SHA512_Final(hram, &hash_ctx); + + x25519_sc_reduce(hram); + sc_muladd(out_sig + 32, hram, az, nonce); + + return 1; +} + +int ED25519_verify(const uint8_t *message, size_t message_len, + const uint8_t signature[64], const uint8_t public_key[32]) { + ge_p3 A; + if ((signature[63] & 224) != 0 || + x25519_ge_frombytes_vartime(&A, public_key) != 0) { + return 0; + } + + 
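	/* Verification checks that [S]B = R + [h]A.  A is negated here (in
	 * extended coordinates only X and T change sign) so that the single
	 * double-scalar multiplication below can compute R' = [h](-A) + [S]B;
	 * the signature is accepted iff the encoding of R' equals the R half
	 * of the signature, compared with timingsafe_memcmp() at the end. */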
fe_neg(A.X, A.X); + fe_neg(A.T, A.T); + + uint8_t pkcopy[32]; + memcpy(pkcopy, public_key, 32); + uint8_t rcopy[32]; + memcpy(rcopy, signature, 32); + uint8_t scopy[32]; + memcpy(scopy, signature + 32, 32); + + SHA512_CTX hash_ctx; + SHA512_Init(&hash_ctx); + SHA512_Update(&hash_ctx, signature, 32); + SHA512_Update(&hash_ctx, public_key, 32); + SHA512_Update(&hash_ctx, message, message_len); + uint8_t h[SHA512_DIGEST_LENGTH]; + SHA512_Final(h, &hash_ctx); + + x25519_sc_reduce(h); + + ge_p2 R; + ge_double_scalarmult_vartime(&R, h, &A, scopy); + + uint8_t rcheck[32]; + x25519_ge_tobytes(rcheck, &R); + + return timingsafe_memcmp(rcheck, rcopy, sizeof(rcheck)) == 0; +} +#endif + +/* Replace (f,g) with (g,f) if b == 1; + * replace (f,g) with (f,g) if b == 0. + * + * Preconditions: b in {0,1}. */ +static void fe_cswap(fe f, fe g, unsigned int b) { + b = 0-b; + unsigned i; + for (i = 0; i < 10; i++) { + int32_t x = f[i] ^ g[i]; + x &= b; + f[i] ^= x; + g[i] ^= x; + } +} + +/* h = f * 121666 + * Can overlap h with f. + * + * Preconditions: + * |f| bounded by 1.1*2^26,1.1*2^25,1.1*2^26,1.1*2^25,etc. + * + * Postconditions: + * |h| bounded by 1.1*2^25,1.1*2^24,1.1*2^25,1.1*2^24,etc. */ +static void fe_mul121666(fe h, fe f) { + int32_t f0 = f[0]; + int32_t f1 = f[1]; + int32_t f2 = f[2]; + int32_t f3 = f[3]; + int32_t f4 = f[4]; + int32_t f5 = f[5]; + int32_t f6 = f[6]; + int32_t f7 = f[7]; + int32_t f8 = f[8]; + int32_t f9 = f[9]; + int64_t h0 = f0 * (int64_t) 121666; + int64_t h1 = f1 * (int64_t) 121666; + int64_t h2 = f2 * (int64_t) 121666; + int64_t h3 = f3 * (int64_t) 121666; + int64_t h4 = f4 * (int64_t) 121666; + int64_t h5 = f5 * (int64_t) 121666; + int64_t h6 = f6 * (int64_t) 121666; + int64_t h7 = f7 * (int64_t) 121666; + int64_t h8 = f8 * (int64_t) 121666; + int64_t h9 = f9 * (int64_t) 121666; + int64_t carry0; + int64_t carry1; + int64_t carry2; + int64_t carry3; + int64_t carry4; + int64_t carry5; + int64_t carry6; + int64_t carry7; + int64_t carry8; + int64_t carry9; + + carry9 = h9 + (1 << 24); h0 += (carry9 >> 25) * 19; h9 -= carry9 & kTop39Bits; + carry1 = h1 + (1 << 24); h2 += carry1 >> 25; h1 -= carry1 & kTop39Bits; + carry3 = h3 + (1 << 24); h4 += carry3 >> 25; h3 -= carry3 & kTop39Bits; + carry5 = h5 + (1 << 24); h6 += carry5 >> 25; h5 -= carry5 & kTop39Bits; + carry7 = h7 + (1 << 24); h8 += carry7 >> 25; h7 -= carry7 & kTop39Bits; + + carry0 = h0 + (1 << 25); h1 += carry0 >> 26; h0 -= carry0 & kTop38Bits; + carry2 = h2 + (1 << 25); h3 += carry2 >> 26; h2 -= carry2 & kTop38Bits; + carry4 = h4 + (1 << 25); h5 += carry4 >> 26; h4 -= carry4 & kTop38Bits; + carry6 = h6 + (1 << 25); h7 += carry6 >> 26; h6 -= carry6 & kTop38Bits; + carry8 = h8 + (1 << 25); h9 += carry8 >> 26; h8 -= carry8 & kTop38Bits; + + h[0] = h0; + h[1] = h1; + h[2] = h2; + h[3] = h3; + h[4] = h4; + h[5] = h5; + h[6] = h6; + h[7] = h7; + h[8] = h8; + h[9] = h9; +} + +void +x25519_scalar_mult_generic(uint8_t out[32], const uint8_t scalar[32], + const uint8_t point[32]) { + fe x1, x2, z2, x3, z3, tmp0, tmp1; + + uint8_t e[32]; + memcpy(e, scalar, 32); + e[0] &= 248; + e[31] &= 127; + e[31] |= 64; + fe_frombytes(x1, point); + fe_1(x2); + fe_0(z2); + fe_copy(x3, x1); + fe_1(z3); + + unsigned swap = 0; + int pos; + for (pos = 254; pos >= 0; --pos) { + unsigned b = 1 & (e[pos / 8] >> (pos & 7)); + swap ^= b; + fe_cswap(x2, x3, swap); + fe_cswap(z2, z3, swap); + swap = b; + fe_sub(tmp0, x3, z3); + fe_sub(tmp1, x2, z2); + fe_add(x2, x2, z2); + fe_add(z2, x3, z3); + fe_mul(z3, tmp0, x2); + fe_mul(z2, z2, tmp1); + 
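		/* At this point tmp0 and tmp1 hold the differences x3-z3 and
		 * x2-z2 of the incoming points, x2 holds the sum x2+z2, and
		 * z3 and z2 hold the cross products (x3-z3)*(x2+z2) and
		 * (x3+z3)*(x2-z2): the DA and CB terms of the RFC 7748 ladder
		 * step.  The lines below finish the combined
		 * double-and-differential-add, leaving the doubled point in
		 * (x2:z2) and the differential sum in (x3:z3), with the
		 * a24 = 121666 multiplication done by fe_mul121666(). */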
fe_sq(tmp0, tmp1); + fe_sq(tmp1, x2); + fe_add(x3, z3, z2); + fe_sub(z2, z3, z2); + fe_mul(x2, tmp1, tmp0); + fe_sub(tmp1, tmp1, tmp0); + fe_sq(z2, z2); + fe_mul121666(z3, tmp1); + fe_sq(x3, x3); + fe_add(tmp0, tmp0, z3); + fe_mul(z3, x1, z2); + fe_mul(z2, tmp1, tmp0); + } + fe_cswap(x2, x3, swap); + fe_cswap(z2, z3, swap); + + fe_invert(z2, z2); + fe_mul(x2, x2, z2); + fe_tobytes(out, x2); +} + +#ifdef unused +void +x25519_public_from_private_generic(uint8_t out_public_value[32], + const uint8_t private_key[32]) +{ + uint8_t e[32]; + + memcpy(e, private_key, 32); + e[0] &= 248; + e[31] &= 127; + e[31] |= 64; + + ge_p3 A; + x25519_ge_scalarmult_base(&A, e); + + /* We only need the u-coordinate of the curve25519 point. The map is + * u=(y+1)/(1-y). Since y=Y/Z, this gives u=(Z+Y)/(Z-Y). */ + fe zplusy, zminusy, zminusy_inv; + fe_add(zplusy, A.Z, A.Y); + fe_sub(zminusy, A.Z, A.Y); + fe_invert(zminusy_inv, zminusy); + fe_mul(zplusy, zplusy, zminusy_inv); + fe_tobytes(out_public_value, zplusy); +} +#endif + +void +x25519_public_from_private(uint8_t out_public_value[32], + const uint8_t private_key[32]) +{ + static const uint8_t kMongomeryBasePoint[32] = {9}; + + x25519_scalar_mult(out_public_value, private_key, kMongomeryBasePoint); +} + +int +X25519_keypair(uint8_t out_public_value[X25519_KEY_LENGTH], + uint8_t out_private_key[X25519_KEY_LENGTH], int (*random_buf)(void *, size_t)) +{ + /* All X25519 implementations should decode scalars correctly (see + * https://tools.ietf.org/html/rfc7748#section-5). However, if an + * implementation doesn't then it might interoperate with random keys a + * fraction of the time because they'll, randomly, happen to be correctly + * formed. + * + * Thus we do the opposite of the masking here to make sure that our private + * keys are never correctly masked and so, hopefully, any incorrect + * implementations are deterministically broken. + * + * This does not affect security because, although we're throwing away + * entropy, a valid implementation of scalarmult should throw away the exact + * same bits anyway. */ + int rv = random_buf(out_private_key, 32); + if (rv) return 0; + + out_private_key[0] |= 7; + out_private_key[31] &= 63; + out_private_key[31] |= 128; + + x25519_public_from_private(out_public_value, out_private_key); + return 1; +} + +int +X25519(uint8_t out_shared_key[X25519_KEY_LENGTH], + const uint8_t private_key[X25519_KEY_LENGTH], + const uint8_t peer_public_value[X25519_KEY_LENGTH]) +{ + static const uint8_t kZeros[32] = {0}; + + x25519_scalar_mult(out_shared_key, private_key, peer_public_value); + + /* The all-zero output results when the input is a point of small order. */ + return timingsafe_memcmp(kZeros, out_shared_key, 32) != 0; +} diff --git a/code/crypto/curve25519/curve25519_internal.h b/code/crypto/curve25519/curve25519_internal.h new file mode 100644 index 0000000..f80424a --- /dev/null +++ b/code/crypto/curve25519/curve25519_internal.h @@ -0,0 +1,98 @@ +/* + * Copyright (c) 2015, Google Inc. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#ifndef HEADER_CURVE25519_INTERNAL_H +#define HEADER_CURVE25519_INTERNAL_H + +#include <stdint.h> + +__BEGIN_HIDDEN_DECLS + +/* fe means field element. Here the field is \Z/(2^255-19). An element t, + * entries t[0]...t[9], represents the integer t[0]+2^26 t[1]+2^51 t[2]+2^77 + * t[3]+2^102 t[4]+...+2^230 t[9]. Bounds on each t[i] vary depending on + * context. */ +typedef int32_t fe[10]; + +/* ge means group element. + + * Here the group is the set of pairs (x,y) of field elements (see fe.h) + * satisfying -x^2 + y^2 = 1 + d x^2y^2 + * where d = -121665/121666. + * + * Representations: + * ge_p2 (projective): (X:Y:Z) satisfying x=X/Z, y=Y/Z + * ge_p3 (extended): (X:Y:Z:T) satisfying x=X/Z, y=Y/Z, XY=ZT + * ge_p1p1 (completed): ((X:Z),(Y:T)) satisfying x=X/Z, y=Y/T + * ge_precomp (Duif): (y+x,y-x,2dxy) */ + +typedef struct { + fe X; + fe Y; + fe Z; +} ge_p2; + +typedef struct { + fe X; + fe Y; + fe Z; + fe T; +} ge_p3; + +typedef struct { + fe X; + fe Y; + fe Z; + fe T; +} ge_p1p1; + +typedef struct { + fe yplusx; + fe yminusx; + fe xy2d; +} ge_precomp; + +typedef struct { + fe YplusX; + fe YminusX; + fe Z; + fe T2d; +} ge_cached; + +void x25519_ge_tobytes(uint8_t *s, const ge_p2 *h); +int x25519_ge_frombytes_vartime(ge_p3 *h, const uint8_t *s); +void x25519_ge_p3_to_cached(ge_cached *r, const ge_p3 *p); +void x25519_ge_p1p1_to_p2(ge_p2 *r, const ge_p1p1 *p); +void x25519_ge_p1p1_to_p3(ge_p3 *r, const ge_p1p1 *p); +void x25519_ge_add(ge_p1p1 *r, const ge_p3 *p, const ge_cached *q); +void x25519_ge_sub(ge_p1p1 *r, const ge_p3 *p, const ge_cached *q); +void x25519_ge_scalarmult_small_precomp(ge_p3 *h, const uint8_t a[32], + const uint8_t precomp_table[15 * 2 * 32]); +void x25519_ge_scalarmult_base(ge_p3 *h, const uint8_t a[32]); +void x25519_ge_scalarmult(ge_p2 *r, const uint8_t *scalar, const ge_p3 *A); +void x25519_sc_reduce(uint8_t *s); + +void x25519_public_from_private(uint8_t out_public_value[32], + const uint8_t private_key[32]); + +void x25519_scalar_mult(uint8_t out[32], const uint8_t scalar[32], + const uint8_t point[32]); +void x25519_scalar_mult_generic(uint8_t out[32], const uint8_t scalar[32], + const uint8_t point[32]); + +__END_HIDDEN_DECLS + +#endif /* HEADER_CURVE25519_INTERNAL_H */ diff --git a/code/crypto/e_chacha20poly1305.c b/code/crypto/e_chacha20poly1305.c new file mode 100644 index 0000000..17fcab8 --- /dev/null +++ b/code/crypto/e_chacha20poly1305.c @@ -0,0 +1,229 @@ +/* $OpenBSD: e_chacha20poly1305.c,v 1.15 2017/01/29 17:49:23 beck Exp $ */ + +/* + * Copyright (c) 2015 Reyk Floter <reyk@openbsd.org> + * Copyright (c) 2014, Google Inc. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#include <stdint.h> +#include <string.h> + +#if !defined(OPENSSL_NO_CHACHA) && !defined(OPENSSL_NO_POLY1305) + +#include <compat.h> +#include <chacha.h> +#include <poly1305.h> + +#define POLY1305_TAG_LEN 16 +#define CHACHA20_NONCE_LEN_OLD 8 + +/* + * The informational RFC 7539, "ChaCha20 and Poly1305 for IETF Protocols", + * introduced a modified AEAD construction that is incompatible with the + * common style that has been already used in TLS. The IETF version also + * adds a constant (salt) that is prepended to the nonce. + */ +#define CHACHA20_CONSTANT_LEN 4 +#define CHACHA20_IV_LEN 8 +#define CHACHA20_NONCE_LEN (CHACHA20_CONSTANT_LEN + CHACHA20_IV_LEN) + + +static void +poly1305_update_with_length(poly1305_state *poly1305, + const unsigned char *data, size_t data_len) +{ + size_t j = data_len; + unsigned char length_bytes[8]; + unsigned i; + + for (i = 0; i < sizeof(length_bytes); i++) { + length_bytes[i] = j; + j >>= 8; + } + + if (data != NULL) + CRYPTO_poly1305_update(poly1305, data, data_len); + CRYPTO_poly1305_update(poly1305, length_bytes, sizeof(length_bytes)); +} + +static void +poly1305_update_with_pad16(poly1305_state *poly1305, + const unsigned char *data, size_t data_len) +{ + static const unsigned char zero_pad16[16]; + size_t pad_len; + + CRYPTO_poly1305_update(poly1305, data, data_len); + + /* pad16() is defined in RFC 7539 2.8.1. */ + if ((pad_len = data_len % 16) == 0) + return; + + CRYPTO_poly1305_update(poly1305, zero_pad16, 16 - pad_len); +} + +int +aead_chacha20_poly1305_seal(unsigned char *out, size_t *out_len, + size_t max_out_len, unsigned char key[32], unsigned char tag_len, + const unsigned char *nonce, size_t nonce_len, + const unsigned char *in, size_t in_len, + const unsigned char *ad, size_t ad_len) +{ + unsigned char poly1305_key[32]; + poly1305_state poly1305; + const unsigned char *iv; + const uint64_t in_len_64 = in_len; + uint64_t ctr; + + /* The underlying ChaCha implementation may not overflow the block + * counter into the second counter word. Therefore we disallow + * individual operations that work on more than 2TB at a time. + * in_len_64 is needed because, on 32-bit platforms, size_t is only + * 32-bits and this produces a warning because it's always false. + * Casting to uint64_t inside the conditional is not sufficient to stop + * the warning. 
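 * (Concretely, the check below rejects any input of (2^32 - 1) * 64 bytes or
 * more, i.e. anything at or above roughly 256GB.) */

/* Illustrative stand-alone sketch (not part of this file) of how the RFC 7539
 * nonce is consumed further down in this function: the leading four constant
 * (salt) bytes are loaded little-endian into the upper half of the 64-bit
 * counter handed to CRYPTO_chacha_20(), and the remaining eight bytes are
 * passed as the IV.  Only the standard headers shown are assumed. */
#include <stdint.h>
#include <stdio.h>

int
main(void)
{
	const uint8_t nonce[12] = {
		0x07, 0x00, 0x00, 0x00,				/* constant (salt) */
		0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47	/* per-record IV */
	};
	uint64_t ctr = (uint64_t)(nonce[0] | nonce[1] << 8 |
	    nonce[2] << 16 | nonce[3] << 24) << 32;
	const uint8_t *iv = nonce + 4;

	printf("initial counter = 0x%016llx, iv[0] = 0x%02x\n",
	    (unsigned long long)ctr, iv[0]);
	return 0;
}

/* Resuming the size check: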
*/ + if (in_len_64 >= (1ULL << 32) * 64 - 64) { + // EVPerror(EVP_R_TOO_LARGE); + return 0; + } + + if (max_out_len < in_len + tag_len) { + // EVPerror(EVP_R_BUFFER_TOO_SMALL); + return 0; + } + + if (nonce_len == CHACHA20_NONCE_LEN_OLD) { + /* Google's draft-agl-tls-chacha20poly1305-04, Nov 2013 */ + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, + sizeof(poly1305_key), key, nonce, 0); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_length(&poly1305, ad, ad_len); + CRYPTO_chacha_20(out, in, in_len, key, nonce, 1); + poly1305_update_with_length(&poly1305, out, in_len); + } else if (nonce_len == CHACHA20_NONCE_LEN) { + /* RFC 7539, May 2015 */ + + ctr = (uint64_t)(nonce[0] | nonce[1] << 8 | + nonce[2] << 16 | nonce[3] << 24) << 32; + iv = nonce + CHACHA20_CONSTANT_LEN; + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, + sizeof(poly1305_key), key, iv, ctr); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_pad16(&poly1305, ad, ad_len); + CRYPTO_chacha_20(out, in, in_len, key, iv, ctr + 1); + poly1305_update_with_pad16(&poly1305, out, in_len); + poly1305_update_with_length(&poly1305, NULL, ad_len); + poly1305_update_with_length(&poly1305, NULL, in_len); + } + + if (tag_len != POLY1305_TAG_LEN) { + unsigned char tag[POLY1305_TAG_LEN]; + CRYPTO_poly1305_finish(&poly1305, tag); + memcpy(out + in_len, tag, tag_len); + *out_len = in_len + tag_len; + return 1; + } + + CRYPTO_poly1305_finish(&poly1305, out + in_len); + *out_len = in_len + POLY1305_TAG_LEN; + return 1; +} + +int +aead_chacha20_poly1305_open(unsigned char *out, size_t *out_len, + size_t max_out_len, unsigned char key[32], unsigned char tag_len, + const unsigned char *nonce, size_t nonce_len, + const unsigned char *in, size_t in_len, + const unsigned char *ad, size_t ad_len) +{ + unsigned char mac[POLY1305_TAG_LEN]; + unsigned char poly1305_key[32]; + const unsigned char *iv = nonce; + poly1305_state poly1305; + const uint64_t in_len_64 = in_len; + size_t plaintext_len; + uint64_t ctr = 0; + + if (in_len < tag_len) { + // EVPerror(EVP_R_BAD_DECRYPT); + return 0; + } + + /* The underlying ChaCha implementation may not overflow the block + * counter into the second counter word. Therefore we disallow + * individual operations that work on more than 2TB at a time. + * in_len_64 is needed because, on 32-bit platforms, size_t is only + * 32-bits and this produces a warning because it's always false. + * Casting to uint64_t inside the conditional is not sufficient to stop + * the warning. 
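 * Note that, further down, the Poly1305 tag is recomputed over the associated
 * data and the received ciphertext and verified with timingsafe_memcmp()
 * before any plaintext is written, so a forged message is rejected without
 * releasing keystream.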
*/ + if (in_len_64 >= (1ULL << 32) * 64 - 64) { + // EVPerror(EVP_R_TOO_LARGE); + return 0; + } + + plaintext_len = in_len - tag_len; + + if (max_out_len < plaintext_len) { + // EVPerror(EVP_R_BUFFER_TOO_SMALL); + return 0; + } + + if (nonce_len == CHACHA20_NONCE_LEN_OLD) { + /* Google's draft-agl-tls-chacha20poly1305-04, Nov 2013 */ + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, + sizeof(poly1305_key), key, nonce, 0); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_length(&poly1305, ad, ad_len); + poly1305_update_with_length(&poly1305, in, plaintext_len); + } else if (nonce_len == CHACHA20_NONCE_LEN) { + /* RFC 7539, May 2015 */ + + ctr = (uint64_t)(nonce[0] | nonce[1] << 8 | + nonce[2] << 16 | nonce[3] << 24) << 32; + iv = nonce + CHACHA20_CONSTANT_LEN; + + memset(poly1305_key, 0, sizeof(poly1305_key)); + CRYPTO_chacha_20(poly1305_key, poly1305_key, + sizeof(poly1305_key), key, iv, ctr); + + CRYPTO_poly1305_init(&poly1305, poly1305_key); + poly1305_update_with_pad16(&poly1305, ad, ad_len); + poly1305_update_with_pad16(&poly1305, in, plaintext_len); + poly1305_update_with_length(&poly1305, NULL, ad_len); + poly1305_update_with_length(&poly1305, NULL, plaintext_len); + } + + CRYPTO_poly1305_finish(&poly1305, mac); + + if (timingsafe_memcmp(mac, in + plaintext_len, tag_len) != 0) { + // EVPerror(EVP_R_BAD_DECRYPT); + return 0; + } + + CRYPTO_chacha_20(out, in, plaintext_len, key, iv, ctr + 1); + *out_len = plaintext_len; + return 1; +} + +#endif /* !OPENSSL_NO_CHACHA && !OPENSSL_NO_POLY1305 */ diff --git a/code/crypto/include/chacha.h b/code/crypto/include/chacha.h new file mode 100644 index 0000000..11885b7 --- /dev/null +++ b/code/crypto/include/chacha.h @@ -0,0 +1,48 @@ +/* $OpenBSD: chacha.h,v 1.7 2015/12/09 14:07:55 bcook Exp $ */ +/* + * Copyright (c) 2014 Joel Sing <jsing@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ */ + +#ifndef HEADER_CHACHA_H +#define HEADER_CHACHA_H + +#include <stddef.h> +#include <stdint.h> + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct { + unsigned int input[16]; + unsigned char ks[64]; + unsigned char unused; +} ChaCha_ctx; + +void ChaCha_set_key(ChaCha_ctx *ctx, const unsigned char *key, + unsigned int keybits); +void ChaCha_set_iv(ChaCha_ctx *ctx, const unsigned char *iv, + const unsigned char *counter); +void ChaCha(ChaCha_ctx *ctx, unsigned char *out, const unsigned char *in, + size_t len); + +void CRYPTO_chacha_20(unsigned char *out, const unsigned char *in, size_t len, + const unsigned char key[32], const unsigned char iv[8], uint64_t counter); + +#ifdef __cplusplus +} +#endif + +#endif /* HEADER_CHACHA_H */ diff --git a/code/crypto/include/compat.h b/code/crypto/include/compat.h new file mode 100644 index 0000000..12fb3f4 --- /dev/null +++ b/code/crypto/include/compat.h @@ -0,0 +1,24 @@ +#include <sys/types.h> +#include <stdint.h> + +#ifndef HAVE_GETENTROPY +int getentropy(void *buf, size_t buflen); +#endif + +#ifndef HAVE_EXPLICIT_BZERO +void explicit_bzero(void *, size_t); +#endif + +#ifndef HAVE_TIMINGSAFE_BCMP +int timingsafe_bcmp(const void *b1, const void *b2, size_t n); +#endif + +#ifndef HAVE_TIMINGSAFE_MEMCMP +int timingsafe_memcmp(const void *b1, const void *b2, size_t len); +#endif + +#ifndef HAVE_ARC4RANDOM_BUF +uint32_t arc4random(void); +void arc4random_buf(void *_buf, size_t n); +uint32_t arc4random_uniform(uint32_t upper_bound); +#endif diff --git a/code/crypto/include/curve25519.h b/code/crypto/include/curve25519.h new file mode 100644 index 0000000..a7092cb --- /dev/null +++ b/code/crypto/include/curve25519.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2015, Google Inc. + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY + * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION + * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN + * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#ifndef HEADER_CURVE25519_H +#define HEADER_CURVE25519_H + +#include <stdint.h> +#include <stddef.h> + +#if defined(__cplusplus) +extern "C" { +#endif + +/* + * Curve25519. + * + * Curve25519 is an elliptic curve. See https://tools.ietf.org/html/rfc7748. + */ + +/* + * X25519. + * + * X25519 is the Diffie-Hellman primitive built from curve25519. It is + * sometimes referred to as curve25519, but X25519 is a more precise name. + * See http://cr.yp.to/ecdh.html and https://tools.ietf.org/html/rfc7748. + */ + +#define X25519_KEY_LENGTH 32 + +/* + * X25519_keypair sets |out_public_value| and |out_private_key| to a freshly + * generated, public/private key pair. It returns one on success and zero on + * error. + */ +int X25519_keypair(uint8_t out_public_value[X25519_KEY_LENGTH], + uint8_t out_private_key[X25519_KEY_LENGTH], + int (*random_buf)(void *, size_t)); + +/* + * X25519 writes a shared key to |out_shared_key| that is calculated from the + * given private key and the peer's public value. It returns one on success and + * zero on error. 
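Together, X25519_keypair() above and X25519() declared just below give a complete Diffie-Hellman exchange. A sketch under the assumption that the random_buf callback fills its buffer with cryptographically strong randomness and returns non-zero on success (its exact contract is specific to this tree and not documented here); timingsafe_memcmp() from compat.h is used only to show that both sides end up with the same secret:

static int
x25519_demo(int (*random_buf)(void *, size_t))
{
	uint8_t alice_pub[X25519_KEY_LENGTH], alice_priv[X25519_KEY_LENGTH];
	uint8_t bob_pub[X25519_KEY_LENGTH], bob_priv[X25519_KEY_LENGTH];
	uint8_t k_alice[X25519_KEY_LENGTH], k_bob[X25519_KEY_LENGTH];

	/* Each side generates an ephemeral key pair. */
	if (!X25519_keypair(alice_pub, alice_priv, random_buf) ||
	    !X25519_keypair(bob_pub, bob_priv, random_buf))
		return 0;

	/* Each side combines its own private key with the peer's public value. */
	if (!X25519(k_alice, alice_priv, bob_pub) ||
	    !X25519(k_bob, bob_priv, alice_pub))
		return 0;

	/* k_alice equals k_bob; as the comment below advises, run it through
	 * a KDF together with both public values before using it as a key. */
	return timingsafe_memcmp(k_alice, k_bob, X25519_KEY_LENGTH) == 0;
}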
+ * + * Don't use the shared key directly, rather use a KDF and also include the two + * public values as inputs. + */ +int X25519(uint8_t out_shared_key[X25519_KEY_LENGTH], + const uint8_t private_key[X25519_KEY_LENGTH], + const uint8_t peers_public_value[X25519_KEY_LENGTH]); + +/* + * ED25519 + */ +int ED25519_keypair(uint8_t out_public_key[32], uint8_t out_private_key[64], + int (*random_buf)(void *, size_t)); +int ED25519_sign(uint8_t *out_sig, const uint8_t *message, size_t message_len, + const uint8_t private_key[64]); +int ED25519_verify(const uint8_t *message, size_t message_len, + const uint8_t signature[64], const uint8_t public_key[32]); + +#if defined(__cplusplus) +} /* extern C */ +#endif + +#endif /* HEADER_CURVE25519_H */ diff --git a/code/crypto/include/machine/endian.h b/code/crypto/include/machine/endian.h new file mode 100644 index 0000000..5ec39af --- /dev/null +++ b/code/crypto/include/machine/endian.h @@ -0,0 +1,40 @@ +/* + * Public domain + * machine/endian.h compatibility shim + */ + +#ifndef LIBCRYPTOCOMPAT_BYTE_ORDER_H_ +#define LIBCRYPTOCOMPAT_BYTE_ORDER_H_ + +#if defined(_WIN32) + +#define LITTLE_ENDIAN 1234 +#define BIG_ENDIAN 4321 +#define PDP_ENDIAN 3412 + +/* + * Use GCC and Visual Studio compiler defines to determine endian. + */ +#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ +#define BYTE_ORDER LITTLE_ENDIAN +#else +#define BYTE_ORDER BIG_ENDIAN +#endif + +#elif defined(__linux__) +#include <endian.h> + +#elif defined(__sun) || defined(_AIX) || defined(__hpux) +#include <sys/types.h> +#include <arpa/nameser_compat.h> + +#elif defined(__sgi) +#include <standards.h> +#include <sys/endian.h> + +#else +#include_next <machine/endian.h> + +#endif + +#endif diff --git a/code/crypto/include/md32_common.h b/code/crypto/include/md32_common.h new file mode 100644 index 0000000..7a6e893 --- /dev/null +++ b/code/crypto/include/md32_common.h @@ -0,0 +1,344 @@ +/* $OpenBSD: md32_common.h,v 1.22 2016/11/04 13:56:04 miod Exp $ */ +/* ==================================================================== + * Copyright (c) 1999-2007 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.OpenSSL.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * licensing@OpenSSL.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. 
Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.OpenSSL.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + */ + +/* + * This is a generic 32 bit "collector" for message digest algorithms. + * Whenever needed it collects input character stream into chunks of + * 32 bit values and invokes a block function that performs actual hash + * calculations. + * + * Porting guide. + * + * Obligatory macros: + * + * DATA_ORDER_IS_BIG_ENDIAN or DATA_ORDER_IS_LITTLE_ENDIAN + * this macro defines byte order of input stream. + * HASH_CBLOCK + * size of a unit chunk HASH_BLOCK operates on. + * HASH_LONG + * has to be at least 32 bit wide. + * HASH_CTX + * context structure that at least contains following + * members: + * typedef struct { + * ... + * HASH_LONG Nl,Nh; + * either { + * HASH_LONG data[HASH_LBLOCK]; + * unsigned char data[HASH_CBLOCK]; + * }; + * unsigned int num; + * ... + * } HASH_CTX; + * data[] vector is expected to be zeroed upon first call to + * HASH_UPDATE. + * HASH_UPDATE + * name of "Update" function, implemented here. + * HASH_TRANSFORM + * name of "Transform" function, implemented here. + * HASH_FINAL + * name of "Final" function, implemented here. + * HASH_BLOCK_DATA_ORDER + * name of "block" function capable of treating *unaligned* input + * message in original (data) byte order, implemented externally. + * HASH_MAKE_STRING + * macro convering context variables to an ASCII hash string. + * + * MD5 example: + * + * #define DATA_ORDER_IS_LITTLE_ENDIAN + * + * #define HASH_LONG MD5_LONG + * #define HASH_CTX MD5_CTX + * #define HASH_CBLOCK MD5_CBLOCK + * #define HASH_UPDATE MD5_Update + * #define HASH_TRANSFORM MD5_Transform + * #define HASH_FINAL MD5_Final + * #define HASH_BLOCK_DATA_ORDER md5_block_data_order + * + * <appro@fy.chalmers.se> + */ + +#include <stdint.h> +#include <string.h> + +#if !defined(DATA_ORDER_IS_BIG_ENDIAN) && !defined(DATA_ORDER_IS_LITTLE_ENDIAN) +#error "DATA_ORDER must be defined!" +#endif + +#ifndef HASH_CBLOCK +#error "HASH_CBLOCK must be defined!" +#endif +#ifndef HASH_LONG +#error "HASH_LONG must be defined!" +#endif +#ifndef HASH_CTX +#error "HASH_CTX must be defined!" +#endif + +#ifndef HASH_UPDATE +#error "HASH_UPDATE must be defined!" +#endif +#ifndef HASH_TRANSFORM +#error "HASH_TRANSFORM must be defined!" +#endif +#if !defined(HASH_FINAL) && !defined(HASH_NO_FINAL) +#error "HASH_FINAL or HASH_NO_FINAL must be defined!" +#endif + +#ifndef HASH_BLOCK_DATA_ORDER +#error "HASH_BLOCK_DATA_ORDER must be defined!" 
+#endif + +/* + * This common idiom is recognized by the compiler and turned into a + * CPU-specific intrinsic as appropriate. + * e.g. GCC optimizes to roll on amd64 at -O0 + */ +static inline uint32_t ROTATE(uint32_t a, uint32_t n) +{ + return (a<<n)|(a>>(32-n)); +} + +#if defined(DATA_ORDER_IS_BIG_ENDIAN) + +#if defined(__GNUC__) && __GNUC__>=2 && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM) +# if (defined(__i386) || defined(__i386__) || \ + defined(__x86_64) || defined(__x86_64__)) + /* + * This gives ~30-40% performance improvement in SHA-256 compiled + * with gcc [on P4]. Well, first macro to be frank. We can pull + * this trick on x86* platforms only, because these CPUs can fetch + * unaligned data without raising an exception. + */ +# define HOST_c2l(c,l) ({ unsigned int r=*((const unsigned int *)(c)); \ + asm ("bswapl %0":"=r"(r):"0"(r)); \ + (c)+=4; (l)=r; }) +# define HOST_l2c(l,c) ({ unsigned int r=(l); \ + asm ("bswapl %0":"=r"(r):"0"(r)); \ + *((unsigned int *)(c))=r; (c)+=4; }) +# endif +#endif + +#ifndef HOST_c2l +#define HOST_c2l(c,l) do {l =(((unsigned long)(*((c)++)))<<24); \ + l|=(((unsigned long)(*((c)++)))<<16); \ + l|=(((unsigned long)(*((c)++)))<< 8); \ + l|=(((unsigned long)(*((c)++))) ); \ + } while (0) +#endif +#ifndef HOST_l2c +#define HOST_l2c(l,c) do {*((c)++)=(unsigned char)(((l)>>24)&0xff); \ + *((c)++)=(unsigned char)(((l)>>16)&0xff); \ + *((c)++)=(unsigned char)(((l)>> 8)&0xff); \ + *((c)++)=(unsigned char)(((l) )&0xff); \ + } while (0) +#endif + +#elif defined(DATA_ORDER_IS_LITTLE_ENDIAN) + +#if defined(__i386) || defined(__i386__) || defined(__x86_64) || defined(__x86_64__) +# define HOST_c2l(c,l) ((l)=*((const unsigned int *)(c)), (c)+=4) +# define HOST_l2c(l,c) (*((unsigned int *)(c))=(l), (c)+=4) +#endif + +#ifndef HOST_c2l +#define HOST_c2l(c,l) do {l =(((unsigned long)(*((c)++))) ); \ + l|=(((unsigned long)(*((c)++)))<< 8); \ + l|=(((unsigned long)(*((c)++)))<<16); \ + l|=(((unsigned long)(*((c)++)))<<24); \ + } while (0) +#endif +#ifndef HOST_l2c +#define HOST_l2c(l,c) do {*((c)++)=(unsigned char)(((l) )&0xff); \ + *((c)++)=(unsigned char)(((l)>> 8)&0xff); \ + *((c)++)=(unsigned char)(((l)>>16)&0xff); \ + *((c)++)=(unsigned char)(((l)>>24)&0xff); \ + } while (0) +#endif + +#endif + +/* + * Time for some action:-) + */ + +int +HASH_UPDATE(HASH_CTX *c, const void *data_, size_t len) +{ + const unsigned char *data = data_; + unsigned char *p; + HASH_LONG l; + size_t n; + + if (len == 0) + return 1; + + l = (c->Nl + (((HASH_LONG)len) << 3))&0xffffffffUL; + /* 95-05-24 eay Fixed a bug with the overflow handling, thanks to + * Wei Dai <weidai@eskimo.com> for pointing it out. 
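 *
 * Nl and Nh together hold the running message length in bits, which
 * HASH_FINAL later appends in the padding block: (len << 3) converts the
 * new bytes to bits, the overflow test just below carries into Nh, and
 * (len >> 29) adds the high-order bits of the new length that do not fit
 * in the 32-bit shift.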
*/ + if (l < c->Nl) /* overflow */ + c->Nh++; + c->Nh+=(HASH_LONG)(len>>29); /* might cause compiler warning on 16-bit */ + c->Nl = l; + + n = c->num; + if (n != 0) { + p = (unsigned char *)c->data; + + if (len >= HASH_CBLOCK || len + n >= HASH_CBLOCK) { + memcpy (p + n, data, HASH_CBLOCK - n); + HASH_BLOCK_DATA_ORDER (c, p, 1); + n = HASH_CBLOCK - n; + data += n; + len -= n; + c->num = 0; + memset (p,0,HASH_CBLOCK); /* keep it zeroed */ + } else { + memcpy (p + n, data, len); + c->num += (unsigned int)len; + return 1; + } + } + + n = len/HASH_CBLOCK; + if (n > 0) { + HASH_BLOCK_DATA_ORDER (c, data, n); + n *= HASH_CBLOCK; + data += n; + len -= n; + } + + if (len != 0) { + p = (unsigned char *)c->data; + c->num = (unsigned int)len; + memcpy (p, data, len); + } + return 1; +} + + +void HASH_TRANSFORM (HASH_CTX *c, const unsigned char *data) +{ + HASH_BLOCK_DATA_ORDER (c, data, 1); +} + + +#ifndef HASH_NO_FINAL +int HASH_FINAL (unsigned char *md, HASH_CTX *c) +{ + unsigned char *p = (unsigned char *)c->data; + size_t n = c->num; + + p[n] = 0x80; /* there is always room for one */ + n++; + + if (n > (HASH_CBLOCK - 8)) { + memset (p + n, 0, HASH_CBLOCK - n); + n = 0; + HASH_BLOCK_DATA_ORDER (c, p, 1); + } + memset (p + n, 0, HASH_CBLOCK - 8 - n); + + p += HASH_CBLOCK - 8; +#if defined(DATA_ORDER_IS_BIG_ENDIAN) + HOST_l2c(c->Nh, p); + HOST_l2c(c->Nl, p); +#elif defined(DATA_ORDER_IS_LITTLE_ENDIAN) + HOST_l2c(c->Nl, p); + HOST_l2c(c->Nh, p); +#endif + p -= HASH_CBLOCK; + HASH_BLOCK_DATA_ORDER (c, p, 1); + c->num = 0; + memset (p, 0, HASH_CBLOCK); + +#ifndef HASH_MAKE_STRING +#error "HASH_MAKE_STRING must be defined!" +#else + HASH_MAKE_STRING(c, md); +#endif + + return 1; +} +#endif + +#ifndef MD32_REG_T +#if defined(__alpha) || defined(__sparcv9) || defined(__mips) +#define MD32_REG_T long +/* + * This comment was originaly written for MD5, which is why it + * discusses A-D. But it basically applies to all 32-bit digests, + * which is why it was moved to common header file. + * + * In case you wonder why A-D are declared as long and not + * as MD5_LONG. Doing so results in slight performance + * boost on LP64 architectures. The catch is we don't + * really care if 32 MSBs of a 64-bit register get polluted + * with eventual overflows as we *save* only 32 LSBs in + * *either* case. Now declaring 'em long excuses the compiler + * from keeping 32 MSBs zeroed resulting in 13% performance + * improvement under SPARC Solaris7/64 and 5% under AlphaLinux. + * Well, to be honest it should say that this *prevents* + * performance degradation. + * <appro@fy.chalmers.se> + */ +#else +/* + * Above is not absolute and there are LP64 compilers that + * generate better code if MD32_REG_T is defined int. The above + * pre-processor condition reflects the circumstances under which + * the conclusion was made and is subject to further extension. + * <appro@fy.chalmers.se> + */ +#define MD32_REG_T int +#endif +#endif diff --git a/code/crypto/include/poly1305.h b/code/crypto/include/poly1305.h new file mode 100644 index 0000000..d7af3df --- /dev/null +++ b/code/crypto/include/poly1305.h @@ -0,0 +1,43 @@ +/* $OpenBSD: poly1305.h,v 1.3 2014/07/25 14:04:51 jsing Exp $ */ +/* + * Copyright (c) 2014 Joel Sing <jsing@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
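poly1305.h, whose declarations follow, is the incremental MAC interface implemented later in this diff by poly1305/poly1305.c and used by the AEAD construction above. A usage sketch; the only assumption is a caller that already holds a fresh 32-byte one-time key (in the AEAD it is the first 32 bytes of ChaCha keystream):

/* MAC a message under a one-time key; the key must not be reused. */
static void
poly1305_demo(const unsigned char key[32], const unsigned char *msg,
    size_t msg_len, unsigned char mac[16])
{
	poly1305_state ctx;

	CRYPTO_poly1305_init(&ctx, key);
	CRYPTO_poly1305_update(&ctx, msg, msg_len);	/* may be called repeatedly */
	CRYPTO_poly1305_finish(&ctx, mac);		/* also zeroes the state */
}

When checking a received tag against the computed one, compare with timingsafe_memcmp() rather than memcmp(), as aead_chacha20_poly1305_open() does.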
+ * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#ifndef HEADER_POLY1305_H +#define HEADER_POLY1305_H + +#include <stddef.h> + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct poly1305_context { + size_t aligner; + unsigned char opaque[136]; +} poly1305_context; + +typedef struct poly1305_context poly1305_state; + +void CRYPTO_poly1305_init(poly1305_context *ctx, const unsigned char key[32]); +void CRYPTO_poly1305_update(poly1305_context *ctx, const unsigned char *in, + size_t len); +void CRYPTO_poly1305_finish(poly1305_context *ctx, unsigned char mac[16]); + +#ifdef __cplusplus +} +#endif + +#endif /* HEADER_POLY1305_H */ diff --git a/code/crypto/include/sha.h b/code/crypto/include/sha.h new file mode 100644 index 0000000..93af5d9 --- /dev/null +++ b/code/crypto/include/sha.h @@ -0,0 +1,186 @@ +/* $OpenBSD: sha.h,v 1.21 2015/09/13 21:09:56 doug Exp $ */ +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. 
If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + +#include <stddef.h> + +#ifndef HEADER_SHA_H +#define HEADER_SHA_H +#if !defined(HAVE_ATTRIBUTE__BOUNDED__) && !defined(__OpenBSD__) +#define __bounded__(x, y, z) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/* + * !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + * ! SHA_LONG has to be at least 32 bits wide. ! + * !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + */ + +#define SHA_LONG unsigned int + +#define SHA_LBLOCK 16 +#define SHA_CBLOCK (SHA_LBLOCK*4) /* SHA treats input data as a + * contiguous array of 32 bit + * wide big-endian values. */ +#define SHA_LAST_BLOCK (SHA_CBLOCK-8) +#define SHA_DIGEST_LENGTH 20 + +typedef struct SHAstate_st + { + SHA_LONG h0,h1,h2,h3,h4; + SHA_LONG Nl,Nh; + SHA_LONG data[SHA_LBLOCK]; + unsigned int num; + } SHA_CTX; + +#ifndef OPENSSL_NO_SHA1 +int SHA1_Init(SHA_CTX *c); +int SHA1_Update(SHA_CTX *c, const void *data, size_t len) + __attribute__ ((__bounded__(__buffer__,2,3))); +int SHA1_Final(unsigned char *md, SHA_CTX *c); +unsigned char *SHA1(const unsigned char *d, size_t n, unsigned char *md) + __attribute__ ((__bounded__(__buffer__,1,2))); +void SHA1_Transform(SHA_CTX *c, const unsigned char *data); +#endif + +#define SHA256_CBLOCK (SHA_LBLOCK*4) /* SHA-256 treats input data as a + * contiguous array of 32 bit + * wide big-endian values. 
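The SHA-224/256 and SHA-384/512 declarations below follow the same Init/Update/Final pattern as the SHA-1 functions above. A sketch of the incremental and one-shot forms for SHA-256, assuming only that the caller links against sha256.o from this directory, as the top-level crypto Makefile does:

#include <sha.h>

static void
sha256_demo(const unsigned char *data, size_t len,
    unsigned char digest[SHA256_DIGEST_LENGTH])
{
	SHA256_CTX ctx;

	/* Incremental form: the data may be fed in any number of pieces. */
	SHA256_Init(&ctx);
	SHA256_Update(&ctx, data, len);
	SHA256_Final(digest, &ctx);

	/* One-shot form: computes the same 32-byte digest in a single call. */
	SHA256(data, len, digest);
}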
*/ +#define SHA224_DIGEST_LENGTH 28 +#define SHA256_DIGEST_LENGTH 32 + +typedef struct SHA256state_st + { + SHA_LONG h[8]; + SHA_LONG Nl,Nh; + SHA_LONG data[SHA_LBLOCK]; + unsigned int num,md_len; + } SHA256_CTX; + +#ifndef OPENSSL_NO_SHA256 +int SHA224_Init(SHA256_CTX *c); +int SHA224_Update(SHA256_CTX *c, const void *data, size_t len) + __attribute__ ((__bounded__(__buffer__,2,3))); +int SHA224_Final(unsigned char *md, SHA256_CTX *c); +unsigned char *SHA224(const unsigned char *d, size_t n,unsigned char *md) + __attribute__ ((__bounded__(__buffer__,1,2))); +int SHA256_Init(SHA256_CTX *c); +int SHA256_Update(SHA256_CTX *c, const void *data, size_t len) + __attribute__ ((__bounded__(__buffer__,2,3))); +int SHA256_Final(unsigned char *md, SHA256_CTX *c); +unsigned char *SHA256(const unsigned char *d, size_t n,unsigned char *md) + __attribute__ ((__bounded__(__buffer__,1,2))); +void SHA256_Transform(SHA256_CTX *c, const unsigned char *data); +#endif + +#define SHA384_DIGEST_LENGTH 48 +#define SHA512_DIGEST_LENGTH 64 + +#ifndef OPENSSL_NO_SHA512 +/* + * Unlike 32-bit digest algorithms, SHA-512 *relies* on SHA_LONG64 + * being exactly 64-bit wide. See Implementation Notes in sha512.c + * for further details. + */ +#define SHA512_CBLOCK (SHA_LBLOCK*8) /* SHA-512 treats input data as a + * contiguous array of 64 bit + * wide big-endian values. */ +#if defined(_LP64) +#define SHA_LONG64 unsigned long +#define U64(C) C##UL +#else +#define SHA_LONG64 unsigned long long +#define U64(C) C##ULL +#endif + +typedef struct SHA512state_st + { + SHA_LONG64 h[8]; + SHA_LONG64 Nl,Nh; + union { + SHA_LONG64 d[SHA_LBLOCK]; + unsigned char p[SHA512_CBLOCK]; + } u; + unsigned int num,md_len; + } SHA512_CTX; +#endif + +#ifndef OPENSSL_NO_SHA512 +int SHA384_Init(SHA512_CTX *c); +int SHA384_Update(SHA512_CTX *c, const void *data, size_t len) + __attribute__ ((__bounded__(__buffer__,2,3))); +int SHA384_Final(unsigned char *md, SHA512_CTX *c); +unsigned char *SHA384(const unsigned char *d, size_t n,unsigned char *md) + __attribute__ ((__bounded__(__buffer__,1,2))); +int SHA512_Init(SHA512_CTX *c); +int SHA512_Update(SHA512_CTX *c, const void *data, size_t len) + __attribute__ ((__bounded__(__buffer__,2,3))); +int SHA512_Final(unsigned char *md, SHA512_CTX *c); +unsigned char *SHA512(const unsigned char *d, size_t n,unsigned char *md) + __attribute__ ((__bounded__(__buffer__,1,2))); +void SHA512_Transform(SHA512_CTX *c, const unsigned char *data); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/code/crypto/poly1305/Makefile b/code/crypto/poly1305/Makefile new file mode 100644 index 0000000..6ddec33 --- /dev/null +++ b/code/crypto/poly1305/Makefile @@ -0,0 +1,14 @@ +CFLAGS=-I../include -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -O3 $(PIC) +obj = poly1305.o + +all: libpoly1305.a +dep: all + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +libpoly1305.a: $(obj) + $(AR) rcs $@ $(obj) + +clean: + rm -f *.o *.a diff --git a/code/crypto/poly1305/poly1305-donna.c b/code/crypto/poly1305/poly1305-donna.c new file mode 100644 index 0000000..773ea4e --- /dev/null +++ b/code/crypto/poly1305/poly1305-donna.c @@ -0,0 +1,321 @@ +/* $OpenBSD: poly1305-donna.c,v 1.3 2014/06/12 15:49:30 deraadt Exp $ */ +/* + * Public Domain poly1305 from Andrew Moon + * Based on poly1305-donna.c, poly1305-donna-32.h and poly1305-donna.h from: + * https://github.com/floodyberry/poly1305-donna + */ + +#include <stddef.h> + +static inline void poly1305_init(poly1305_context *ctx, + const unsigned char key[32]); +static inline void 
poly1305_update(poly1305_context *ctx, + const unsigned char *m, size_t bytes); +static inline void poly1305_finish(poly1305_context *ctx, + unsigned char mac[16]); + +/* + * poly1305 implementation using 32 bit * 32 bit = 64 bit multiplication + * and 64 bit addition. + */ + +#define poly1305_block_size 16 + +/* 17 + sizeof(size_t) + 14*sizeof(unsigned long) */ +typedef struct poly1305_state_internal_t { + unsigned long r[5]; + unsigned long h[5]; + unsigned long pad[4]; + size_t leftover; + unsigned char buffer[poly1305_block_size]; + unsigned char final; +} poly1305_state_internal_t; + +/* interpret four 8 bit unsigned integers as a 32 bit unsigned integer in little endian */ +static unsigned long +U8TO32(const unsigned char *p) +{ + return (((unsigned long)(p[0] & 0xff)) | + ((unsigned long)(p[1] & 0xff) << 8) | + ((unsigned long)(p[2] & 0xff) << 16) | + ((unsigned long)(p[3] & 0xff) << 24)); +} + +/* store a 32 bit unsigned integer as four 8 bit unsigned integers in little endian */ +static void +U32TO8(unsigned char *p, unsigned long v) +{ + p[0] = (v) & 0xff; + p[1] = (v >> 8) & 0xff; + p[2] = (v >> 16) & 0xff; + p[3] = (v >> 24) & 0xff; +} + +static inline void +poly1305_init(poly1305_context *ctx, const unsigned char key[32]) +{ + poly1305_state_internal_t *st = (poly1305_state_internal_t *)ctx; + + /* r &= 0xffffffc0ffffffc0ffffffc0fffffff */ + st->r[0] = (U8TO32(&key[0])) & 0x3ffffff; + st->r[1] = (U8TO32(&key[3]) >> 2) & 0x3ffff03; + st->r[2] = (U8TO32(&key[6]) >> 4) & 0x3ffc0ff; + st->r[3] = (U8TO32(&key[9]) >> 6) & 0x3f03fff; + st->r[4] = (U8TO32(&key[12]) >> 8) & 0x00fffff; + + /* h = 0 */ + st->h[0] = 0; + st->h[1] = 0; + st->h[2] = 0; + st->h[3] = 0; + st->h[4] = 0; + + /* save pad for later */ + st->pad[0] = U8TO32(&key[16]); + st->pad[1] = U8TO32(&key[20]); + st->pad[2] = U8TO32(&key[24]); + st->pad[3] = U8TO32(&key[28]); + + st->leftover = 0; + st->final = 0; +} + +static void +poly1305_blocks(poly1305_state_internal_t *st, const unsigned char *m, size_t bytes) +{ + const unsigned long hibit = (st->final) ? 
0 : (1 << 24); /* 1 << 128 */ + unsigned long r0, r1, r2, r3, r4; + unsigned long s1, s2, s3, s4; + unsigned long h0, h1, h2, h3, h4; + unsigned long long d0, d1, d2, d3, d4; + unsigned long c; + + r0 = st->r[0]; + r1 = st->r[1]; + r2 = st->r[2]; + r3 = st->r[3]; + r4 = st->r[4]; + + s1 = r1 * 5; + s2 = r2 * 5; + s3 = r3 * 5; + s4 = r4 * 5; + + h0 = st->h[0]; + h1 = st->h[1]; + h2 = st->h[2]; + h3 = st->h[3]; + h4 = st->h[4]; + + while (bytes >= poly1305_block_size) { + /* h += m[i] */ + h0 += (U8TO32(m + 0)) & 0x3ffffff; + h1 += (U8TO32(m + 3) >> 2) & 0x3ffffff; + h2 += (U8TO32(m + 6) >> 4) & 0x3ffffff; + h3 += (U8TO32(m + 9) >> 6) & 0x3ffffff; + h4 += (U8TO32(m + 12) >> 8) | hibit; + + /* h *= r */ + d0 = ((unsigned long long)h0 * r0) + + ((unsigned long long)h1 * s4) + + ((unsigned long long)h2 * s3) + + ((unsigned long long)h3 * s2) + + ((unsigned long long)h4 * s1); + d1 = ((unsigned long long)h0 * r1) + + ((unsigned long long)h1 * r0) + + ((unsigned long long)h2 * s4) + + ((unsigned long long)h3 * s3) + + ((unsigned long long)h4 * s2); + d2 = ((unsigned long long)h0 * r2) + + ((unsigned long long)h1 * r1) + + ((unsigned long long)h2 * r0) + + ((unsigned long long)h3 * s4) + + ((unsigned long long)h4 * s3); + d3 = ((unsigned long long)h0 * r3) + + ((unsigned long long)h1 * r2) + + ((unsigned long long)h2 * r1) + + ((unsigned long long)h3 * r0) + + ((unsigned long long)h4 * s4); + d4 = ((unsigned long long)h0 * r4) + + ((unsigned long long)h1 * r3) + + ((unsigned long long)h2 * r2) + + ((unsigned long long)h3 * r1) + + ((unsigned long long)h4 * r0); + + /* (partial) h %= p */ + c = (unsigned long)(d0 >> 26); + h0 = (unsigned long)d0 & 0x3ffffff; + d1 += c; + c = (unsigned long)(d1 >> 26); + h1 = (unsigned long)d1 & 0x3ffffff; + d2 += c; + c = (unsigned long)(d2 >> 26); + h2 = (unsigned long)d2 & 0x3ffffff; + d3 += c; + c = (unsigned long)(d3 >> 26); + h3 = (unsigned long)d3 & 0x3ffffff; + d4 += c; + c = (unsigned long)(d4 >> 26); + h4 = (unsigned long)d4 & 0x3ffffff; + h0 += c * 5; + c = (h0 >> 26); + h0 = h0 & 0x3ffffff; + h1 += c; + + m += poly1305_block_size; + bytes -= poly1305_block_size; + } + + st->h[0] = h0; + st->h[1] = h1; + st->h[2] = h2; + st->h[3] = h3; + st->h[4] = h4; +} + +static inline void +poly1305_update(poly1305_context *ctx, const unsigned char *m, size_t bytes) +{ + poly1305_state_internal_t *st = (poly1305_state_internal_t *)ctx; + size_t i; + + /* handle leftover */ + if (st->leftover) { + size_t want = (poly1305_block_size - st->leftover); + if (want > bytes) + want = bytes; + for (i = 0; i < want; i++) + st->buffer[st->leftover + i] = m[i]; + bytes -= want; + m += want; + st->leftover += want; + if (st->leftover < poly1305_block_size) + return; + poly1305_blocks(st, st->buffer, poly1305_block_size); + st->leftover = 0; + } + + /* process full blocks */ + if (bytes >= poly1305_block_size) { + size_t want = (bytes & ~(poly1305_block_size - 1)); + poly1305_blocks(st, m, want); + m += want; + bytes -= want; + } + + /* store leftover */ + if (bytes) { + for (i = 0; i < bytes; i++) + st->buffer[st->leftover + i] = m[i]; + st->leftover += bytes; + } +} + +static inline void +poly1305_finish(poly1305_context *ctx, unsigned char mac[16]) +{ + poly1305_state_internal_t *st = (poly1305_state_internal_t *)ctx; + unsigned long h0, h1, h2, h3, h4, c; + unsigned long g0, g1, g2, g3, g4; + unsigned long long f; + unsigned long mask; + + /* process the remaining block */ + if (st->leftover) { + size_t i = st->leftover; + st->buffer[i++] = 1; + for (; i < poly1305_block_size; 
i++) + st->buffer[i] = 0; + st->final = 1; + poly1305_blocks(st, st->buffer, poly1305_block_size); + } + + /* fully carry h */ + h0 = st->h[0]; + h1 = st->h[1]; + h2 = st->h[2]; + h3 = st->h[3]; + h4 = st->h[4]; + + c = h1 >> 26; + h1 = h1 & 0x3ffffff; + h2 += c; + c = h2 >> 26; + h2 = h2 & 0x3ffffff; + h3 += c; + c = h3 >> 26; + h3 = h3 & 0x3ffffff; + h4 += c; + c = h4 >> 26; + h4 = h4 & 0x3ffffff; + h0 += c * 5; + c = h0 >> 26; + h0 = h0 & 0x3ffffff; + h1 += c; + + /* compute h + -p */ + g0 = h0 + 5; + c = g0 >> 26; + g0 &= 0x3ffffff; + g1 = h1 + c; + c = g1 >> 26; + g1 &= 0x3ffffff; + g2 = h2 + c; + c = g2 >> 26; + g2 &= 0x3ffffff; + g3 = h3 + c; + c = g3 >> 26; + g3 &= 0x3ffffff; + g4 = h4 + c - (1 << 26); + + /* select h if h < p, or h + -p if h >= p */ + mask = (g4 >> ((sizeof(unsigned long) * 8) - 1)) - 1; + g0 &= mask; + g1 &= mask; + g2 &= mask; + g3 &= mask; + g4 &= mask; + mask = ~mask; + h0 = (h0 & mask) | g0; + h1 = (h1 & mask) | g1; + h2 = (h2 & mask) | g2; + h3 = (h3 & mask) | g3; + h4 = (h4 & mask) | g4; + + /* h = h % (2^128) */ + h0 = ((h0) | (h1 << 26)) & 0xffffffff; + h1 = ((h1 >> 6) | (h2 << 20)) & 0xffffffff; + h2 = ((h2 >> 12) | (h3 << 14)) & 0xffffffff; + h3 = ((h3 >> 18) | (h4 << 8)) & 0xffffffff; + + /* mac = (h + pad) % (2^128) */ + f = (unsigned long long)h0 + st->pad[0]; + h0 = (unsigned long)f; + f = (unsigned long long)h1 + st->pad[1] + (f >> 32); + h1 = (unsigned long)f; + f = (unsigned long long)h2 + st->pad[2] + (f >> 32); + h2 = (unsigned long)f; + f = (unsigned long long)h3 + st->pad[3] + (f >> 32); + h3 = (unsigned long)f; + + U32TO8(mac + 0, h0); + U32TO8(mac + 4, h1); + U32TO8(mac + 8, h2); + U32TO8(mac + 12, h3); + + /* zero out the state */ + st->h[0] = 0; + st->h[1] = 0; + st->h[2] = 0; + st->h[3] = 0; + st->h[4] = 0; + st->r[0] = 0; + st->r[1] = 0; + st->r[2] = 0; + st->r[3] = 0; + st->r[4] = 0; + st->pad[0] = 0; + st->pad[1] = 0; + st->pad[2] = 0; + st->pad[3] = 0; +} diff --git a/code/crypto/poly1305/poly1305.c b/code/crypto/poly1305/poly1305.c new file mode 100644 index 0000000..5f3f111 --- /dev/null +++ b/code/crypto/poly1305/poly1305.c @@ -0,0 +1,38 @@ +/* $OpenBSD: poly1305.c,v 1.3 2014/06/12 15:49:30 deraadt Exp $ */ +/* + * Copyright (c) 2014 Joel Sing <jsing@openbsd.org> + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
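A note on the representation used in poly1305-donna.c above: the 130-bit accumulator h and the clamped key r are stored in five 26-bit limbs, h = h0 + h1*2^26 + h2*2^52 + h3*2^78 + h4*2^104, so every partial product in "h *= r" fits in an unsigned long long and carries propagate limb by limb. The hibit added to each full block (1 << 24 in the top limb, i.e. 2^128) is the padding bit required by Poly1305; it is suppressed via st->final for the last partial block, which is instead terminated with a 0x01 byte in poly1305_finish. The "compute h + -p" step at the end subtracts p = 2^130 - 5 and uses the sign of the result as a mask to select h or h - p in constant time, avoiding a data-dependent branch.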
+ */ + +#include <poly1305.h> +#include "poly1305-donna.c" + +void +CRYPTO_poly1305_init(poly1305_context *ctx, const unsigned char key[32]) +{ + poly1305_init(ctx, key); +} + +void +CRYPTO_poly1305_update(poly1305_context *ctx, const unsigned char *in, + size_t len) +{ + poly1305_update(ctx, in, len); +} + +void +CRYPTO_poly1305_finish(poly1305_context *ctx, unsigned char mac[16]) +{ + poly1305_finish(ctx, mac); +} diff --git a/code/crypto/sha/Makefile b/code/crypto/sha/Makefile new file mode 100644 index 0000000..8a50b28 --- /dev/null +++ b/code/crypto/sha/Makefile @@ -0,0 +1,14 @@ +CFLAGS=-I../include -D__BEGIN_HIDDEN_DECLS= -D__END_HIDDEN_DECLS= -O3 $(PIC) +obj = sha1dgst.o sha1_one.o sha256.o sha512.o + +all: libsha.a +dep: all + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +libsha.a: $(obj) + $(AR) rcs $@ $(obj) + +clean: + rm -f *.o *.a diff --git a/code/crypto/sha/sha1-elf-x86_64.S b/code/crypto/sha/sha1-elf-x86_64.S new file mode 100644 index 0000000..d7ef2bf --- /dev/null +++ b/code/crypto/sha/sha1-elf-x86_64.S @@ -0,0 +1,2491 @@ +#include "x86_arch.h" +.text + +.hidden OPENSSL_ia32cap_P + +.globl sha1_block_data_order +.type sha1_block_data_order,@function +.align 16 +sha1_block_data_order: + movl OPENSSL_ia32cap_P+0(%rip),%r9d + movl OPENSSL_ia32cap_P+4(%rip),%r8d + testl $IA32CAP_MASK1_SSSE3,%r8d + jz .Lialu + jmp _ssse3_shortcut + +.align 16 +.Lialu: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + movq %rsp,%r11 + movq %rdi,%r8 + subq $72,%rsp + movq %rsi,%r9 + andq $-64,%rsp + movq %rdx,%r10 + movq %r11,64(%rsp) +.Lprologue: + + movl 0(%r8),%esi + movl 4(%r8),%edi + movl 8(%r8),%r11d + movl 12(%r8),%r12d + movl 16(%r8),%r13d + jmp .Lloop + +.align 16 +.Lloop: + movl 0(%r9),%edx + bswapl %edx + movl %edx,0(%rsp) + movl %r11d,%eax + movl 4(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,4(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 8(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,8(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 12(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,12(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 16(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,16(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 20(%r9),%ebp + movl %edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,20(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl %r11d,%eax + movl 24(%r9),%edx + movl %esi,%ecx + xorl %r12d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r13,1),%r13d + andl %edi,%eax + movl %edx,24(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 28(%r9),%ebp + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r12,1),%r12d + andl %esi,%eax + movl %ebp,28(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 32(%r9),%edx + movl %r12d,%ecx + xorl %edi,%eax + 
bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r11,1),%r11d + andl %r13d,%eax + movl %edx,32(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 36(%r9),%ebp + movl %r11d,%ecx + xorl %esi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rdi,1),%edi + andl %r12d,%eax + movl %ebp,36(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 40(%r9),%edx + movl %edi,%ecx + xorl %r13d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rsi,1),%esi + andl %r11d,%eax + movl %edx,40(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl %r11d,%eax + movl 44(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,44(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 48(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,48(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 52(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,52(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 56(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,56(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 60(%r9),%ebp + movl %edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,60(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl 0(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + xorl 32(%rsp),%edx + andl %edi,%eax + leal 1518500249(%rbp,%r13,1),%r13d + xorl 52(%rsp),%edx + xorl %r12d,%eax + roll $1,%edx + addl %ecx,%r13d + roll $30,%edi + movl %edx,0(%rsp) + addl %eax,%r13d + movl 4(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + xorl 36(%rsp),%ebp + andl %esi,%eax + leal 1518500249(%rdx,%r12,1),%r12d + xorl 56(%rsp),%ebp + xorl %r11d,%eax + roll $1,%ebp + addl %ecx,%r12d + roll $30,%esi + movl %ebp,4(%rsp) + addl %eax,%r12d + movl 8(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + xorl 40(%rsp),%edx + andl %r13d,%eax + leal 1518500249(%rbp,%r11,1),%r11d + xorl 60(%rsp),%edx + xorl %edi,%eax + roll $1,%edx + addl %ecx,%r11d + roll $30,%r13d + movl %edx,8(%rsp) + addl %eax,%r11d + movl 12(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + xorl 44(%rsp),%ebp + andl %r12d,%eax + leal 1518500249(%rdx,%rdi,1),%edi + xorl 0(%rsp),%ebp + xorl %esi,%eax + roll $1,%ebp + addl %ecx,%edi + roll $30,%r12d + movl %ebp,12(%rsp) + addl %eax,%edi + movl 16(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + xorl 48(%rsp),%edx + andl %r11d,%eax + leal 1518500249(%rbp,%rsi,1),%esi + xorl 4(%rsp),%edx + xorl %r13d,%eax + roll $1,%edx + addl %ecx,%esi + roll $30,%r11d + movl %edx,16(%rsp) + addl %eax,%esi + movl 20(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 28(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + 
leal 1859775393(%rdx,%r13,1),%r13d + xorl 52(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 8(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 32(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 56(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 12(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 36(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + xorl 60(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 16(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 40(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rdi,1),%edi + xorl 0(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 20(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 44(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 4(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 24(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 48(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 8(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 28(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 52(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 12(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 32(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 56(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 16(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 36(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 60(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 20(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 40(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 0(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 24(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 44(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r13,1),%r13d + xorl 28(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 48(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 32(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 52(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + 
xorl 36(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 56(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rdi,1),%edi + xorl 40(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 60(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 44(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 0(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 48(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 4(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 52(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 8(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 56(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 12(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 60(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 16(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 0(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 20(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r11d,%eax + movl %r11d,%ebx + xorl 44(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp + xorl %r12d,%ebx + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r13d + andl %edi,%ebx + roll $1,%ebp + addl %ebx,%r13d + roll $30,%edi + movl %ebp,36(%rsp) + addl %ecx,%r13d + movl 40(%rsp),%edx + movl %edi,%eax + movl %edi,%ebx + xorl 48(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 28(%rsp),%edx + addl %eax,%r12d + andl %esi,%ebx + roll $1,%edx + addl %ebx,%r12d + roll $30,%esi + movl %edx,40(%rsp) + addl %ecx,%r12d + movl 44(%rsp),%ebp + movl %esi,%eax + movl %esi,%ebx + xorl 52(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp + xorl %edi,%ebx + leal -1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp + addl %ebx,%r11d + roll $30,%r13d + movl %ebp,44(%rsp) + addl %ecx,%r11d + movl 48(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 56(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx + xorl %esi,%ebx + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%edi + andl %r12d,%ebx + roll $1,%edx + addl %ebx,%edi + roll $30,%r12d + movl 
%edx,48(%rsp) + addl %ecx,%edi + movl 52(%rsp),%ebp + movl %r12d,%eax + movl %r12d,%ebx + xorl 60(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 40(%rsp),%ebp + addl %eax,%esi + andl %r11d,%ebx + roll $1,%ebp + addl %ebx,%esi + roll $30,%r11d + movl %ebp,52(%rsp) + addl %ecx,%esi + movl 56(%rsp),%edx + movl %r11d,%eax + movl %r11d,%ebx + xorl 0(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx + xorl %r12d,%ebx + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 44(%rsp),%edx + addl %eax,%r13d + andl %edi,%ebx + roll $1,%edx + addl %ebx,%r13d + roll $30,%edi + movl %edx,56(%rsp) + addl %ecx,%r13d + movl 60(%rsp),%ebp + movl %edi,%eax + movl %edi,%ebx + xorl 4(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 48(%rsp),%ebp + addl %eax,%r12d + andl %esi,%ebx + roll $1,%ebp + addl %ebx,%r12d + roll $30,%esi + movl %ebp,60(%rsp) + addl %ecx,%r12d + movl 0(%rsp),%edx + movl %esi,%eax + movl %esi,%ebx + xorl 8(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx + xorl %edi,%ebx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 52(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx + addl %ebx,%r11d + roll $30,%r13d + movl %edx,0(%rsp) + addl %ecx,%r11d + movl 4(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 12(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp + xorl %esi,%ebx + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 56(%rsp),%ebp + addl %eax,%edi + andl %r12d,%ebx + roll $1,%ebp + addl %ebx,%edi + roll $30,%r12d + movl %ebp,4(%rsp) + addl %ecx,%edi + movl 8(%rsp),%edx + movl %r12d,%eax + movl %r12d,%ebx + xorl 16(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 60(%rsp),%edx + addl %eax,%esi + andl %r11d,%ebx + roll $1,%edx + addl %ebx,%esi + roll $30,%r11d + movl %edx,8(%rsp) + addl %ecx,%esi + movl 12(%rsp),%ebp + movl %r11d,%eax + movl %r11d,%ebx + xorl 20(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 44(%rsp),%ebp + xorl %r12d,%ebx + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 0(%rsp),%ebp + addl %eax,%r13d + andl %edi,%ebx + roll $1,%ebp + addl %ebx,%r13d + roll $30,%edi + movl %ebp,12(%rsp) + addl %ecx,%r13d + movl 16(%rsp),%edx + movl %edi,%eax + movl %edi,%ebx + xorl 24(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 48(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 4(%rsp),%edx + addl %eax,%r12d + andl %esi,%ebx + roll $1,%edx + addl %ebx,%r12d + roll $30,%esi + movl %edx,16(%rsp) + addl %ecx,%r12d + movl 20(%rsp),%ebp + movl %esi,%eax + movl %esi,%ebx + xorl 28(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 52(%rsp),%ebp + xorl %edi,%ebx + leal -1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 8(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp + addl %ebx,%r11d + roll $30,%r13d + movl %ebp,20(%rsp) + addl %ecx,%r11d + movl 24(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 32(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 56(%rsp),%edx + xorl %esi,%ebx + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 12(%rsp),%edx + addl %eax,%edi + andl %r12d,%ebx + roll $1,%edx + addl %ebx,%edi + roll $30,%r12d + movl %edx,24(%rsp) + addl %ecx,%edi + movl 28(%rsp),%ebp + movl %r12d,%eax + movl %r12d,%ebx + 
xorl 36(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 60(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 16(%rsp),%ebp + addl %eax,%esi + andl %r11d,%ebx + roll $1,%ebp + addl %ebx,%esi + roll $30,%r11d + movl %ebp,28(%rsp) + addl %ecx,%esi + movl 32(%rsp),%edx + movl %r11d,%eax + movl %r11d,%ebx + xorl 40(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 0(%rsp),%edx + xorl %r12d,%ebx + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 20(%rsp),%edx + addl %eax,%r13d + andl %edi,%ebx + roll $1,%edx + addl %ebx,%r13d + roll $30,%edi + movl %edx,32(%rsp) + addl %ecx,%r13d + movl 36(%rsp),%ebp + movl %edi,%eax + movl %edi,%ebx + xorl 44(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 4(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r12d + andl %esi,%ebx + roll $1,%ebp + addl %ebx,%r12d + roll $30,%esi + movl %ebp,36(%rsp) + addl %ecx,%r12d + movl 40(%rsp),%edx + movl %esi,%eax + movl %esi,%ebx + xorl 48(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 8(%rsp),%edx + xorl %edi,%ebx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 28(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx + addl %ebx,%r11d + roll $30,%r13d + movl %edx,40(%rsp) + addl %ecx,%r11d + movl 44(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 52(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 12(%rsp),%ebp + xorl %esi,%ebx + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%edi + andl %r12d,%ebx + roll $1,%ebp + addl %ebx,%edi + roll $30,%r12d + movl %ebp,44(%rsp) + addl %ecx,%edi + movl 48(%rsp),%edx + movl %r12d,%eax + movl %r12d,%ebx + xorl 56(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 16(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%esi + andl %r11d,%ebx + roll $1,%edx + addl %ebx,%esi + roll $30,%r11d + movl %edx,48(%rsp) + addl %ecx,%esi + movl 52(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 60(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 20(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 40(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 0(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 24(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 44(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 4(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 28(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 48(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 32(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 52(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 36(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 56(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 
16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 40(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 60(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 44(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 0(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rbp,%r11,1),%r11d + xorl 48(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 4(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 28(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 52(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 8(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 32(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rsi,1),%esi + xorl 56(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 12(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 36(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 60(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 16(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 40(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 0(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 20(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 44(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 4(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 24(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 48(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 8(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 28(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 52(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 12(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 32(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 56(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 16(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 36(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 60(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 20(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 40(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl 56(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 0(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 
-899497514(%rbp,%r11,1),%r11d + xorl 24(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 44(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl 60(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 4(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 28(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 48(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl %r11d,%eax + leal -899497514(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl %r13d,%eax + addl %ecx,%esi + roll $30,%r11d + addl %eax,%esi + addl 0(%r8),%esi + addl 4(%r8),%edi + addl 8(%r8),%r11d + addl 12(%r8),%r12d + addl 16(%r8),%r13d + movl %esi,0(%r8) + movl %edi,4(%r8) + movl %r11d,8(%r8) + movl %r12d,12(%r8) + movl %r13d,16(%r8) + + subq $1,%r10 + leaq 64(%r9),%r9 + jnz .Lloop + + movq 64(%rsp),%rsi + movq (%rsi),%r13 + movq 8(%rsi),%r12 + movq 16(%rsi),%rbp + movq 24(%rsi),%rbx + leaq 32(%rsi),%rsp +.Lepilogue: + .byte 0xf3,0xc3 +.size sha1_block_data_order,.-sha1_block_data_order +.type sha1_block_data_order_ssse3,@function +.align 16 +sha1_block_data_order_ssse3: +_ssse3_shortcut: + pushq %rbx + pushq %rbp + pushq %r12 + leaq -64(%rsp),%rsp + movq %rdi,%r8 + movq %rsi,%r9 + movq %rdx,%r10 + + shlq $6,%r10 + addq %r9,%r10 + leaq K_XX_XX(%rip),%r11 + + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + movl %ebx,%esi + movl 16(%r8),%ebp + + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r9 +.byte 102,15,56,0,206 +.byte 102,15,56,0,214 +.byte 102,15,56,0,222 + paddd %xmm9,%xmm0 + paddd %xmm9,%xmm1 + paddd %xmm9,%xmm2 + movdqa %xmm0,0(%rsp) + psubd %xmm9,%xmm0 + movdqa %xmm1,16(%rsp) + psubd %xmm9,%xmm1 + movdqa %xmm2,32(%rsp) + psubd %xmm9,%xmm2 + jmp .Loop_ssse3 +.align 16 +.Loop_ssse3: + movdqa %xmm1,%xmm4 + addl 0(%rsp),%ebp + xorl %edx,%ecx + movdqa %xmm3,%xmm8 +.byte 102,15,58,15,224,8 + movl %eax,%edi + roll $5,%eax + paddd %xmm3,%xmm9 + andl %ecx,%esi + xorl %edx,%ecx + psrldq $4,%xmm8 + xorl %edx,%esi + addl %eax,%ebp + pxor %xmm0,%xmm4 + rorl $2,%ebx + addl %esi,%ebp + pxor %xmm2,%xmm8 + addl 4(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pxor %xmm8,%xmm4 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm9,48(%rsp) + xorl %ecx,%edi + addl %ebp,%edx + movdqa %xmm4,%xmm10 + movdqa %xmm4,%xmm8 + rorl $7,%eax + addl %edi,%edx + addl 8(%rsp),%ecx + xorl %ebx,%eax + pslldq $12,%xmm10 + paddd %xmm4,%xmm4 + movl %edx,%edi + roll $5,%edx + andl %eax,%esi + xorl %ebx,%eax + psrld $31,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + movdqa %xmm10,%xmm9 + rorl $7,%ebp + addl %esi,%ecx + psrld $30,%xmm10 + por %xmm8,%xmm4 + addl 12(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pslld $2,%xmm9 + pxor %xmm10,%xmm4 + andl %ebp,%edi + xorl %eax,%ebp + movdqa 0(%r11),%xmm10 + xorl %eax,%edi + addl %ecx,%ebx + pxor %xmm9,%xmm4 + rorl $7,%edx + addl %edi,%ebx + movdqa %xmm2,%xmm5 + addl 16(%rsp),%eax + xorl %ebp,%edx + movdqa %xmm4,%xmm9 +.byte 102,15,58,15,233,8 + movl %ebx,%edi + roll $5,%ebx + paddd %xmm4,%xmm10 + andl %edx,%esi + xorl %ebp,%edx + psrldq $4,%xmm9 + xorl %ebp,%esi + addl %ebx,%eax + pxor %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm3,%xmm9 + addl 20(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pxor %xmm9,%xmm5 + andl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm10,0(%rsp) + xorl %edx,%edi + addl %eax,%ebp + movdqa %xmm5,%xmm8 + 
movdqa %xmm5,%xmm9 + rorl $7,%ebx + addl %edi,%ebp + addl 24(%rsp),%edx + xorl %ecx,%ebx + pslldq $12,%xmm8 + paddd %xmm5,%xmm5 + movl %ebp,%edi + roll $5,%ebp + andl %ebx,%esi + xorl %ecx,%ebx + psrld $31,%xmm9 + xorl %ecx,%esi + addl %ebp,%edx + movdqa %xmm8,%xmm10 + rorl $7,%eax + addl %esi,%edx + psrld $30,%xmm8 + por %xmm9,%xmm5 + addl 28(%rsp),%ecx + xorl %ebx,%eax + movl %edx,%esi + roll $5,%edx + pslld $2,%xmm10 + pxor %xmm8,%xmm5 + andl %eax,%edi + xorl %ebx,%eax + movdqa 16(%r11),%xmm8 + xorl %ebx,%edi + addl %edx,%ecx + pxor %xmm10,%xmm5 + rorl $7,%ebp + addl %edi,%ecx + movdqa %xmm3,%xmm6 + addl 32(%rsp),%ebx + xorl %eax,%ebp + movdqa %xmm5,%xmm10 +.byte 102,15,58,15,242,8 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm5,%xmm8 + andl %ebp,%esi + xorl %eax,%ebp + psrldq $4,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + pxor %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm4,%xmm10 + addl 36(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + pxor %xmm10,%xmm6 + andl %edx,%edi + xorl %ebp,%edx + movdqa %xmm8,16(%rsp) + xorl %ebp,%edi + addl %ebx,%eax + movdqa %xmm6,%xmm9 + movdqa %xmm6,%xmm10 + rorl $7,%ecx + addl %edi,%eax + addl 40(%rsp),%ebp + xorl %edx,%ecx + pslldq $12,%xmm9 + paddd %xmm6,%xmm6 + movl %eax,%edi + roll $5,%eax + andl %ecx,%esi + xorl %edx,%ecx + psrld $31,%xmm10 + xorl %edx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + psrld $30,%xmm9 + por %xmm10,%xmm6 + addl 44(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pslld $2,%xmm8 + pxor %xmm9,%xmm6 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa 16(%r11),%xmm9 + xorl %ecx,%edi + addl %ebp,%edx + pxor %xmm8,%xmm6 + rorl $7,%eax + addl %edi,%edx + movdqa %xmm4,%xmm7 + addl 48(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm6,%xmm8 +.byte 102,15,58,15,251,8 + movl %edx,%edi + roll $5,%edx + paddd %xmm6,%xmm9 + andl %eax,%esi + xorl %ebx,%eax + psrldq $4,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + pxor %xmm3,%xmm7 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm5,%xmm8 + addl 52(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pxor %xmm8,%xmm7 + andl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm9,32(%rsp) + xorl %eax,%edi + addl %ecx,%ebx + movdqa %xmm7,%xmm10 + movdqa %xmm7,%xmm8 + rorl $7,%edx + addl %edi,%ebx + addl 56(%rsp),%eax + xorl %ebp,%edx + pslldq $12,%xmm10 + paddd %xmm7,%xmm7 + movl %ebx,%edi + roll $5,%ebx + andl %edx,%esi + xorl %ebp,%edx + psrld $31,%xmm8 + xorl %ebp,%esi + addl %ebx,%eax + movdqa %xmm10,%xmm9 + rorl $7,%ecx + addl %esi,%eax + psrld $30,%xmm10 + por %xmm8,%xmm7 + addl 60(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pslld $2,%xmm9 + pxor %xmm10,%xmm7 + andl %ecx,%edi + xorl %edx,%ecx + movdqa 16(%r11),%xmm10 + xorl %edx,%edi + addl %eax,%ebp + pxor %xmm9,%xmm7 + rorl $7,%ebx + addl %edi,%ebp + movdqa %xmm7,%xmm9 + addl 0(%rsp),%edx + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,206,8 + xorl %ecx,%ebx + movl %ebp,%edi + roll $5,%ebp + pxor %xmm1,%xmm0 + andl %ebx,%esi + xorl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm7,%xmm10 + xorl %ecx,%esi + addl %ebp,%edx + pxor %xmm9,%xmm0 + rorl $7,%eax + addl %esi,%edx + addl 4(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm0,%xmm9 + movdqa %xmm10,48(%rsp) + movl %edx,%esi + roll $5,%edx + andl %eax,%edi + xorl %ebx,%eax + pslld $2,%xmm0 + xorl %ebx,%edi + addl %edx,%ecx + psrld $30,%xmm9 + rorl $7,%ebp + addl %edi,%ecx + addl 8(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%edi + roll $5,%ecx + por %xmm9,%xmm0 + andl %ebp,%esi + xorl %eax,%ebp + movdqa %xmm0,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + 
rorl $7,%edx + addl %esi,%ebx + addl 12(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + andl %edx,%edi + xorl %ebp,%edx + xorl %ebp,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 16(%rsp),%ebp + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,215,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm2,%xmm1 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm8,%xmm9 + paddd %xmm0,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm10,%xmm1 + addl 20(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm1,%xmm10 + movdqa %xmm8,0(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm1 + addl 24(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm10 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm10,%xmm1 + addl 28(%rsp),%ebx + xorl %eax,%edi + movdqa %xmm1,%xmm8 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 32(%rsp),%eax + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,192,8 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + pxor %xmm3,%xmm2 + xorl %edx,%esi + addl %ebx,%eax + movdqa 32(%r11),%xmm10 + paddd %xmm1,%xmm9 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm8,%xmm2 + addl 36(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + movdqa %xmm2,%xmm8 + movdqa %xmm9,16(%rsp) + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + pslld $2,%xmm2 + addl 40(%rsp),%edx + xorl %ecx,%esi + psrld $30,%xmm8 + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + por %xmm8,%xmm2 + addl 44(%rsp),%ecx + xorl %ebx,%edi + movdqa %xmm2,%xmm9 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 48(%rsp),%ebx + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,201,8 + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + pxor %xmm4,%xmm3 + xorl %ebp,%esi + addl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm2,%xmm10 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm9,%xmm3 + addl 52(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + movdqa %xmm3,%xmm9 + movdqa %xmm10,32(%rsp) + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + pslld $2,%xmm3 + addl 56(%rsp),%ebp + xorl %edx,%esi + psrld $30,%xmm9 + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + por %xmm9,%xmm3 + addl 60(%rsp),%edx + xorl %ecx,%edi + movdqa %xmm3,%xmm10 + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 0(%rsp),%ecx + pxor %xmm0,%xmm4 +.byte 102,68,15,58,15,210,8 + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + pxor %xmm5,%xmm4 + xorl %eax,%esi + addl %edx,%ecx + movdqa %xmm8,%xmm9 + paddd %xmm3,%xmm8 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm10,%xmm4 + addl 4(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + movdqa %xmm4,%xmm10 + movdqa %xmm8,48(%rsp) + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + pslld $2,%xmm4 + addl 8(%rsp),%eax + xorl %ebp,%esi + psrld $30,%xmm10 + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + por %xmm10,%xmm4 + addl 12(%rsp),%ebp + xorl %edx,%edi + movdqa %xmm4,%xmm8 + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 16(%rsp),%edx + pxor %xmm1,%xmm5 +.byte 102,68,15,58,15,195,8 + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + pxor %xmm6,%xmm5 + xorl %ebx,%esi + addl %ebp,%edx + movdqa 
%xmm9,%xmm10 + paddd %xmm4,%xmm9 + rorl $7,%eax + addl %esi,%edx + pxor %xmm8,%xmm5 + addl 20(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + movdqa %xmm5,%xmm8 + movdqa %xmm9,0(%rsp) + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + pslld $2,%xmm5 + addl 24(%rsp),%ebx + xorl %eax,%esi + psrld $30,%xmm8 + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + por %xmm8,%xmm5 + addl 28(%rsp),%eax + xorl %ebp,%edi + movdqa %xmm5,%xmm9 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movl %ecx,%edi + pxor %xmm2,%xmm6 +.byte 102,68,15,58,15,204,8 + xorl %edx,%ecx + addl 32(%rsp),%ebp + andl %edx,%edi + pxor %xmm7,%xmm6 + andl %ecx,%esi + rorl $7,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm5,%xmm10 + addl %edi,%ebp + movl %eax,%edi + pxor %xmm9,%xmm6 + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movdqa %xmm6,%xmm9 + movdqa %xmm10,16(%rsp) + movl %ebx,%esi + xorl %ecx,%ebx + addl 36(%rsp),%edx + andl %ecx,%esi + pslld $2,%xmm6 + andl %ebx,%edi + rorl $7,%eax + psrld $30,%xmm9 + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + por %xmm9,%xmm6 + movl %eax,%edi + xorl %ebx,%eax + movdqa %xmm6,%xmm10 + addl 40(%rsp),%ecx + andl %ebx,%edi + andl %eax,%esi + rorl $7,%ebp + addl %edi,%ecx + movl %edx,%edi + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%esi + xorl %eax,%ebp + addl 44(%rsp),%ebx + andl %eax,%esi + andl %ebp,%edi + rorl $7,%edx + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%edi + pxor %xmm3,%xmm7 +.byte 102,68,15,58,15,213,8 + xorl %ebp,%edx + addl 48(%rsp),%eax + andl %ebp,%edi + pxor %xmm0,%xmm7 + andl %edx,%esi + rorl $7,%ecx + movdqa 48(%r11),%xmm9 + paddd %xmm6,%xmm8 + addl %edi,%eax + movl %ebx,%edi + pxor %xmm10,%xmm7 + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movdqa %xmm7,%xmm10 + movdqa %xmm8,32(%rsp) + movl %ecx,%esi + xorl %edx,%ecx + addl 52(%rsp),%ebp + andl %edx,%esi + pslld $2,%xmm7 + andl %ecx,%edi + rorl $7,%ebx + psrld $30,%xmm10 + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + por %xmm10,%xmm7 + movl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm7,%xmm8 + addl 56(%rsp),%edx + andl %ecx,%edi + andl %ebx,%esi + rorl $7,%eax + addl %edi,%edx + movl %ebp,%edi + roll $5,%ebp + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%esi + xorl %ebx,%eax + addl 60(%rsp),%ecx + andl %ebx,%esi + andl %eax,%edi + rorl $7,%ebp + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%edi + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,198,8 + xorl %eax,%ebp + addl 0(%rsp),%ebx + andl %eax,%edi + pxor %xmm1,%xmm0 + andl %ebp,%esi + rorl $7,%edx + movdqa %xmm9,%xmm10 + paddd %xmm7,%xmm9 + addl %edi,%ebx + movl %ecx,%edi + pxor %xmm8,%xmm0 + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movdqa %xmm0,%xmm8 + movdqa %xmm9,48(%rsp) + movl %edx,%esi + xorl %ebp,%edx + addl 4(%rsp),%eax + andl %ebp,%esi + pslld $2,%xmm0 + andl %edx,%edi + rorl $7,%ecx + psrld $30,%xmm8 + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + por %xmm8,%xmm0 + movl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm0,%xmm9 + addl 8(%rsp),%ebp + andl %edx,%edi + andl %ecx,%esi + rorl $7,%ebx + addl %edi,%ebp + movl %eax,%edi + roll $5,%eax + 
addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%esi + xorl %ecx,%ebx + addl 12(%rsp),%edx + andl %ecx,%esi + andl %ebx,%edi + rorl $7,%eax + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%edi + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,207,8 + xorl %ebx,%eax + addl 16(%rsp),%ecx + andl %ebx,%edi + pxor %xmm2,%xmm1 + andl %eax,%esi + rorl $7,%ebp + movdqa %xmm10,%xmm8 + paddd %xmm0,%xmm10 + addl %edi,%ecx + movl %edx,%edi + pxor %xmm9,%xmm1 + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movdqa %xmm1,%xmm9 + movdqa %xmm10,0(%rsp) + movl %ebp,%esi + xorl %eax,%ebp + addl 20(%rsp),%ebx + andl %eax,%esi + pslld $2,%xmm1 + andl %ebp,%edi + rorl $7,%edx + psrld $30,%xmm9 + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + por %xmm9,%xmm1 + movl %edx,%edi + xorl %ebp,%edx + movdqa %xmm1,%xmm10 + addl 24(%rsp),%eax + andl %ebp,%edi + andl %edx,%esi + rorl $7,%ecx + addl %edi,%eax + movl %ebx,%edi + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movl %ecx,%esi + xorl %edx,%ecx + addl 28(%rsp),%ebp + andl %edx,%esi + andl %ecx,%edi + rorl $7,%ebx + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%edi + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,208,8 + xorl %ecx,%ebx + addl 32(%rsp),%edx + andl %ecx,%edi + pxor %xmm3,%xmm2 + andl %ebx,%esi + rorl $7,%eax + movdqa %xmm8,%xmm9 + paddd %xmm1,%xmm8 + addl %edi,%edx + movl %ebp,%edi + pxor %xmm10,%xmm2 + roll $5,%ebp + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movdqa %xmm2,%xmm10 + movdqa %xmm8,16(%rsp) + movl %eax,%esi + xorl %ebx,%eax + addl 36(%rsp),%ecx + andl %ebx,%esi + pslld $2,%xmm2 + andl %eax,%edi + rorl $7,%ebp + psrld $30,%xmm10 + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + por %xmm10,%xmm2 + movl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm2,%xmm8 + addl 40(%rsp),%ebx + andl %eax,%edi + andl %ebp,%esi + rorl $7,%edx + addl %edi,%ebx + movl %ecx,%edi + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%esi + xorl %ebp,%edx + addl 44(%rsp),%eax + andl %ebp,%esi + andl %edx,%edi + rorl $7,%ecx + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + addl 48(%rsp),%ebp + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,193,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm4,%xmm3 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm10 + paddd %xmm2,%xmm9 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm8,%xmm3 + addl 52(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm3,%xmm8 + movdqa %xmm9,32(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm3 + addl 56(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm8 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm8,%xmm3 + addl 60(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 0(%rsp),%eax + paddd %xmm3,%xmm10 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + movdqa %xmm10,48(%rsp) + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 4(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 8(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl 
%ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 12(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + cmpq %r10,%r9 + je .Ldone_ssse3 + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r9 + addl 16(%rsp),%ebx + xorl %eax,%esi +.byte 102,15,56,0,206 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm9,%xmm0 + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + movdqa %xmm0,0(%rsp) + addl 20(%rsp),%eax + xorl %ebp,%edi + psubd %xmm9,%xmm0 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi +.byte 102,15,56,0,214 + movl %edx,%edi + roll $5,%edx + paddd %xmm9,%xmm1 + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + movdqa %xmm1,16(%rsp) + addl 36(%rsp),%ebx + xorl %eax,%edi + psubd %xmm9,%xmm1 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi +.byte 102,15,56,0,222 + movl %ebp,%edi + roll $5,%ebp + paddd %xmm9,%xmm2 + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + movdqa %xmm2,32(%rsp) + addl 52(%rsp),%ecx + xorl %ebx,%edi + psubd %xmm9,%xmm2 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + addl 12(%r8),%edx + movl %eax,0(%r8) + addl 16(%r8),%ebp + movl %esi,4(%r8) + movl %esi,%ebx + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + jmp .Loop_ssse3 + +.align 16 +.Ldone_ssse3: + addl 16(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 20(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + addl 36(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl 
%eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 52(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + movl %eax,0(%r8) + addl 12(%r8),%edx + movl %esi,4(%r8) + addl 16(%r8),%ebp + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + leaq 64(%rsp),%rsi + movq 0(%rsi),%r12 + movq 8(%rsi),%rbp + movq 16(%rsi),%rbx + leaq 24(%rsi),%rsp +.Lepilogue_ssse3: + .byte 0xf3,0xc3 +.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3 +.align 64 +K_XX_XX: +.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 +.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 +.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc +.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 +.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f +.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.align 64 +#if defined(HAVE_GNU_STACK) +.section .note.GNU-stack,"",%progbits +#endif diff --git a/code/crypto/sha/sha1-macosx-x86_64.S b/code/crypto/sha/sha1-macosx-x86_64.S new file mode 100644 index 0000000..f5c2c36 --- /dev/null +++ b/code/crypto/sha/sha1-macosx-x86_64.S @@ -0,0 +1,2488 @@ +#include "x86_arch.h" +.text + +.private_extern _OPENSSL_ia32cap_P + +.globl _sha1_block_data_order + +.p2align 4 +_sha1_block_data_order: + movl _OPENSSL_ia32cap_P+0(%rip),%r9d + movl _OPENSSL_ia32cap_P+4(%rip),%r8d + testl $IA32CAP_MASK1_SSSE3,%r8d + jz L$ialu + jmp _ssse3_shortcut + +.p2align 4 +L$ialu: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + movq %rsp,%r11 + movq %rdi,%r8 + subq $72,%rsp + movq %rsi,%r9 + andq $-64,%rsp + movq %rdx,%r10 + movq %r11,64(%rsp) +L$prologue: + + movl 0(%r8),%esi + movl 4(%r8),%edi + movl 8(%r8),%r11d + movl 12(%r8),%r12d + movl 16(%r8),%r13d + jmp L$loop + +.p2align 4 +L$loop: + movl 0(%r9),%edx + bswapl %edx + movl %edx,0(%rsp) + movl %r11d,%eax + movl 4(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,4(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 8(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,8(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 12(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,12(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 16(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,16(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 20(%r9),%ebp + movl 
%edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,20(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl %r11d,%eax + movl 24(%r9),%edx + movl %esi,%ecx + xorl %r12d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r13,1),%r13d + andl %edi,%eax + movl %edx,24(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 28(%r9),%ebp + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r12,1),%r12d + andl %esi,%eax + movl %ebp,28(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 32(%r9),%edx + movl %r12d,%ecx + xorl %edi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r11,1),%r11d + andl %r13d,%eax + movl %edx,32(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 36(%r9),%ebp + movl %r11d,%ecx + xorl %esi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rdi,1),%edi + andl %r12d,%eax + movl %ebp,36(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 40(%r9),%edx + movl %edi,%ecx + xorl %r13d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rsi,1),%esi + andl %r11d,%eax + movl %edx,40(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl %r11d,%eax + movl 44(%r9),%ebp + movl %esi,%ecx + xorl %r12d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r13,1),%r13d + andl %edi,%eax + movl %ebp,44(%rsp) + addl %ecx,%r13d + xorl %r12d,%eax + roll $30,%edi + addl %eax,%r13d + movl %edi,%eax + movl 48(%r9),%edx + movl %r13d,%ecx + xorl %r11d,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%r12,1),%r12d + andl %esi,%eax + movl %edx,48(%rsp) + addl %ecx,%r12d + xorl %r11d,%eax + roll $30,%esi + addl %eax,%r12d + movl %esi,%eax + movl 52(%r9),%ebp + movl %r12d,%ecx + xorl %edi,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%r11,1),%r11d + andl %r13d,%eax + movl %ebp,52(%rsp) + addl %ecx,%r11d + xorl %edi,%eax + roll $30,%r13d + addl %eax,%r11d + movl %r13d,%eax + movl 56(%r9),%edx + movl %r11d,%ecx + xorl %esi,%eax + bswapl %edx + roll $5,%ecx + leal 1518500249(%rbp,%rdi,1),%edi + andl %r12d,%eax + movl %edx,56(%rsp) + addl %ecx,%edi + xorl %esi,%eax + roll $30,%r12d + addl %eax,%edi + movl %r12d,%eax + movl 60(%r9),%ebp + movl %edi,%ecx + xorl %r13d,%eax + bswapl %ebp + roll $5,%ecx + leal 1518500249(%rdx,%rsi,1),%esi + andl %r11d,%eax + movl %ebp,60(%rsp) + addl %ecx,%esi + xorl %r13d,%eax + roll $30,%r11d + addl %eax,%esi + movl 0(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + xorl 32(%rsp),%edx + andl %edi,%eax + leal 1518500249(%rbp,%r13,1),%r13d + xorl 52(%rsp),%edx + xorl %r12d,%eax + roll $1,%edx + addl %ecx,%r13d + roll $30,%edi + movl %edx,0(%rsp) + addl %eax,%r13d + movl 4(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + xorl 36(%rsp),%ebp + andl %esi,%eax + leal 1518500249(%rdx,%r12,1),%r12d + xorl 56(%rsp),%ebp + xorl %r11d,%eax + roll $1,%ebp + addl %ecx,%r12d + roll $30,%esi + movl %ebp,4(%rsp) + addl %eax,%r12d + movl 8(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + xorl 40(%rsp),%edx + andl %r13d,%eax + leal 1518500249(%rbp,%r11,1),%r11d + xorl 60(%rsp),%edx + xorl %edi,%eax + roll $1,%edx + addl %ecx,%r11d + roll 
$30,%r13d + movl %edx,8(%rsp) + addl %eax,%r11d + movl 12(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + xorl 44(%rsp),%ebp + andl %r12d,%eax + leal 1518500249(%rdx,%rdi,1),%edi + xorl 0(%rsp),%ebp + xorl %esi,%eax + roll $1,%ebp + addl %ecx,%edi + roll $30,%r12d + movl %ebp,12(%rsp) + addl %eax,%edi + movl 16(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + xorl 48(%rsp),%edx + andl %r11d,%eax + leal 1518500249(%rbp,%rsi,1),%esi + xorl 4(%rsp),%edx + xorl %r13d,%eax + roll $1,%edx + addl %ecx,%esi + roll $30,%r11d + movl %edx,16(%rsp) + addl %eax,%esi + movl 20(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 28(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r13,1),%r13d + xorl 52(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 8(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 32(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 56(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 12(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 36(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + xorl 60(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 16(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 40(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rdi,1),%edi + xorl 0(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 20(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 44(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 4(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 24(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 48(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 8(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 28(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 52(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 12(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 32(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 56(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 16(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 36(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 60(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 20(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 40(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 0(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 24(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 44(%rsp),%edx + roll $30,%r11d + addl 
%eax,%esi + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r13,1),%r13d + xorl 28(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 48(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r12,1),%r12d + xorl 32(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 52(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r11,1),%r11d + xorl 36(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 56(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rdi,1),%edi + xorl 40(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 60(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rsi,1),%esi + xorl 44(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 0(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r13,1),%r13d + xorl 48(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 4(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal 1859775393(%rdx,%r12,1),%r12d + xorl 52(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 8(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%r11,1),%r11d + xorl 56(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 12(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal 1859775393(%rdx,%rdi,1),%edi + xorl 60(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 16(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal 1859775393(%rbp,%rsi,1),%esi + xorl 0(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 20(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %r11d,%eax + movl %r11d,%ebx + xorl 44(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 4(%rsp),%ebp + xorl %r12d,%ebx + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r13d + andl %edi,%ebx + roll $1,%ebp + addl %ebx,%r13d + roll $30,%edi + movl %ebp,36(%rsp) + addl %ecx,%r13d + movl 40(%rsp),%edx + movl %edi,%eax + movl %edi,%ebx + xorl 48(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 8(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 28(%rsp),%edx + addl 
%eax,%r12d + andl %esi,%ebx + roll $1,%edx + addl %ebx,%r12d + roll $30,%esi + movl %edx,40(%rsp) + addl %ecx,%r12d + movl 44(%rsp),%ebp + movl %esi,%eax + movl %esi,%ebx + xorl 52(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 12(%rsp),%ebp + xorl %edi,%ebx + leal -1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp + addl %ebx,%r11d + roll $30,%r13d + movl %ebp,44(%rsp) + addl %ecx,%r11d + movl 48(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 56(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 16(%rsp),%edx + xorl %esi,%ebx + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%edi + andl %r12d,%ebx + roll $1,%edx + addl %ebx,%edi + roll $30,%r12d + movl %edx,48(%rsp) + addl %ecx,%edi + movl 52(%rsp),%ebp + movl %r12d,%eax + movl %r12d,%ebx + xorl 60(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 20(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 40(%rsp),%ebp + addl %eax,%esi + andl %r11d,%ebx + roll $1,%ebp + addl %ebx,%esi + roll $30,%r11d + movl %ebp,52(%rsp) + addl %ecx,%esi + movl 56(%rsp),%edx + movl %r11d,%eax + movl %r11d,%ebx + xorl 0(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 24(%rsp),%edx + xorl %r12d,%ebx + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 44(%rsp),%edx + addl %eax,%r13d + andl %edi,%ebx + roll $1,%edx + addl %ebx,%r13d + roll $30,%edi + movl %edx,56(%rsp) + addl %ecx,%r13d + movl 60(%rsp),%ebp + movl %edi,%eax + movl %edi,%ebx + xorl 4(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 28(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 48(%rsp),%ebp + addl %eax,%r12d + andl %esi,%ebx + roll $1,%ebp + addl %ebx,%r12d + roll $30,%esi + movl %ebp,60(%rsp) + addl %ecx,%r12d + movl 0(%rsp),%edx + movl %esi,%eax + movl %esi,%ebx + xorl 8(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 32(%rsp),%edx + xorl %edi,%ebx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 52(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx + addl %ebx,%r11d + roll $30,%r13d + movl %edx,0(%rsp) + addl %ecx,%r11d + movl 4(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 12(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 36(%rsp),%ebp + xorl %esi,%ebx + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 56(%rsp),%ebp + addl %eax,%edi + andl %r12d,%ebx + roll $1,%ebp + addl %ebx,%edi + roll $30,%r12d + movl %ebp,4(%rsp) + addl %ecx,%edi + movl 8(%rsp),%edx + movl %r12d,%eax + movl %r12d,%ebx + xorl 16(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 40(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 60(%rsp),%edx + addl %eax,%esi + andl %r11d,%ebx + roll $1,%edx + addl %ebx,%esi + roll $30,%r11d + movl %edx,8(%rsp) + addl %ecx,%esi + movl 12(%rsp),%ebp + movl %r11d,%eax + movl %r11d,%ebx + xorl 20(%rsp),%ebp + andl %r12d,%eax + movl %esi,%ecx + xorl 44(%rsp),%ebp + xorl %r12d,%ebx + leal -1894007588(%rdx,%r13,1),%r13d + roll $5,%ecx + xorl 0(%rsp),%ebp + addl %eax,%r13d + andl %edi,%ebx + roll $1,%ebp + addl %ebx,%r13d + roll $30,%edi + movl %ebp,12(%rsp) + addl %ecx,%r13d + movl 16(%rsp),%edx + movl %edi,%eax + movl %edi,%ebx + xorl 24(%rsp),%edx + andl %r11d,%eax + movl %r13d,%ecx + xorl 48(%rsp),%edx + xorl %r11d,%ebx + leal -1894007588(%rbp,%r12,1),%r12d + roll $5,%ecx + xorl 4(%rsp),%edx + addl %eax,%r12d + andl %esi,%ebx + roll $1,%edx + addl %ebx,%r12d + roll $30,%esi + movl 
%edx,16(%rsp) + addl %ecx,%r12d + movl 20(%rsp),%ebp + movl %esi,%eax + movl %esi,%ebx + xorl 28(%rsp),%ebp + andl %edi,%eax + movl %r12d,%ecx + xorl 52(%rsp),%ebp + xorl %edi,%ebx + leal -1894007588(%rdx,%r11,1),%r11d + roll $5,%ecx + xorl 8(%rsp),%ebp + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%ebp + addl %ebx,%r11d + roll $30,%r13d + movl %ebp,20(%rsp) + addl %ecx,%r11d + movl 24(%rsp),%edx + movl %r13d,%eax + movl %r13d,%ebx + xorl 32(%rsp),%edx + andl %esi,%eax + movl %r11d,%ecx + xorl 56(%rsp),%edx + xorl %esi,%ebx + leal -1894007588(%rbp,%rdi,1),%edi + roll $5,%ecx + xorl 12(%rsp),%edx + addl %eax,%edi + andl %r12d,%ebx + roll $1,%edx + addl %ebx,%edi + roll $30,%r12d + movl %edx,24(%rsp) + addl %ecx,%edi + movl 28(%rsp),%ebp + movl %r12d,%eax + movl %r12d,%ebx + xorl 36(%rsp),%ebp + andl %r13d,%eax + movl %edi,%ecx + xorl 60(%rsp),%ebp + xorl %r13d,%ebx + leal -1894007588(%rdx,%rsi,1),%esi + roll $5,%ecx + xorl 16(%rsp),%ebp + addl %eax,%esi + andl %r11d,%ebx + roll $1,%ebp + addl %ebx,%esi + roll $30,%r11d + movl %ebp,28(%rsp) + addl %ecx,%esi + movl 32(%rsp),%edx + movl %r11d,%eax + movl %r11d,%ebx + xorl 40(%rsp),%edx + andl %r12d,%eax + movl %esi,%ecx + xorl 0(%rsp),%edx + xorl %r12d,%ebx + leal -1894007588(%rbp,%r13,1),%r13d + roll $5,%ecx + xorl 20(%rsp),%edx + addl %eax,%r13d + andl %edi,%ebx + roll $1,%edx + addl %ebx,%r13d + roll $30,%edi + movl %edx,32(%rsp) + addl %ecx,%r13d + movl 36(%rsp),%ebp + movl %edi,%eax + movl %edi,%ebx + xorl 44(%rsp),%ebp + andl %r11d,%eax + movl %r13d,%ecx + xorl 4(%rsp),%ebp + xorl %r11d,%ebx + leal -1894007588(%rdx,%r12,1),%r12d + roll $5,%ecx + xorl 24(%rsp),%ebp + addl %eax,%r12d + andl %esi,%ebx + roll $1,%ebp + addl %ebx,%r12d + roll $30,%esi + movl %ebp,36(%rsp) + addl %ecx,%r12d + movl 40(%rsp),%edx + movl %esi,%eax + movl %esi,%ebx + xorl 48(%rsp),%edx + andl %edi,%eax + movl %r12d,%ecx + xorl 8(%rsp),%edx + xorl %edi,%ebx + leal -1894007588(%rbp,%r11,1),%r11d + roll $5,%ecx + xorl 28(%rsp),%edx + addl %eax,%r11d + andl %r13d,%ebx + roll $1,%edx + addl %ebx,%r11d + roll $30,%r13d + movl %edx,40(%rsp) + addl %ecx,%r11d + movl 44(%rsp),%ebp + movl %r13d,%eax + movl %r13d,%ebx + xorl 52(%rsp),%ebp + andl %esi,%eax + movl %r11d,%ecx + xorl 12(%rsp),%ebp + xorl %esi,%ebx + leal -1894007588(%rdx,%rdi,1),%edi + roll $5,%ecx + xorl 32(%rsp),%ebp + addl %eax,%edi + andl %r12d,%ebx + roll $1,%ebp + addl %ebx,%edi + roll $30,%r12d + movl %ebp,44(%rsp) + addl %ecx,%edi + movl 48(%rsp),%edx + movl %r12d,%eax + movl %r12d,%ebx + xorl 56(%rsp),%edx + andl %r13d,%eax + movl %edi,%ecx + xorl 16(%rsp),%edx + xorl %r13d,%ebx + leal -1894007588(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl 36(%rsp),%edx + addl %eax,%esi + andl %r11d,%ebx + roll $1,%edx + addl %ebx,%esi + roll $30,%r11d + movl %edx,48(%rsp) + addl %ecx,%esi + movl 52(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 60(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 20(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 40(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,52(%rsp) + movl 56(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 0(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 24(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 44(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,56(%rsp) + movl 60(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 4(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 
28(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 48(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,60(%rsp) + movl 0(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 8(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 32(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 52(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,0(%rsp) + movl 4(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 12(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 36(%rsp),%ebp + xorl %r13d,%eax + addl %ecx,%esi + xorl 56(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,4(%rsp) + movl 8(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 16(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 40(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 60(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,8(%rsp) + movl 12(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 20(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 44(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 0(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl %ebp,12(%rsp) + movl 16(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 24(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rbp,%r11,1),%r11d + xorl 48(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 4(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl %edx,16(%rsp) + movl 20(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 28(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 52(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 8(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %ebp,20(%rsp) + movl 24(%rsp),%edx + movl %r12d,%eax + movl %edi,%ecx + xorl 32(%rsp),%edx + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rsi,1),%esi + xorl 56(%rsp),%edx + xorl %r13d,%eax + addl %ecx,%esi + xorl 12(%rsp),%edx + roll $30,%r11d + addl %eax,%esi + roll $1,%edx + movl %edx,24(%rsp) + movl 28(%rsp),%ebp + movl %r11d,%eax + movl %esi,%ecx + xorl 36(%rsp),%ebp + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r13,1),%r13d + xorl 60(%rsp),%ebp + xorl %r12d,%eax + addl %ecx,%r13d + xorl 16(%rsp),%ebp + roll $30,%edi + addl %eax,%r13d + roll $1,%ebp + movl %ebp,28(%rsp) + movl 32(%rsp),%edx + movl %edi,%eax + movl %r13d,%ecx + xorl 40(%rsp),%edx + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r12,1),%r12d + xorl 0(%rsp),%edx + xorl %r11d,%eax + addl %ecx,%r12d + xorl 20(%rsp),%edx + roll $30,%esi + addl %eax,%r12d + roll $1,%edx + movl %edx,32(%rsp) + movl 36(%rsp),%ebp + movl %esi,%eax + movl %r12d,%ecx + xorl 44(%rsp),%ebp + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rdx,%r11,1),%r11d + xorl 4(%rsp),%ebp + xorl %edi,%eax + addl %ecx,%r11d + xorl 24(%rsp),%ebp + roll $30,%r13d + addl %eax,%r11d + roll $1,%ebp + movl %ebp,36(%rsp) + movl 40(%rsp),%edx + movl %r13d,%eax + movl %r11d,%ecx + xorl 48(%rsp),%edx + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rbp,%rdi,1),%edi + xorl 8(%rsp),%edx + xorl %esi,%eax + addl %ecx,%edi + xorl 28(%rsp),%edx + roll $30,%r12d + addl %eax,%edi + roll $1,%edx + movl %edx,40(%rsp) + movl 44(%rsp),%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl 52(%rsp),%ebp + xorl %r11d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rsi,1),%esi + xorl 12(%rsp),%ebp + xorl %r13d,%eax + addl 
%ecx,%esi + xorl 32(%rsp),%ebp + roll $30,%r11d + addl %eax,%esi + roll $1,%ebp + movl %ebp,44(%rsp) + movl 48(%rsp),%edx + movl %r11d,%eax + movl %esi,%ecx + xorl 56(%rsp),%edx + xorl %edi,%eax + roll $5,%ecx + leal -899497514(%rbp,%r13,1),%r13d + xorl 16(%rsp),%edx + xorl %r12d,%eax + addl %ecx,%r13d + xorl 36(%rsp),%edx + roll $30,%edi + addl %eax,%r13d + roll $1,%edx + movl %edx,48(%rsp) + movl 52(%rsp),%ebp + movl %edi,%eax + movl %r13d,%ecx + xorl 60(%rsp),%ebp + xorl %esi,%eax + roll $5,%ecx + leal -899497514(%rdx,%r12,1),%r12d + xorl 20(%rsp),%ebp + xorl %r11d,%eax + addl %ecx,%r12d + xorl 40(%rsp),%ebp + roll $30,%esi + addl %eax,%r12d + roll $1,%ebp + movl 56(%rsp),%edx + movl %esi,%eax + movl %r12d,%ecx + xorl 0(%rsp),%edx + xorl %r13d,%eax + roll $5,%ecx + leal -899497514(%rbp,%r11,1),%r11d + xorl 24(%rsp),%edx + xorl %edi,%eax + addl %ecx,%r11d + xorl 44(%rsp),%edx + roll $30,%r13d + addl %eax,%r11d + roll $1,%edx + movl 60(%rsp),%ebp + movl %r13d,%eax + movl %r11d,%ecx + xorl 4(%rsp),%ebp + xorl %r12d,%eax + roll $5,%ecx + leal -899497514(%rdx,%rdi,1),%edi + xorl 28(%rsp),%ebp + xorl %esi,%eax + addl %ecx,%edi + xorl 48(%rsp),%ebp + roll $30,%r12d + addl %eax,%edi + roll $1,%ebp + movl %r12d,%eax + movl %edi,%ecx + xorl %r11d,%eax + leal -899497514(%rbp,%rsi,1),%esi + roll $5,%ecx + xorl %r13d,%eax + addl %ecx,%esi + roll $30,%r11d + addl %eax,%esi + addl 0(%r8),%esi + addl 4(%r8),%edi + addl 8(%r8),%r11d + addl 12(%r8),%r12d + addl 16(%r8),%r13d + movl %esi,0(%r8) + movl %edi,4(%r8) + movl %r11d,8(%r8) + movl %r12d,12(%r8) + movl %r13d,16(%r8) + + subq $1,%r10 + leaq 64(%r9),%r9 + jnz L$loop + + movq 64(%rsp),%rsi + movq (%rsi),%r13 + movq 8(%rsi),%r12 + movq 16(%rsi),%rbp + movq 24(%rsi),%rbx + leaq 32(%rsi),%rsp +L$epilogue: + .byte 0xf3,0xc3 + + +.p2align 4 +sha1_block_data_order_ssse3: +_ssse3_shortcut: + pushq %rbx + pushq %rbp + pushq %r12 + leaq -64(%rsp),%rsp + movq %rdi,%r8 + movq %rsi,%r9 + movq %rdx,%r10 + + shlq $6,%r10 + addq %r9,%r10 + leaq K_XX_XX(%rip),%r11 + + movl 0(%r8),%eax + movl 4(%r8),%ebx + movl 8(%r8),%ecx + movl 12(%r8),%edx + movl %ebx,%esi + movl 16(%r8),%ebp + + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r9 +.byte 102,15,56,0,206 +.byte 102,15,56,0,214 +.byte 102,15,56,0,222 + paddd %xmm9,%xmm0 + paddd %xmm9,%xmm1 + paddd %xmm9,%xmm2 + movdqa %xmm0,0(%rsp) + psubd %xmm9,%xmm0 + movdqa %xmm1,16(%rsp) + psubd %xmm9,%xmm1 + movdqa %xmm2,32(%rsp) + psubd %xmm9,%xmm2 + jmp L$oop_ssse3 +.p2align 4 +L$oop_ssse3: + movdqa %xmm1,%xmm4 + addl 0(%rsp),%ebp + xorl %edx,%ecx + movdqa %xmm3,%xmm8 +.byte 102,15,58,15,224,8 + movl %eax,%edi + roll $5,%eax + paddd %xmm3,%xmm9 + andl %ecx,%esi + xorl %edx,%ecx + psrldq $4,%xmm8 + xorl %edx,%esi + addl %eax,%ebp + pxor %xmm0,%xmm4 + rorl $2,%ebx + addl %esi,%ebp + pxor %xmm2,%xmm8 + addl 4(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pxor %xmm8,%xmm4 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm9,48(%rsp) + xorl %ecx,%edi + addl %ebp,%edx + movdqa %xmm4,%xmm10 + movdqa %xmm4,%xmm8 + rorl $7,%eax + addl %edi,%edx + addl 8(%rsp),%ecx + xorl %ebx,%eax + pslldq $12,%xmm10 + paddd %xmm4,%xmm4 + movl %edx,%edi + roll $5,%edx + andl %eax,%esi + xorl %ebx,%eax + psrld $31,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + movdqa %xmm10,%xmm9 + rorl $7,%ebp + addl %esi,%ecx + psrld $30,%xmm10 + por %xmm8,%xmm4 + addl 12(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pslld 
$2,%xmm9 + pxor %xmm10,%xmm4 + andl %ebp,%edi + xorl %eax,%ebp + movdqa 0(%r11),%xmm10 + xorl %eax,%edi + addl %ecx,%ebx + pxor %xmm9,%xmm4 + rorl $7,%edx + addl %edi,%ebx + movdqa %xmm2,%xmm5 + addl 16(%rsp),%eax + xorl %ebp,%edx + movdqa %xmm4,%xmm9 +.byte 102,15,58,15,233,8 + movl %ebx,%edi + roll $5,%ebx + paddd %xmm4,%xmm10 + andl %edx,%esi + xorl %ebp,%edx + psrldq $4,%xmm9 + xorl %ebp,%esi + addl %ebx,%eax + pxor %xmm1,%xmm5 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm3,%xmm9 + addl 20(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pxor %xmm9,%xmm5 + andl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm10,0(%rsp) + xorl %edx,%edi + addl %eax,%ebp + movdqa %xmm5,%xmm8 + movdqa %xmm5,%xmm9 + rorl $7,%ebx + addl %edi,%ebp + addl 24(%rsp),%edx + xorl %ecx,%ebx + pslldq $12,%xmm8 + paddd %xmm5,%xmm5 + movl %ebp,%edi + roll $5,%ebp + andl %ebx,%esi + xorl %ecx,%ebx + psrld $31,%xmm9 + xorl %ecx,%esi + addl %ebp,%edx + movdqa %xmm8,%xmm10 + rorl $7,%eax + addl %esi,%edx + psrld $30,%xmm8 + por %xmm9,%xmm5 + addl 28(%rsp),%ecx + xorl %ebx,%eax + movl %edx,%esi + roll $5,%edx + pslld $2,%xmm10 + pxor %xmm8,%xmm5 + andl %eax,%edi + xorl %ebx,%eax + movdqa 16(%r11),%xmm8 + xorl %ebx,%edi + addl %edx,%ecx + pxor %xmm10,%xmm5 + rorl $7,%ebp + addl %edi,%ecx + movdqa %xmm3,%xmm6 + addl 32(%rsp),%ebx + xorl %eax,%ebp + movdqa %xmm5,%xmm10 +.byte 102,15,58,15,242,8 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm5,%xmm8 + andl %ebp,%esi + xorl %eax,%ebp + psrldq $4,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + pxor %xmm2,%xmm6 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm4,%xmm10 + addl 36(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + pxor %xmm10,%xmm6 + andl %edx,%edi + xorl %ebp,%edx + movdqa %xmm8,16(%rsp) + xorl %ebp,%edi + addl %ebx,%eax + movdqa %xmm6,%xmm9 + movdqa %xmm6,%xmm10 + rorl $7,%ecx + addl %edi,%eax + addl 40(%rsp),%ebp + xorl %edx,%ecx + pslldq $12,%xmm9 + paddd %xmm6,%xmm6 + movl %eax,%edi + roll $5,%eax + andl %ecx,%esi + xorl %edx,%ecx + psrld $31,%xmm10 + xorl %edx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + psrld $30,%xmm9 + por %xmm10,%xmm6 + addl 44(%rsp),%edx + xorl %ecx,%ebx + movl %ebp,%esi + roll $5,%ebp + pslld $2,%xmm8 + pxor %xmm9,%xmm6 + andl %ebx,%edi + xorl %ecx,%ebx + movdqa 16(%r11),%xmm9 + xorl %ecx,%edi + addl %ebp,%edx + pxor %xmm8,%xmm6 + rorl $7,%eax + addl %edi,%edx + movdqa %xmm4,%xmm7 + addl 48(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm6,%xmm8 +.byte 102,15,58,15,251,8 + movl %edx,%edi + roll $5,%edx + paddd %xmm6,%xmm9 + andl %eax,%esi + xorl %ebx,%eax + psrldq $4,%xmm8 + xorl %ebx,%esi + addl %edx,%ecx + pxor %xmm3,%xmm7 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm5,%xmm8 + addl 52(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%esi + roll $5,%ecx + pxor %xmm8,%xmm7 + andl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm9,32(%rsp) + xorl %eax,%edi + addl %ecx,%ebx + movdqa %xmm7,%xmm10 + movdqa %xmm7,%xmm8 + rorl $7,%edx + addl %edi,%ebx + addl 56(%rsp),%eax + xorl %ebp,%edx + pslldq $12,%xmm10 + paddd %xmm7,%xmm7 + movl %ebx,%edi + roll $5,%ebx + andl %edx,%esi + xorl %ebp,%edx + psrld $31,%xmm8 + xorl %ebp,%esi + addl %ebx,%eax + movdqa %xmm10,%xmm9 + rorl $7,%ecx + addl %esi,%eax + psrld $30,%xmm10 + por %xmm8,%xmm7 + addl 60(%rsp),%ebp + xorl %edx,%ecx + movl %eax,%esi + roll $5,%eax + pslld $2,%xmm9 + pxor %xmm10,%xmm7 + andl %ecx,%edi + xorl %edx,%ecx + movdqa 16(%r11),%xmm10 + xorl %edx,%edi + addl %eax,%ebp + pxor %xmm9,%xmm7 + rorl $7,%ebx + addl %edi,%ebp + movdqa %xmm7,%xmm9 + addl 0(%rsp),%edx + pxor %xmm4,%xmm0 
+.byte 102,68,15,58,15,206,8 + xorl %ecx,%ebx + movl %ebp,%edi + roll $5,%ebp + pxor %xmm1,%xmm0 + andl %ebx,%esi + xorl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm7,%xmm10 + xorl %ecx,%esi + addl %ebp,%edx + pxor %xmm9,%xmm0 + rorl $7,%eax + addl %esi,%edx + addl 4(%rsp),%ecx + xorl %ebx,%eax + movdqa %xmm0,%xmm9 + movdqa %xmm10,48(%rsp) + movl %edx,%esi + roll $5,%edx + andl %eax,%edi + xorl %ebx,%eax + pslld $2,%xmm0 + xorl %ebx,%edi + addl %edx,%ecx + psrld $30,%xmm9 + rorl $7,%ebp + addl %edi,%ecx + addl 8(%rsp),%ebx + xorl %eax,%ebp + movl %ecx,%edi + roll $5,%ecx + por %xmm9,%xmm0 + andl %ebp,%esi + xorl %eax,%ebp + movdqa %xmm0,%xmm10 + xorl %eax,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 12(%rsp),%eax + xorl %ebp,%edx + movl %ebx,%esi + roll $5,%ebx + andl %edx,%edi + xorl %ebp,%edx + xorl %ebp,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 16(%rsp),%ebp + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,215,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm2,%xmm1 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm8,%xmm9 + paddd %xmm0,%xmm8 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm10,%xmm1 + addl 20(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm1,%xmm10 + movdqa %xmm8,0(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm1 + addl 24(%rsp),%ecx + xorl %ebx,%esi + psrld $30,%xmm10 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm10,%xmm1 + addl 28(%rsp),%ebx + xorl %eax,%edi + movdqa %xmm1,%xmm8 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 32(%rsp),%eax + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,192,8 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + pxor %xmm3,%xmm2 + xorl %edx,%esi + addl %ebx,%eax + movdqa 32(%r11),%xmm10 + paddd %xmm1,%xmm9 + rorl $7,%ecx + addl %esi,%eax + pxor %xmm8,%xmm2 + addl 36(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + movdqa %xmm2,%xmm8 + movdqa %xmm9,16(%rsp) + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + pslld $2,%xmm2 + addl 40(%rsp),%edx + xorl %ecx,%esi + psrld $30,%xmm8 + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + por %xmm8,%xmm2 + addl 44(%rsp),%ecx + xorl %ebx,%edi + movdqa %xmm2,%xmm9 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 48(%rsp),%ebx + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,201,8 + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + pxor %xmm4,%xmm3 + xorl %ebp,%esi + addl %ecx,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm2,%xmm10 + rorl $7,%edx + addl %esi,%ebx + pxor %xmm9,%xmm3 + addl 52(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + movdqa %xmm3,%xmm9 + movdqa %xmm10,32(%rsp) + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + pslld $2,%xmm3 + addl 56(%rsp),%ebp + xorl %edx,%esi + psrld $30,%xmm9 + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + por %xmm9,%xmm3 + addl 60(%rsp),%edx + xorl %ecx,%edi + movdqa %xmm3,%xmm10 + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 0(%rsp),%ecx + pxor %xmm0,%xmm4 +.byte 102,68,15,58,15,210,8 + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + pxor %xmm5,%xmm4 + xorl %eax,%esi + addl %edx,%ecx + movdqa %xmm8,%xmm9 + paddd %xmm3,%xmm8 + rorl $7,%ebp + addl %esi,%ecx + pxor %xmm10,%xmm4 + addl 4(%rsp),%ebx + 
xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + movdqa %xmm4,%xmm10 + movdqa %xmm8,48(%rsp) + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + pslld $2,%xmm4 + addl 8(%rsp),%eax + xorl %ebp,%esi + psrld $30,%xmm10 + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + por %xmm10,%xmm4 + addl 12(%rsp),%ebp + xorl %edx,%edi + movdqa %xmm4,%xmm8 + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 16(%rsp),%edx + pxor %xmm1,%xmm5 +.byte 102,68,15,58,15,195,8 + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + pxor %xmm6,%xmm5 + xorl %ebx,%esi + addl %ebp,%edx + movdqa %xmm9,%xmm10 + paddd %xmm4,%xmm9 + rorl $7,%eax + addl %esi,%edx + pxor %xmm8,%xmm5 + addl 20(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + movdqa %xmm5,%xmm8 + movdqa %xmm9,0(%rsp) + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + pslld $2,%xmm5 + addl 24(%rsp),%ebx + xorl %eax,%esi + psrld $30,%xmm8 + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + por %xmm8,%xmm5 + addl 28(%rsp),%eax + xorl %ebp,%edi + movdqa %xmm5,%xmm9 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + movl %ecx,%edi + pxor %xmm2,%xmm6 +.byte 102,68,15,58,15,204,8 + xorl %edx,%ecx + addl 32(%rsp),%ebp + andl %edx,%edi + pxor %xmm7,%xmm6 + andl %ecx,%esi + rorl $7,%ebx + movdqa %xmm10,%xmm8 + paddd %xmm5,%xmm10 + addl %edi,%ebp + movl %eax,%edi + pxor %xmm9,%xmm6 + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movdqa %xmm6,%xmm9 + movdqa %xmm10,16(%rsp) + movl %ebx,%esi + xorl %ecx,%ebx + addl 36(%rsp),%edx + andl %ecx,%esi + pslld $2,%xmm6 + andl %ebx,%edi + rorl $7,%eax + psrld $30,%xmm9 + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + por %xmm9,%xmm6 + movl %eax,%edi + xorl %ebx,%eax + movdqa %xmm6,%xmm10 + addl 40(%rsp),%ecx + andl %ebx,%edi + andl %eax,%esi + rorl $7,%ebp + addl %edi,%ecx + movl %edx,%edi + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%esi + xorl %eax,%ebp + addl 44(%rsp),%ebx + andl %eax,%esi + andl %ebp,%edi + rorl $7,%edx + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%edi + pxor %xmm3,%xmm7 +.byte 102,68,15,58,15,213,8 + xorl %ebp,%edx + addl 48(%rsp),%eax + andl %ebp,%edi + pxor %xmm0,%xmm7 + andl %edx,%esi + rorl $7,%ecx + movdqa 48(%r11),%xmm9 + paddd %xmm6,%xmm8 + addl %edi,%eax + movl %ebx,%edi + pxor %xmm10,%xmm7 + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movdqa %xmm7,%xmm10 + movdqa %xmm8,32(%rsp) + movl %ecx,%esi + xorl %edx,%ecx + addl 52(%rsp),%ebp + andl %edx,%esi + pslld $2,%xmm7 + andl %ecx,%edi + rorl $7,%ebx + psrld $30,%xmm10 + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + por %xmm10,%xmm7 + movl %ebx,%edi + xorl %ecx,%ebx + movdqa %xmm7,%xmm8 + addl 56(%rsp),%edx + andl %ecx,%edi + andl %ebx,%esi + rorl $7,%eax + addl %edi,%edx + movl %ebp,%edi + roll $5,%ebp + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%esi + xorl %ebx,%eax + addl 60(%rsp),%ecx + andl %ebx,%esi + andl %eax,%edi + rorl $7,%ebp + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movl %ebp,%edi + pxor %xmm4,%xmm0 +.byte 102,68,15,58,15,198,8 + xorl %eax,%ebp + addl 0(%rsp),%ebx + andl %eax,%edi + 
pxor %xmm1,%xmm0 + andl %ebp,%esi + rorl $7,%edx + movdqa %xmm9,%xmm10 + paddd %xmm7,%xmm9 + addl %edi,%ebx + movl %ecx,%edi + pxor %xmm8,%xmm0 + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movdqa %xmm0,%xmm8 + movdqa %xmm9,48(%rsp) + movl %edx,%esi + xorl %ebp,%edx + addl 4(%rsp),%eax + andl %ebp,%esi + pslld $2,%xmm0 + andl %edx,%edi + rorl $7,%ecx + psrld $30,%xmm8 + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + por %xmm8,%xmm0 + movl %ecx,%edi + xorl %edx,%ecx + movdqa %xmm0,%xmm9 + addl 8(%rsp),%ebp + andl %edx,%edi + andl %ecx,%esi + rorl $7,%ebx + addl %edi,%ebp + movl %eax,%edi + roll $5,%eax + addl %esi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%esi + xorl %ecx,%ebx + addl 12(%rsp),%edx + andl %ecx,%esi + andl %ebx,%edi + rorl $7,%eax + addl %esi,%edx + movl %ebp,%esi + roll $5,%ebp + addl %edi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movl %eax,%edi + pxor %xmm5,%xmm1 +.byte 102,68,15,58,15,207,8 + xorl %ebx,%eax + addl 16(%rsp),%ecx + andl %ebx,%edi + pxor %xmm2,%xmm1 + andl %eax,%esi + rorl $7,%ebp + movdqa %xmm10,%xmm8 + paddd %xmm0,%xmm10 + addl %edi,%ecx + movl %edx,%edi + pxor %xmm9,%xmm1 + roll $5,%edx + addl %esi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + movdqa %xmm1,%xmm9 + movdqa %xmm10,0(%rsp) + movl %ebp,%esi + xorl %eax,%ebp + addl 20(%rsp),%ebx + andl %eax,%esi + pslld $2,%xmm1 + andl %ebp,%edi + rorl $7,%edx + psrld $30,%xmm9 + addl %esi,%ebx + movl %ecx,%esi + roll $5,%ecx + addl %edi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + por %xmm9,%xmm1 + movl %edx,%edi + xorl %ebp,%edx + movdqa %xmm1,%xmm10 + addl 24(%rsp),%eax + andl %ebp,%edi + andl %edx,%esi + rorl $7,%ecx + addl %edi,%eax + movl %ebx,%edi + roll $5,%ebx + addl %esi,%eax + xorl %ebp,%edx + addl %ebx,%eax + movl %ecx,%esi + xorl %edx,%ecx + addl 28(%rsp),%ebp + andl %edx,%esi + andl %ecx,%edi + rorl $7,%ebx + addl %esi,%ebp + movl %eax,%esi + roll $5,%eax + addl %edi,%ebp + xorl %edx,%ecx + addl %eax,%ebp + movl %ebx,%edi + pxor %xmm6,%xmm2 +.byte 102,68,15,58,15,208,8 + xorl %ecx,%ebx + addl 32(%rsp),%edx + andl %ecx,%edi + pxor %xmm3,%xmm2 + andl %ebx,%esi + rorl $7,%eax + movdqa %xmm8,%xmm9 + paddd %xmm1,%xmm8 + addl %edi,%edx + movl %ebp,%edi + pxor %xmm10,%xmm2 + roll $5,%ebp + addl %esi,%edx + xorl %ecx,%ebx + addl %ebp,%edx + movdqa %xmm2,%xmm10 + movdqa %xmm8,16(%rsp) + movl %eax,%esi + xorl %ebx,%eax + addl 36(%rsp),%ecx + andl %ebx,%esi + pslld $2,%xmm2 + andl %eax,%edi + rorl $7,%ebp + psrld $30,%xmm10 + addl %esi,%ecx + movl %edx,%esi + roll $5,%edx + addl %edi,%ecx + xorl %ebx,%eax + addl %edx,%ecx + por %xmm10,%xmm2 + movl %ebp,%edi + xorl %eax,%ebp + movdqa %xmm2,%xmm8 + addl 40(%rsp),%ebx + andl %eax,%edi + andl %ebp,%esi + rorl $7,%edx + addl %edi,%ebx + movl %ecx,%edi + roll $5,%ecx + addl %esi,%ebx + xorl %eax,%ebp + addl %ecx,%ebx + movl %edx,%esi + xorl %ebp,%edx + addl 44(%rsp),%eax + andl %ebp,%esi + andl %edx,%edi + rorl $7,%ecx + addl %esi,%eax + movl %ebx,%esi + roll $5,%ebx + addl %edi,%eax + xorl %ebp,%edx + addl %ebx,%eax + addl 48(%rsp),%ebp + pxor %xmm7,%xmm3 +.byte 102,68,15,58,15,193,8 + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + pxor %xmm4,%xmm3 + xorl %ecx,%esi + addl %eax,%ebp + movdqa %xmm9,%xmm10 + paddd %xmm2,%xmm9 + rorl $7,%ebx + addl %esi,%ebp + pxor %xmm8,%xmm3 + addl 52(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + movdqa %xmm3,%xmm8 + movdqa %xmm9,32(%rsp) + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + pslld $2,%xmm3 + addl 56(%rsp),%ecx + 
xorl %ebx,%esi + psrld $30,%xmm8 + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + por %xmm8,%xmm3 + addl 60(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 0(%rsp),%eax + paddd %xmm3,%xmm10 + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + movdqa %xmm10,48(%rsp) + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 4(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 8(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 12(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + cmpq %r10,%r9 + je L$done_ssse3 + movdqa 64(%r11),%xmm6 + movdqa 0(%r11),%xmm9 + movdqu 0(%r9),%xmm0 + movdqu 16(%r9),%xmm1 + movdqu 32(%r9),%xmm2 + movdqu 48(%r9),%xmm3 +.byte 102,15,56,0,198 + addq $64,%r9 + addl 16(%rsp),%ebx + xorl %eax,%esi +.byte 102,15,56,0,206 + movl %ecx,%edi + roll $5,%ecx + paddd %xmm9,%xmm0 + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + movdqa %xmm0,0(%rsp) + addl 20(%rsp),%eax + xorl %ebp,%edi + psubd %xmm9,%xmm0 + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl %eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi +.byte 102,15,56,0,214 + movl %edx,%edi + roll $5,%edx + paddd %xmm9,%xmm1 + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + movdqa %xmm1,16(%rsp) + addl 36(%rsp),%ebx + xorl %eax,%edi + psubd %xmm9,%xmm1 + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi +.byte 102,15,56,0,222 + movl %ebp,%edi + roll $5,%ebp + paddd %xmm9,%xmm2 + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + movdqa %xmm2,32(%rsp) + addl 52(%rsp),%ecx + xorl %ebx,%edi + psubd %xmm9,%xmm2 + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + addl 12(%r8),%edx + movl %eax,0(%r8) + addl 16(%r8),%ebp + movl %esi,4(%r8) + movl %esi,%ebx + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + jmp L$oop_ssse3 + +.p2align 4 +L$done_ssse3: + addl 16(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 20(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 24(%rsp),%ebp + xorl %edx,%esi + movl 
%eax,%edi + roll $5,%eax + xorl %ecx,%esi + addl %eax,%ebp + rorl $7,%ebx + addl %esi,%ebp + addl 28(%rsp),%edx + xorl %ecx,%edi + movl %ebp,%esi + roll $5,%ebp + xorl %ebx,%edi + addl %ebp,%edx + rorl $7,%eax + addl %edi,%edx + addl 32(%rsp),%ecx + xorl %ebx,%esi + movl %edx,%edi + roll $5,%edx + xorl %eax,%esi + addl %edx,%ecx + rorl $7,%ebp + addl %esi,%ecx + addl 36(%rsp),%ebx + xorl %eax,%edi + movl %ecx,%esi + roll $5,%ecx + xorl %ebp,%edi + addl %ecx,%ebx + rorl $7,%edx + addl %edi,%ebx + addl 40(%rsp),%eax + xorl %ebp,%esi + movl %ebx,%edi + roll $5,%ebx + xorl %edx,%esi + addl %ebx,%eax + rorl $7,%ecx + addl %esi,%eax + addl 44(%rsp),%ebp + xorl %edx,%edi + movl %eax,%esi + roll $5,%eax + xorl %ecx,%edi + addl %eax,%ebp + rorl $7,%ebx + addl %edi,%ebp + addl 48(%rsp),%edx + xorl %ecx,%esi + movl %ebp,%edi + roll $5,%ebp + xorl %ebx,%esi + addl %ebp,%edx + rorl $7,%eax + addl %esi,%edx + addl 52(%rsp),%ecx + xorl %ebx,%edi + movl %edx,%esi + roll $5,%edx + xorl %eax,%edi + addl %edx,%ecx + rorl $7,%ebp + addl %edi,%ecx + addl 56(%rsp),%ebx + xorl %eax,%esi + movl %ecx,%edi + roll $5,%ecx + xorl %ebp,%esi + addl %ecx,%ebx + rorl $7,%edx + addl %esi,%ebx + addl 60(%rsp),%eax + xorl %ebp,%edi + movl %ebx,%esi + roll $5,%ebx + xorl %edx,%edi + addl %ebx,%eax + rorl $7,%ecx + addl %edi,%eax + addl 0(%r8),%eax + addl 4(%r8),%esi + addl 8(%r8),%ecx + movl %eax,0(%r8) + addl 12(%r8),%edx + movl %esi,4(%r8) + addl 16(%r8),%ebp + movl %ecx,8(%r8) + movl %edx,12(%r8) + movl %ebp,16(%r8) + leaq 64(%rsp),%rsi + movq 0(%rsi),%r12 + movq 8(%rsi),%rbp + movq 16(%rsi),%rbx + leaq 24(%rsi),%rsp +L$epilogue_ssse3: + .byte 0xf3,0xc3 + +.p2align 6 +K_XX_XX: +.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999 +.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1 +.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc +.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6 +.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f +.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 +.p2align 6 diff --git a/code/crypto/sha/sha1_one.c b/code/crypto/sha/sha1_one.c new file mode 100644 index 0000000..080ea4d --- /dev/null +++ b/code/crypto/sha/sha1_one.c @@ -0,0 +1,79 @@ +/* $OpenBSD: sha1_one.c,v 1.12 2015/09/10 15:56:26 jsing Exp $ */ +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + +#include <stdio.h> +#include <string.h> + +#include <compat.h> +#include <sha.h> + +#ifndef OPENSSL_NO_SHA1 +unsigned char *SHA1(const unsigned char *d, size_t n, unsigned char *md) + { + SHA_CTX c; + static unsigned char m[SHA_DIGEST_LENGTH]; + + if (md == NULL) md=m; + if (!SHA1_Init(&c)) + return NULL; + SHA1_Update(&c,d,n); + SHA1_Final(md,&c); + explicit_bzero(&c,sizeof(c)); + return(md); + } +#endif diff --git a/code/crypto/sha/sha1dgst.c b/code/crypto/sha/sha1dgst.c new file mode 100644 index 0000000..acc576c --- /dev/null +++ b/code/crypto/sha/sha1dgst.c @@ -0,0 +1,66 @@ +/* $OpenBSD: sha1dgst.c,v 1.14 2015/09/13 21:09:56 doug Exp $ */ +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). 
+ * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] 
+ */ + +#if !defined(OPENSSL_NO_SHA1) && !defined(OPENSSL_NO_SHA) + +/* The implementation is in ../md32_common.h */ + +#include "sha_locl.h" + +#endif + diff --git a/code/crypto/sha/sha256-elf-x86_64.S b/code/crypto/sha/sha256-elf-x86_64.S new file mode 100644 index 0000000..599a5cb --- /dev/null +++ b/code/crypto/sha/sha256-elf-x86_64.S @@ -0,0 +1,1782 @@ +#include "x86_arch.h" +.text + +.globl sha256_block_data_order +.type sha256_block_data_order,@function +.align 16 +sha256_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%r11 + shlq $4,%rdx + subq $64+32,%rsp + leaq (%rsi,%rdx,4),%rdx + andq $-64,%rsp + movq %rdi,64+0(%rsp) + movq %rsi,64+8(%rsp) + movq %rdx,64+16(%rsp) + movq %r11,64+24(%rsp) +.Lprologue: + + leaq K256(%rip),%rbp + + movl 0(%rdi),%eax + movl 4(%rdi),%ebx + movl 8(%rdi),%ecx + movl 12(%rdi),%edx + movl 16(%rdi),%r8d + movl 20(%rdi),%r9d + movl 24(%rdi),%r10d + movl 28(%rdi),%r11d + jmp .Lloop + +.align 16 +.Lloop: + xorq %rdi,%rdi + movl 0(%rsi),%r12d + movl %r8d,%r13d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,0(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 4(%rsi),%r12d + movl %edx,%r13d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,4(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 8(%rsi),%r12d + movl %ecx,%r13d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,8(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 12(%rsi),%r12d + movl %ebx,%r13d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,12(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 16(%rsi),%r12d + movl 
%eax,%r13d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,16(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 20(%rsi),%r12d + movl %r11d,%r13d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,20(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 24(%rsi),%r12d + movl %r10d,%r13d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,24(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 28(%rsi),%r12d + movl %r9d,%r13d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,28(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + movl 32(%rsi),%r12d + movl %r8d,%r13d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,32(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 36(%rsi),%r12d + movl %edx,%r13d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,36(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d 
+ andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 40(%rsi),%r12d + movl %ecx,%r13d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,40(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 44(%rsi),%r12d + movl %ebx,%r13d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,44(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 48(%rsi),%r12d + movl %eax,%r13d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,48(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 52(%rsi),%r12d + movl %r11d,%r13d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,52(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 56(%rsi),%r12d + movl %r10d,%r13d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,56(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 60(%rsi),%r12d + movl %r9d,%r13d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,60(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + 
movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + jmp .Lrounds_16_xx +.align 16 +.Lrounds_16_xx: + movl 4(%rsp),%r13d + movl 56(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 36(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 0(%rsp),%r12d + movl %r8d,%r13d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,0(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 8(%rsp),%r13d + movl 60(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 40(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 4(%rsp),%r12d + movl %edx,%r13d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,4(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 12(%rsp),%r13d + movl 0(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 44(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 8(%rsp),%r12d + movl %ecx,%r13d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,8(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 16(%rsp),%r13d + movl 4(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 48(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 12(%rsp),%r12d + movl %ebx,%r13d + addl %r14d,%r12d + 
movl %r9d,%r14d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,12(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 20(%rsp),%r13d + movl 8(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 52(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 16(%rsp),%r12d + movl %eax,%r13d + addl %r14d,%r12d + movl %r8d,%r14d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,16(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 24(%rsp),%r13d + movl 12(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 56(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 20(%rsp),%r12d + movl %r11d,%r13d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,20(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 28(%rsp),%r13d + movl 16(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 60(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 24(%rsp),%r12d + movl %r10d,%r13d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,24(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 32(%rsp),%r13d + movl 20(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 
0(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 28(%rsp),%r12d + movl %r9d,%r13d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,28(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + movl 36(%rsp),%r13d + movl 24(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 4(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 32(%rsp),%r12d + movl %r8d,%r13d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,32(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 40(%rsp),%r13d + movl 28(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 8(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 36(%rsp),%r12d + movl %edx,%r13d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,36(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 44(%rsp),%r13d + movl 32(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 12(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 40(%rsp),%r12d + movl %ecx,%r13d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,40(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl 
%r14d,%r9d + + movl 48(%rsp),%r13d + movl 36(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 16(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 44(%rsp),%r12d + movl %ebx,%r13d + addl %r14d,%r12d + movl %r9d,%r14d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,44(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 52(%rsp),%r13d + movl 40(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 20(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 48(%rsp),%r12d + movl %eax,%r13d + addl %r14d,%r12d + movl %r8d,%r14d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,48(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 56(%rsp),%r13d + movl 44(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 24(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 52(%rsp),%r12d + movl %r11d,%r13d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,52(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 60(%rsp),%r13d + movl 48(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 28(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 56(%rsp),%r12d + movl %r10d,%r13d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,56(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl 
%edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 0(%rsp),%r13d + movl 52(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 32(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 60(%rsp),%r12d + movl %r9d,%r13d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,60(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + cmpq $64,%rdi + jb .Lrounds_16_xx + + movq 64+0(%rsp),%rdi + leaq 64(%rsi),%rsi + + addl 0(%rdi),%eax + addl 4(%rdi),%ebx + addl 8(%rdi),%ecx + addl 12(%rdi),%edx + addl 16(%rdi),%r8d + addl 20(%rdi),%r9d + addl 24(%rdi),%r10d + addl 28(%rdi),%r11d + + cmpq 64+16(%rsp),%rsi + + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + movl %r8d,16(%rdi) + movl %r9d,20(%rdi) + movl %r10d,24(%rdi) + movl %r11d,28(%rdi) + jb .Lloop + + movq 64+24(%rsp),%rsi + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lepilogue: + .byte 0xf3,0xc3 +.size sha256_block_data_order,.-sha256_block_data_order +.align 64 +.type K256,@object +K256: +.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 +.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 +.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 +.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 +.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc +.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da +.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 +.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 +.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 +.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 +.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 +.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 +.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 +.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 +.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 +.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 +#if defined(HAVE_GNU_STACK) +.section .note.GNU-stack,"",%progbits +#endif diff --git a/code/crypto/sha/sha256-macosx-x86_64.S b/code/crypto/sha/sha256-macosx-x86_64.S new file mode 100644 index 0000000..3ecafca --- /dev/null +++ b/code/crypto/sha/sha256-macosx-x86_64.S @@ -0,0 +1,1779 @@ +#include "x86_arch.h" +.text + +.globl _sha256_block_data_order + +.p2align 4 +_sha256_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%r11 + shlq $4,%rdx + subq $64+32,%rsp + leaq (%rsi,%rdx,4),%rdx + andq $-64,%rsp + movq %rdi,64+0(%rsp) + movq %rsi,64+8(%rsp) + movq %rdx,64+16(%rsp) + movq %r11,64+24(%rsp) +L$prologue: + + leaq K256(%rip),%rbp + + movl 0(%rdi),%eax + movl 4(%rdi),%ebx + movl 8(%rdi),%ecx + movl 12(%rdi),%edx + movl 16(%rdi),%r8d + movl 20(%rdi),%r9d + movl 24(%rdi),%r10d + movl 
28(%rdi),%r11d + jmp L$loop + +.p2align 4 +L$loop: + xorq %rdi,%rdi + movl 0(%rsi),%r12d + movl %r8d,%r13d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,0(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 4(%rsi),%r12d + movl %edx,%r13d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,4(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 8(%rsi),%r12d + movl %ecx,%r13d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,8(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 12(%rsi),%r12d + movl %ebx,%r13d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,12(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 16(%rsi),%r12d + movl %eax,%r13d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,16(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 20(%rsi),%r12d + movl %r11d,%r13d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,20(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl 
%r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 24(%rsi),%r12d + movl %r10d,%r13d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,24(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 28(%rsi),%r12d + movl %r9d,%r13d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,28(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + movl 32(%rsi),%r12d + movl %r8d,%r13d + movl %eax,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,32(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 36(%rsi),%r12d + movl %edx,%r13d + movl %r11d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,36(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 40(%rsi),%r12d + movl %ecx,%r13d + movl %r10d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,40(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 44(%rsi),%r12d + movl %ebx,%r13d + movl %r9d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,44(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d 
+ + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 48(%rsi),%r12d + movl %eax,%r13d + movl %r8d,%r14d + bswapl %r12d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,48(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 52(%rsi),%r12d + movl %r11d,%r13d + movl %edx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,52(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 56(%rsi),%r12d + movl %r10d,%r13d + movl %ecx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,56(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 60(%rsi),%r12d + movl %r9d,%r13d + movl %ebx,%r14d + bswapl %r12d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,60(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + jmp L$rounds_16_xx +.p2align 4 +L$rounds_16_xx: + movl 4(%rsp),%r13d + movl 56(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 36(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 0(%rsp),%r12d + movl %r8d,%r13d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,0(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + 
andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 8(%rsp),%r13d + movl 60(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 40(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 4(%rsp),%r12d + movl %edx,%r13d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,4(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 12(%rsp),%r13d + movl 0(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 44(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 8(%rsp),%r12d + movl %ecx,%r13d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,8(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 16(%rsp),%r13d + movl 4(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 48(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 12(%rsp),%r12d + movl %ebx,%r13d + addl %r14d,%r12d + movl %r9d,%r14d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,12(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 20(%rsp),%r13d + movl 8(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 52(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 16(%rsp),%r12d + movl %eax,%r13d + addl %r14d,%r12d + movl %r8d,%r14d + rorl 
$14,%r13d + movl %ebx,%r15d + movl %r12d,16(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 24(%rsp),%r13d + movl 12(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 56(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 20(%rsp),%r12d + movl %r11d,%r13d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,20(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 28(%rsp),%r13d + movl 16(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 60(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 24(%rsp),%r12d + movl %r10d,%r13d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,24(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 32(%rsp),%r13d + movl 20(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 0(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 28(%rsp),%r12d + movl %r9d,%r13d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,28(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl %ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + movl 36(%rsp),%r13d + movl 24(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 4(%rsp),%r12d + + rorl $2,%r15d + 
xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 32(%rsp),%r12d + movl %r8d,%r13d + addl %r14d,%r12d + movl %eax,%r14d + rorl $14,%r13d + movl %r9d,%r15d + movl %r12d,32(%rsp) + + rorl $9,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + rorl $5,%r13d + addl %r11d,%r12d + xorl %eax,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r8d,%r15d + movl %ebx,%r11d + + rorl $11,%r14d + xorl %r8d,%r13d + xorl %r10d,%r15d + + xorl %ecx,%r11d + xorl %eax,%r14d + addl %r15d,%r12d + movl %ebx,%r15d + + rorl $6,%r13d + andl %eax,%r11d + andl %ecx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r11d + + addl %r12d,%edx + addl %r12d,%r11d + leaq 1(%rdi),%rdi + addl %r14d,%r11d + + movl 40(%rsp),%r13d + movl 28(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 8(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 36(%rsp),%r12d + movl %edx,%r13d + addl %r14d,%r12d + movl %r11d,%r14d + rorl $14,%r13d + movl %r8d,%r15d + movl %r12d,36(%rsp) + + rorl $9,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + rorl $5,%r13d + addl %r10d,%r12d + xorl %r11d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %edx,%r15d + movl %eax,%r10d + + rorl $11,%r14d + xorl %edx,%r13d + xorl %r9d,%r15d + + xorl %ebx,%r10d + xorl %r11d,%r14d + addl %r15d,%r12d + movl %eax,%r15d + + rorl $6,%r13d + andl %r11d,%r10d + andl %ebx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r10d + + addl %r12d,%ecx + addl %r12d,%r10d + leaq 1(%rdi),%rdi + addl %r14d,%r10d + + movl 44(%rsp),%r13d + movl 32(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 12(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 40(%rsp),%r12d + movl %ecx,%r13d + addl %r14d,%r12d + movl %r10d,%r14d + rorl $14,%r13d + movl %edx,%r15d + movl %r12d,40(%rsp) + + rorl $9,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + rorl $5,%r13d + addl %r9d,%r12d + xorl %r10d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ecx,%r15d + movl %r11d,%r9d + + rorl $11,%r14d + xorl %ecx,%r13d + xorl %r8d,%r15d + + xorl %eax,%r9d + xorl %r10d,%r14d + addl %r15d,%r12d + movl %r11d,%r15d + + rorl $6,%r13d + andl %r10d,%r9d + andl %eax,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r9d + + addl %r12d,%ebx + addl %r12d,%r9d + leaq 1(%rdi),%rdi + addl %r14d,%r9d + + movl 48(%rsp),%r13d + movl 36(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 16(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 44(%rsp),%r12d + movl %ebx,%r13d + addl %r14d,%r12d + movl %r9d,%r14d + rorl $14,%r13d + movl %ecx,%r15d + movl %r12d,44(%rsp) + + rorl $9,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + rorl $5,%r13d + addl %r8d,%r12d + xorl %r9d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %ebx,%r15d + movl %r10d,%r8d + + rorl $11,%r14d + xorl %ebx,%r13d + xorl %edx,%r15d + + xorl %r11d,%r8d + xorl %r9d,%r14d + addl %r15d,%r12d + movl %r10d,%r15d + + rorl $6,%r13d + andl %r9d,%r8d + andl %r11d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%r8d + + addl %r12d,%eax + addl %r12d,%r8d + leaq 1(%rdi),%rdi + addl %r14d,%r8d + + movl 52(%rsp),%r13d + 
movl 40(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 20(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 48(%rsp),%r12d + movl %eax,%r13d + addl %r14d,%r12d + movl %r8d,%r14d + rorl $14,%r13d + movl %ebx,%r15d + movl %r12d,48(%rsp) + + rorl $9,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + rorl $5,%r13d + addl %edx,%r12d + xorl %r8d,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %eax,%r15d + movl %r9d,%edx + + rorl $11,%r14d + xorl %eax,%r13d + xorl %ecx,%r15d + + xorl %r10d,%edx + xorl %r8d,%r14d + addl %r15d,%r12d + movl %r9d,%r15d + + rorl $6,%r13d + andl %r8d,%edx + andl %r10d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%edx + + addl %r12d,%r11d + addl %r12d,%edx + leaq 1(%rdi),%rdi + addl %r14d,%edx + + movl 56(%rsp),%r13d + movl 44(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 24(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 52(%rsp),%r12d + movl %r11d,%r13d + addl %r14d,%r12d + movl %edx,%r14d + rorl $14,%r13d + movl %eax,%r15d + movl %r12d,52(%rsp) + + rorl $9,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + rorl $5,%r13d + addl %ecx,%r12d + xorl %edx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r11d,%r15d + movl %r8d,%ecx + + rorl $11,%r14d + xorl %r11d,%r13d + xorl %ebx,%r15d + + xorl %r9d,%ecx + xorl %edx,%r14d + addl %r15d,%r12d + movl %r8d,%r15d + + rorl $6,%r13d + andl %edx,%ecx + andl %r9d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ecx + + addl %r12d,%r10d + addl %r12d,%ecx + leaq 1(%rdi),%rdi + addl %r14d,%ecx + + movl 60(%rsp),%r13d + movl 48(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 28(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 56(%rsp),%r12d + movl %r10d,%r13d + addl %r14d,%r12d + movl %ecx,%r14d + rorl $14,%r13d + movl %r11d,%r15d + movl %r12d,56(%rsp) + + rorl $9,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + rorl $5,%r13d + addl %ebx,%r12d + xorl %ecx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r10d,%r15d + movl %edx,%ebx + + rorl $11,%r14d + xorl %r10d,%r13d + xorl %eax,%r15d + + xorl %r8d,%ebx + xorl %ecx,%r14d + addl %r15d,%r12d + movl %edx,%r15d + + rorl $6,%r13d + andl %ecx,%ebx + andl %r8d,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%ebx + + addl %r12d,%r9d + addl %r12d,%ebx + leaq 1(%rdi),%rdi + addl %r14d,%ebx + + movl 0(%rsp),%r13d + movl 52(%rsp),%r14d + movl %r13d,%r12d + movl %r14d,%r15d + + rorl $11,%r12d + xorl %r13d,%r12d + shrl $3,%r13d + + rorl $7,%r12d + xorl %r12d,%r13d + movl 32(%rsp),%r12d + + rorl $2,%r15d + xorl %r14d,%r15d + shrl $10,%r14d + + rorl $17,%r15d + addl %r13d,%r12d + xorl %r15d,%r14d + + addl 60(%rsp),%r12d + movl %r9d,%r13d + addl %r14d,%r12d + movl %ebx,%r14d + rorl $14,%r13d + movl %r10d,%r15d + movl %r12d,60(%rsp) + + rorl $9,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + rorl $5,%r13d + addl %eax,%r12d + xorl %ebx,%r14d + + addl (%rbp,%rdi,4),%r12d + andl %r9d,%r15d + movl %ecx,%eax + + rorl $11,%r14d + xorl %r9d,%r13d + xorl %r11d,%r15d + + xorl %edx,%eax + xorl %ebx,%r14d + addl %r15d,%r12d + movl %ecx,%r15d + + rorl $6,%r13d + andl 
%ebx,%eax + andl %edx,%r15d + + rorl $2,%r14d + addl %r13d,%r12d + addl %r15d,%eax + + addl %r12d,%r8d + addl %r12d,%eax + leaq 1(%rdi),%rdi + addl %r14d,%eax + + cmpq $64,%rdi + jb L$rounds_16_xx + + movq 64+0(%rsp),%rdi + leaq 64(%rsi),%rsi + + addl 0(%rdi),%eax + addl 4(%rdi),%ebx + addl 8(%rdi),%ecx + addl 12(%rdi),%edx + addl 16(%rdi),%r8d + addl 20(%rdi),%r9d + addl 24(%rdi),%r10d + addl 28(%rdi),%r11d + + cmpq 64+16(%rsp),%rsi + + movl %eax,0(%rdi) + movl %ebx,4(%rdi) + movl %ecx,8(%rdi) + movl %edx,12(%rdi) + movl %r8d,16(%rdi) + movl %r9d,20(%rdi) + movl %r10d,24(%rdi) + movl %r11d,28(%rdi) + jb L$loop + + movq 64+24(%rsp),%rsi + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +L$epilogue: + .byte 0xf3,0xc3 + +.p2align 6 + +K256: +.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 +.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 +.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 +.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 +.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc +.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da +.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 +.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 +.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 +.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 +.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 +.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 +.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 +.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 +.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 +.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 diff --git a/code/crypto/sha/sha256.c b/code/crypto/sha/sha256.c new file mode 100644 index 0000000..ee22321 --- /dev/null +++ b/code/crypto/sha/sha256.c @@ -0,0 +1,281 @@ +/* $OpenBSD: sha256.c,v 1.9 2015/09/10 15:56:26 jsing Exp $ */ +/* ==================================================================== + * Copyright (c) 2004 The OpenSSL Project. All rights reserved + * according to the OpenSSL license [found in ../../LICENSE]. 
+ * ====================================================================
+ */
+
+#if !defined(OPENSSL_NO_SHA) && !defined(OPENSSL_NO_SHA256)
+
+#include <machine/endian.h>
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <compat.h>
+#include <sha.h>
+
+int SHA224_Init(SHA256_CTX *c)
+ {
+ memset (c,0,sizeof(*c));
+ c->h[0]=0xc1059ed8UL; c->h[1]=0x367cd507UL;
+ c->h[2]=0x3070dd17UL; c->h[3]=0xf70e5939UL;
+ c->h[4]=0xffc00b31UL; c->h[5]=0x68581511UL;
+ c->h[6]=0x64f98fa7UL; c->h[7]=0xbefa4fa4UL;
+ c->md_len=SHA224_DIGEST_LENGTH;
+ return 1;
+ }
+
+int SHA256_Init(SHA256_CTX *c)
+ {
+ memset (c,0,sizeof(*c));
+ c->h[0]=0x6a09e667UL; c->h[1]=0xbb67ae85UL;
+ c->h[2]=0x3c6ef372UL; c->h[3]=0xa54ff53aUL;
+ c->h[4]=0x510e527fUL; c->h[5]=0x9b05688cUL;
+ c->h[6]=0x1f83d9abUL; c->h[7]=0x5be0cd19UL;
+ c->md_len=SHA256_DIGEST_LENGTH;
+ return 1;
+ }
+
+unsigned char *SHA224(const unsigned char *d, size_t n, unsigned char *md)
+ {
+ SHA256_CTX c;
+ static unsigned char m[SHA224_DIGEST_LENGTH];
+
+ if (md == NULL) md=m;
+ SHA224_Init(&c);
+ SHA256_Update(&c,d,n);
+ SHA256_Final(md,&c);
+ explicit_bzero(&c,sizeof(c));
+ return(md);
+ }
+
+unsigned char *SHA256(const unsigned char *d, size_t n, unsigned char *md)
+ {
+ SHA256_CTX c;
+ static unsigned char m[SHA256_DIGEST_LENGTH];
+
+ if (md == NULL) md=m;
+ SHA256_Init(&c);
+ SHA256_Update(&c,d,n);
+ SHA256_Final(md,&c);
+ explicit_bzero(&c,sizeof(c));
+ return(md);
+ }
+
+int SHA224_Update(SHA256_CTX *c, const void *data, size_t len)
+{ return SHA256_Update (c,data,len); }
+int SHA224_Final (unsigned char *md, SHA256_CTX *c)
+{ return SHA256_Final (md,c); }
+
+#define DATA_ORDER_IS_BIG_ENDIAN
+
+#define HASH_LONG SHA_LONG
+#define HASH_CTX SHA256_CTX
+#define HASH_CBLOCK SHA_CBLOCK
+/*
+ * Note that FIPS180-2 discusses "Truncation of the Hash Function Output."
+ * default: case below covers for it. It's not clear however if it's
+ * permitted to truncate to an amount of bytes not divisible by 4. I bet not,
+ * but if it is, then default: case shall be extended. For reference.
+ * Idea behind separate cases for pre-defined lengths is to let the
+ * compiler decide if it's appropriate to unroll small loops.
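+ * (SHA-224, for instance, keeps only the first seven of the eight state
+ * words, i.e. 28 bytes, hence the dedicated SHA224_DIGEST_LENGTH case
+ * in the switch below.)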
+ */
+#define HASH_MAKE_STRING(c,s) do { \
+ unsigned long ll; \
+ unsigned int nn; \
+ switch ((c)->md_len) \
+ { case SHA224_DIGEST_LENGTH: \
+ for (nn=0;nn<SHA224_DIGEST_LENGTH/4;nn++) \
+ { ll=(c)->h[nn]; HOST_l2c(ll,(s)); } \
+ break; \
+ case SHA256_DIGEST_LENGTH: \
+ for (nn=0;nn<SHA256_DIGEST_LENGTH/4;nn++) \
+ { ll=(c)->h[nn]; HOST_l2c(ll,(s)); } \
+ break; \
+ default: \
+ if ((c)->md_len > SHA256_DIGEST_LENGTH) \
+ return 0; \
+ for (nn=0;nn<(c)->md_len/4;nn++) \
+ { ll=(c)->h[nn]; HOST_l2c(ll,(s)); } \
+ break; \
+ } \
+ } while (0)
+
+#define HASH_UPDATE SHA256_Update
+#define HASH_TRANSFORM SHA256_Transform
+#define HASH_FINAL SHA256_Final
+#define HASH_BLOCK_DATA_ORDER sha256_block_data_order
+#ifndef SHA256_ASM
+static
+#endif
+void sha256_block_data_order (SHA256_CTX *ctx, const void *in, size_t num);
+
+#include "md32_common.h"
+
+#ifndef SHA256_ASM
+static const SHA_LONG K256[64] = {
+ 0x428a2f98UL,0x71374491UL,0xb5c0fbcfUL,0xe9b5dba5UL,
+ 0x3956c25bUL,0x59f111f1UL,0x923f82a4UL,0xab1c5ed5UL,
+ 0xd807aa98UL,0x12835b01UL,0x243185beUL,0x550c7dc3UL,
+ 0x72be5d74UL,0x80deb1feUL,0x9bdc06a7UL,0xc19bf174UL,
+ 0xe49b69c1UL,0xefbe4786UL,0x0fc19dc6UL,0x240ca1ccUL,
+ 0x2de92c6fUL,0x4a7484aaUL,0x5cb0a9dcUL,0x76f988daUL,
+ 0x983e5152UL,0xa831c66dUL,0xb00327c8UL,0xbf597fc7UL,
+ 0xc6e00bf3UL,0xd5a79147UL,0x06ca6351UL,0x14292967UL,
+ 0x27b70a85UL,0x2e1b2138UL,0x4d2c6dfcUL,0x53380d13UL,
+ 0x650a7354UL,0x766a0abbUL,0x81c2c92eUL,0x92722c85UL,
+ 0xa2bfe8a1UL,0xa81a664bUL,0xc24b8b70UL,0xc76c51a3UL,
+ 0xd192e819UL,0xd6990624UL,0xf40e3585UL,0x106aa070UL,
+ 0x19a4c116UL,0x1e376c08UL,0x2748774cUL,0x34b0bcb5UL,
+ 0x391c0cb3UL,0x4ed8aa4aUL,0x5b9cca4fUL,0x682e6ff3UL,
+ 0x748f82eeUL,0x78a5636fUL,0x84c87814UL,0x8cc70208UL,
+ 0x90befffaUL,0xa4506cebUL,0xbef9a3f7UL,0xc67178f2UL };
+
+/*
+ * FIPS specification refers to right rotations, while our ROTATE macro
+ * is left one. This is why you might notice that rotation coefficients
+ * differ from those observed in FIPS document by 32-N...
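+ * For example, FIPS defines Sigma0(x) = ROTR(x,2) ^ ROTR(x,13) ^ ROTR(x,22);
+ * since a right rotation by n equals a left rotation by 32-n on 32-bit
+ * words, that gives the 30/19/10 coefficients in the Sigma0 macro below.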
+ */
+#define Sigma0(x) (ROTATE((x),30) ^ ROTATE((x),19) ^ ROTATE((x),10))
+#define Sigma1(x) (ROTATE((x),26) ^ ROTATE((x),21) ^ ROTATE((x),7))
+#define sigma0(x) (ROTATE((x),25) ^ ROTATE((x),14) ^ ((x)>>3))
+#define sigma1(x) (ROTATE((x),15) ^ ROTATE((x),13) ^ ((x)>>10))
+
+#define Ch(x,y,z) (((x) & (y)) ^ ((~(x)) & (z)))
+#define Maj(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
+
+#ifdef OPENSSL_SMALL_FOOTPRINT
+
+static void sha256_block_data_order (SHA256_CTX *ctx, const void *in, size_t num)
+ {
+ unsigned MD32_REG_T a,b,c,d,e,f,g,h,s0,s1,T1,T2;
+ SHA_LONG X[16],l;
+ int i;
+ const unsigned char *data=in;
+
+ while (num--) {
+
+ a = ctx->h[0]; b = ctx->h[1]; c = ctx->h[2]; d = ctx->h[3];
+ e = ctx->h[4]; f = ctx->h[5]; g = ctx->h[6]; h = ctx->h[7];
+
+ for (i=0;i<16;i++)
+ {
+ HOST_c2l(data,l); T1 = X[i] = l;
+ T1 += h + Sigma1(e) + Ch(e,f,g) + K256[i];
+ T2 = Sigma0(a) + Maj(a,b,c);
+ h = g; g = f; f = e; e = d + T1;
+ d = c; c = b; b = a; a = T1 + T2;
+ }
+
+ for (;i<64;i++)
+ {
+ s0 = X[(i+1)&0x0f]; s0 = sigma0(s0);
+ s1 = X[(i+14)&0x0f]; s1 = sigma1(s1);
+
+ T1 = X[i&0xf] += s0 + s1 + X[(i+9)&0xf];
+ T1 += h + Sigma1(e) + Ch(e,f,g) + K256[i];
+ T2 = Sigma0(a) + Maj(a,b,c);
+ h = g; g = f; f = e; e = d + T1;
+ d = c; c = b; b = a; a = T1 + T2;
+ }
+
+ ctx->h[0] += a; ctx->h[1] += b; ctx->h[2] += c; ctx->h[3] += d;
+ ctx->h[4] += e; ctx->h[5] += f; ctx->h[6] += g; ctx->h[7] += h;
+
+ }
+}
+
+#else
+
+#define ROUND_00_15(i,a,b,c,d,e,f,g,h) do { \
+ T1 += h + Sigma1(e) + Ch(e,f,g) + K256[i]; \
+ h = Sigma0(a) + Maj(a,b,c); \
+ d += T1; h += T1; } while (0)
+
+#define ROUND_16_63(i,a,b,c,d,e,f,g,h,X) do { \
+ s0 = X[(i+1)&0x0f]; s0 = sigma0(s0); \
+ s1 = X[(i+14)&0x0f]; s1 = sigma1(s1); \
+ T1 = X[(i)&0x0f] += s0 + s1 + X[(i+9)&0x0f]; \
+ ROUND_00_15(i,a,b,c,d,e,f,g,h); } while (0)
+
+static void sha256_block_data_order (SHA256_CTX *ctx, const void *in, size_t num)
+ {
+ unsigned MD32_REG_T a,b,c,d,e,f,g,h,s0,s1,T1;
+ SHA_LONG X[16];
+ int i;
+ const unsigned char *data=in;
+
+ while (num--) {
+
+ a = ctx->h[0]; b = ctx->h[1]; c = ctx->h[2]; d = ctx->h[3];
+ e = ctx->h[4]; f = ctx->h[5]; g = ctx->h[6]; h = ctx->h[7];
+
+ if (BYTE_ORDER != LITTLE_ENDIAN &&
+ sizeof(SHA_LONG)==4 && ((size_t)in%4)==0)
+ {
+ const SHA_LONG *W=(const SHA_LONG *)data;
+
+ T1 = X[0] = W[0]; ROUND_00_15(0,a,b,c,d,e,f,g,h);
+ T1 = X[1] = W[1]; ROUND_00_15(1,h,a,b,c,d,e,f,g);
+ T1 = X[2] = W[2]; ROUND_00_15(2,g,h,a,b,c,d,e,f);
+ T1 = X[3] = W[3]; ROUND_00_15(3,f,g,h,a,b,c,d,e);
+ T1 = X[4] = W[4]; ROUND_00_15(4,e,f,g,h,a,b,c,d);
+ T1 = X[5] = W[5]; ROUND_00_15(5,d,e,f,g,h,a,b,c);
+ T1 = X[6] = W[6]; ROUND_00_15(6,c,d,e,f,g,h,a,b);
+ T1 = X[7] = W[7]; ROUND_00_15(7,b,c,d,e,f,g,h,a);
+ T1 = X[8] = W[8]; ROUND_00_15(8,a,b,c,d,e,f,g,h);
+ T1 = X[9] = W[9]; ROUND_00_15(9,h,a,b,c,d,e,f,g);
+ T1 = X[10] = W[10]; ROUND_00_15(10,g,h,a,b,c,d,e,f);
+ T1 = X[11] = W[11]; ROUND_00_15(11,f,g,h,a,b,c,d,e);
+ T1 = X[12] = W[12]; ROUND_00_15(12,e,f,g,h,a,b,c,d);
+ T1 = X[13] = W[13]; ROUND_00_15(13,d,e,f,g,h,a,b,c);
+ T1 = X[14] = W[14]; ROUND_00_15(14,c,d,e,f,g,h,a,b);
+ T1 = X[15] = W[15]; ROUND_00_15(15,b,c,d,e,f,g,h,a);
+
+ data += SHA256_CBLOCK;
+ }
+ else
+ {
+ SHA_LONG l;
+
+ HOST_c2l(data,l); T1 = X[0] = l; ROUND_00_15(0,a,b,c,d,e,f,g,h);
+ HOST_c2l(data,l); T1 = X[1] = l; ROUND_00_15(1,h,a,b,c,d,e,f,g);
+ HOST_c2l(data,l); T1 = X[2] = l; ROUND_00_15(2,g,h,a,b,c,d,e,f);
+ HOST_c2l(data,l); T1 = X[3] = l; ROUND_00_15(3,f,g,h,a,b,c,d,e);
+ HOST_c2l(data,l); T1 = X[4] = l; ROUND_00_15(4,e,f,g,h,a,b,c,d);
+
HOST_c2l(data,l); T1 = X[5] = l; ROUND_00_15(5,d,e,f,g,h,a,b,c); + HOST_c2l(data,l); T1 = X[6] = l; ROUND_00_15(6,c,d,e,f,g,h,a,b); + HOST_c2l(data,l); T1 = X[7] = l; ROUND_00_15(7,b,c,d,e,f,g,h,a); + HOST_c2l(data,l); T1 = X[8] = l; ROUND_00_15(8,a,b,c,d,e,f,g,h); + HOST_c2l(data,l); T1 = X[9] = l; ROUND_00_15(9,h,a,b,c,d,e,f,g); + HOST_c2l(data,l); T1 = X[10] = l; ROUND_00_15(10,g,h,a,b,c,d,e,f); + HOST_c2l(data,l); T1 = X[11] = l; ROUND_00_15(11,f,g,h,a,b,c,d,e); + HOST_c2l(data,l); T1 = X[12] = l; ROUND_00_15(12,e,f,g,h,a,b,c,d); + HOST_c2l(data,l); T1 = X[13] = l; ROUND_00_15(13,d,e,f,g,h,a,b,c); + HOST_c2l(data,l); T1 = X[14] = l; ROUND_00_15(14,c,d,e,f,g,h,a,b); + HOST_c2l(data,l); T1 = X[15] = l; ROUND_00_15(15,b,c,d,e,f,g,h,a); + } + + for (i=16;i<64;i+=8) + { + ROUND_16_63(i+0,a,b,c,d,e,f,g,h,X); + ROUND_16_63(i+1,h,a,b,c,d,e,f,g,X); + ROUND_16_63(i+2,g,h,a,b,c,d,e,f,X); + ROUND_16_63(i+3,f,g,h,a,b,c,d,e,X); + ROUND_16_63(i+4,e,f,g,h,a,b,c,d,X); + ROUND_16_63(i+5,d,e,f,g,h,a,b,c,X); + ROUND_16_63(i+6,c,d,e,f,g,h,a,b,X); + ROUND_16_63(i+7,b,c,d,e,f,g,h,a,X); + } + + ctx->h[0] += a; ctx->h[1] += b; ctx->h[2] += c; ctx->h[3] += d; + ctx->h[4] += e; ctx->h[5] += f; ctx->h[6] += g; ctx->h[7] += h; + + } + } + +#endif +#endif /* SHA256_ASM */ + +#endif /* OPENSSL_NO_SHA256 */ diff --git a/code/crypto/sha/sha512-elf-x86_64.S b/code/crypto/sha/sha512-elf-x86_64.S new file mode 100644 index 0000000..d218304 --- /dev/null +++ b/code/crypto/sha/sha512-elf-x86_64.S @@ -0,0 +1,1806 @@ +#include "x86_arch.h" +.text + +.globl sha512_block_data_order +.type sha512_block_data_order,@function +.align 16 +sha512_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq %r13 + pushq %r14 + pushq %r15 + movq %rsp,%r11 + shlq $4,%rdx + subq $128+32,%rsp + leaq (%rsi,%rdx,8),%rdx + andq $-64,%rsp + movq %rdi,128+0(%rsp) + movq %rsi,128+8(%rsp) + movq %rdx,128+16(%rsp) + movq %r11,128+24(%rsp) +.Lprologue: + + leaq K512(%rip),%rbp + + movq 0(%rdi),%rax + movq 8(%rdi),%rbx + movq 16(%rdi),%rcx + movq 24(%rdi),%rdx + movq 32(%rdi),%r8 + movq 40(%rdi),%r9 + movq 48(%rdi),%r10 + movq 56(%rdi),%r11 + jmp .Lloop + +.align 16 +.Lloop: + xorq %rdi,%rdi + movq 0(%rsi),%r12 + movq %r8,%r13 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,0(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 8(%rsi),%r12 + movq %rdx,%r13 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,8(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 16(%rsi),%r12 + movq %rcx,%r13 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,16(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq 
%r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 24(%rsi),%r12 + movq %rbx,%r13 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,24(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 32(%rsi),%r12 + movq %rax,%r13 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,32(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 40(%rsi),%r12 + movq %r11,%r13 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,40(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 48(%rsi),%r12 + movq %r10,%r13 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,48(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 56(%rsi),%r12 + movq %r9,%r13 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,56(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + movq 64(%rsi),%r12 + movq %r8,%r13 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,64(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 
+ + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 72(%rsi),%r12 + movq %rdx,%r13 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,72(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 80(%rsi),%r12 + movq %rcx,%r13 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,80(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 88(%rsi),%r12 + movq %rbx,%r13 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,88(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 96(%rsi),%r12 + movq %rax,%r13 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,96(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 104(%rsi),%r12 + movq %r11,%r13 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,104(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 112(%rsi),%r12 + movq %r10,%r13 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,112(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq 
%r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 120(%rsi),%r12 + movq %r9,%r13 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,120(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + jmp .Lrounds_16_xx +.align 16 +.Lrounds_16_xx: + movq 8(%rsp),%r13 + movq 112(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 72(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 0(%rsp),%r12 + movq %r8,%r13 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,0(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 16(%rsp),%r13 + movq 120(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 80(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 8(%rsp),%r12 + movq %rdx,%r13 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,8(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 24(%rsp),%r13 + movq 0(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 88(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 16(%rsp),%r12 + movq %rcx,%r13 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,16(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 32(%rsp),%r13 + movq 8(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq 
$7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 96(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 24(%rsp),%r12 + movq %rbx,%r13 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,24(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 40(%rsp),%r13 + movq 16(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 104(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 32(%rsp),%r12 + movq %rax,%r13 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,32(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 48(%rsp),%r13 + movq 24(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 112(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 40(%rsp),%r12 + movq %r11,%r13 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,40(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 56(%rsp),%r13 + movq 32(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 120(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 48(%rsp),%r12 + movq %r10,%r13 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,48(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 64(%rsp),%r13 + movq 40(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + 
movq 0(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 56(%rsp),%r12 + movq %r9,%r13 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,56(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + movq 72(%rsp),%r13 + movq 48(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 8(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 64(%rsp),%r12 + movq %r8,%r13 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,64(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 80(%rsp),%r13 + movq 56(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 16(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 72(%rsp),%r12 + movq %rdx,%r13 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,72(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 88(%rsp),%r13 + movq 64(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 24(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 80(%rsp),%r12 + movq %rcx,%r13 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,80(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 96(%rsp),%r13 + movq 72(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 32(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq 
$19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 88(%rsp),%r12 + movq %rbx,%r13 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,88(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 104(%rsp),%r13 + movq 80(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 40(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 96(%rsp),%r12 + movq %rax,%r13 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,96(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 112(%rsp),%r13 + movq 88(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 48(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 104(%rsp),%r12 + movq %r11,%r13 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,104(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 120(%rsp),%r13 + movq 96(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 56(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 112(%rsp),%r12 + movq %r10,%r13 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,112(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 0(%rsp),%r13 + movq 104(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 64(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 120(%rsp),%r12 + movq 
%r9,%r13 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,120(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + cmpq $80,%rdi + jb .Lrounds_16_xx + + movq 128+0(%rsp),%rdi + leaq 128(%rsi),%rsi + + addq 0(%rdi),%rax + addq 8(%rdi),%rbx + addq 16(%rdi),%rcx + addq 24(%rdi),%rdx + addq 32(%rdi),%r8 + addq 40(%rdi),%r9 + addq 48(%rdi),%r10 + addq 56(%rdi),%r11 + + cmpq 128+16(%rsp),%rsi + + movq %rax,0(%rdi) + movq %rbx,8(%rdi) + movq %rcx,16(%rdi) + movq %rdx,24(%rdi) + movq %r8,32(%rdi) + movq %r9,40(%rdi) + movq %r10,48(%rdi) + movq %r11,56(%rdi) + jb .Lloop + + movq 128+24(%rsp),%rsi + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +.Lepilogue: + .byte 0xf3,0xc3 +.size sha512_block_data_order,.-sha512_block_data_order +.align 64 +.type K512,@object +K512: +.quad 0x428a2f98d728ae22,0x7137449123ef65cd +.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc +.quad 0x3956c25bf348b538,0x59f111f1b605d019 +.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 +.quad 0xd807aa98a3030242,0x12835b0145706fbe +.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 +.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 +.quad 0x9bdc06a725c71235,0xc19bf174cf692694 +.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 +.quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 +.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 +.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 +.quad 0x983e5152ee66dfab,0xa831c66d2db43210 +.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 +.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 +.quad 0x06ca6351e003826f,0x142929670a0e6e70 +.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 +.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df +.quad 0x650a73548baf63de,0x766a0abb3c77b2a8 +.quad 0x81c2c92e47edaee6,0x92722c851482353b +.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 +.quad 0xc24b8b70d0f89791,0xc76c51a30654be30 +.quad 0xd192e819d6ef5218,0xd69906245565a910 +.quad 0xf40e35855771202a,0x106aa07032bbd1b8 +.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 +.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 +.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb +.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 +.quad 0x748f82ee5defb2fc,0x78a5636f43172f60 +.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec +.quad 0x90befffa23631e28,0xa4506cebde82bde9 +.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b +.quad 0xca273eceea26619c,0xd186b8c721c0c207 +.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 +.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 +.quad 0x113f9804bef90dae,0x1b710b35131c471b +.quad 0x28db77f523047d84,0x32caab7b40c72493 +.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c +.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a +.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 +#if defined(HAVE_GNU_STACK) +.section .note.GNU-stack,"",%progbits +#endif diff --git a/code/crypto/sha/sha512-macosx-x86_64.S b/code/crypto/sha/sha512-macosx-x86_64.S new file mode 100644 index 0000000..4a51d14 --- /dev/null +++ b/code/crypto/sha/sha512-macosx-x86_64.S @@ -0,0 +1,1803 @@ +#include "x86_arch.h" +.text + +.globl _sha512_block_data_order + +.p2align 4 +_sha512_block_data_order: + pushq %rbx + pushq %rbp + pushq %r12 + pushq 
%r13 + pushq %r14 + pushq %r15 + movq %rsp,%r11 + shlq $4,%rdx + subq $128+32,%rsp + leaq (%rsi,%rdx,8),%rdx + andq $-64,%rsp + movq %rdi,128+0(%rsp) + movq %rsi,128+8(%rsp) + movq %rdx,128+16(%rsp) + movq %r11,128+24(%rsp) +L$prologue: + + leaq K512(%rip),%rbp + + movq 0(%rdi),%rax + movq 8(%rdi),%rbx + movq 16(%rdi),%rcx + movq 24(%rdi),%rdx + movq 32(%rdi),%r8 + movq 40(%rdi),%r9 + movq 48(%rdi),%r10 + movq 56(%rdi),%r11 + jmp L$loop + +.p2align 4 +L$loop: + xorq %rdi,%rdi + movq 0(%rsi),%r12 + movq %r8,%r13 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,0(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 8(%rsi),%r12 + movq %rdx,%r13 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,8(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 16(%rsi),%r12 + movq %rcx,%r13 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,16(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 24(%rsi),%r12 + movq %rbx,%r13 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,24(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 32(%rsi),%r12 + movq %rax,%r13 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,32(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 40(%rsi),%r12 + movq %r11,%r13 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,40(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + 
+ rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 48(%rsi),%r12 + movq %r10,%r13 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,48(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 56(%rsi),%r12 + movq %r9,%r13 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,56(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + movq 64(%rsi),%r12 + movq %r8,%r13 + movq %rax,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,64(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 72(%rsi),%r12 + movq %rdx,%r13 + movq %r11,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,72(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 80(%rsi),%r12 + movq %rcx,%r13 + movq %r10,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,80(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 88(%rsi),%r12 + movq %rbx,%r13 + movq %r9,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,88(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq 
(%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 96(%rsi),%r12 + movq %rax,%r13 + movq %r8,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,96(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 104(%rsi),%r12 + movq %r11,%r13 + movq %rdx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,104(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 112(%rsi),%r12 + movq %r10,%r13 + movq %rcx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,112(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 120(%rsi),%r12 + movq %r9,%r13 + movq %rbx,%r14 + bswapq %r12 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,120(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + jmp L$rounds_16_xx +.p2align 4 +L$rounds_16_xx: + movq 8(%rsp),%r13 + movq 112(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 72(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 0(%rsp),%r12 + movq %r8,%r13 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,0(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq 
%r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 16(%rsp),%r13 + movq 120(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 80(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 8(%rsp),%r12 + movq %rdx,%r13 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,8(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 24(%rsp),%r13 + movq 0(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 88(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 16(%rsp),%r12 + movq %rcx,%r13 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,16(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 32(%rsp),%r13 + movq 8(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 96(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 24(%rsp),%r12 + movq %rbx,%r13 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,24(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 40(%rsp),%r13 + movq 16(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 104(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 32(%rsp),%r12 + movq %rax,%r13 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,32(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 48(%rsp),%r13 + movq 
24(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 112(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 40(%rsp),%r12 + movq %r11,%r13 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,40(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 56(%rsp),%r13 + movq 32(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 120(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 48(%rsp),%r12 + movq %r10,%r13 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,48(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 64(%rsp),%r13 + movq 40(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 0(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 56(%rsp),%r12 + movq %r9,%r13 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,56(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + movq 72(%rsp),%r13 + movq 48(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 8(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 64(%rsp),%r12 + movq %r8,%r13 + addq %r14,%r12 + movq %rax,%r14 + rorq $23,%r13 + movq %r9,%r15 + movq %r12,64(%rsp) + + rorq $5,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + rorq $4,%r13 + addq %r11,%r12 + xorq %rax,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r8,%r15 + movq %rbx,%r11 + + rorq $6,%r14 + xorq %r8,%r13 + xorq %r10,%r15 + + xorq %rcx,%r11 + xorq %rax,%r14 + addq %r15,%r12 + movq %rbx,%r15 + + rorq $14,%r13 + andq %rax,%r11 + andq %rcx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r11 + + addq %r12,%rdx + addq %r12,%r11 + leaq 1(%rdi),%rdi + addq %r14,%r11 + + movq 80(%rsp),%r13 + movq 56(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq 
%r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 16(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 72(%rsp),%r12 + movq %rdx,%r13 + addq %r14,%r12 + movq %r11,%r14 + rorq $23,%r13 + movq %r8,%r15 + movq %r12,72(%rsp) + + rorq $5,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + rorq $4,%r13 + addq %r10,%r12 + xorq %r11,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rdx,%r15 + movq %rax,%r10 + + rorq $6,%r14 + xorq %rdx,%r13 + xorq %r9,%r15 + + xorq %rbx,%r10 + xorq %r11,%r14 + addq %r15,%r12 + movq %rax,%r15 + + rorq $14,%r13 + andq %r11,%r10 + andq %rbx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r10 + + addq %r12,%rcx + addq %r12,%r10 + leaq 1(%rdi),%rdi + addq %r14,%r10 + + movq 88(%rsp),%r13 + movq 64(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 24(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 80(%rsp),%r12 + movq %rcx,%r13 + addq %r14,%r12 + movq %r10,%r14 + rorq $23,%r13 + movq %rdx,%r15 + movq %r12,80(%rsp) + + rorq $5,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + rorq $4,%r13 + addq %r9,%r12 + xorq %r10,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rcx,%r15 + movq %r11,%r9 + + rorq $6,%r14 + xorq %rcx,%r13 + xorq %r8,%r15 + + xorq %rax,%r9 + xorq %r10,%r14 + addq %r15,%r12 + movq %r11,%r15 + + rorq $14,%r13 + andq %r10,%r9 + andq %rax,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r9 + + addq %r12,%rbx + addq %r12,%r9 + leaq 1(%rdi),%rdi + addq %r14,%r9 + + movq 96(%rsp),%r13 + movq 72(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 32(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 88(%rsp),%r12 + movq %rbx,%r13 + addq %r14,%r12 + movq %r9,%r14 + rorq $23,%r13 + movq %rcx,%r15 + movq %r12,88(%rsp) + + rorq $5,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + rorq $4,%r13 + addq %r8,%r12 + xorq %r9,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rbx,%r15 + movq %r10,%r8 + + rorq $6,%r14 + xorq %rbx,%r13 + xorq %rdx,%r15 + + xorq %r11,%r8 + xorq %r9,%r14 + addq %r15,%r12 + movq %r10,%r15 + + rorq $14,%r13 + andq %r9,%r8 + andq %r11,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%r8 + + addq %r12,%rax + addq %r12,%r8 + leaq 1(%rdi),%rdi + addq %r14,%r8 + + movq 104(%rsp),%r13 + movq 80(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 40(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 96(%rsp),%r12 + movq %rax,%r13 + addq %r14,%r12 + movq %r8,%r14 + rorq $23,%r13 + movq %rbx,%r15 + movq %r12,96(%rsp) + + rorq $5,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + rorq $4,%r13 + addq %rdx,%r12 + xorq %r8,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %rax,%r15 + movq %r9,%rdx + + rorq $6,%r14 + xorq %rax,%r13 + xorq %rcx,%r15 + + xorq %r10,%rdx + xorq %r8,%r14 + addq %r15,%r12 + movq %r9,%r15 + + rorq $14,%r13 + andq %r8,%rdx + andq %r10,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rdx + + addq %r12,%r11 + addq %r12,%rdx + leaq 1(%rdi),%rdi + addq %r14,%rdx + + movq 112(%rsp),%r13 + movq 88(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 48(%rsp),%r12 + + 
rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 104(%rsp),%r12 + movq %r11,%r13 + addq %r14,%r12 + movq %rdx,%r14 + rorq $23,%r13 + movq %rax,%r15 + movq %r12,104(%rsp) + + rorq $5,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + rorq $4,%r13 + addq %rcx,%r12 + xorq %rdx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r11,%r15 + movq %r8,%rcx + + rorq $6,%r14 + xorq %r11,%r13 + xorq %rbx,%r15 + + xorq %r9,%rcx + xorq %rdx,%r14 + addq %r15,%r12 + movq %r8,%r15 + + rorq $14,%r13 + andq %rdx,%rcx + andq %r9,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rcx + + addq %r12,%r10 + addq %r12,%rcx + leaq 1(%rdi),%rdi + addq %r14,%rcx + + movq 120(%rsp),%r13 + movq 96(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 56(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 112(%rsp),%r12 + movq %r10,%r13 + addq %r14,%r12 + movq %rcx,%r14 + rorq $23,%r13 + movq %r11,%r15 + movq %r12,112(%rsp) + + rorq $5,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + rorq $4,%r13 + addq %rbx,%r12 + xorq %rcx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r10,%r15 + movq %rdx,%rbx + + rorq $6,%r14 + xorq %r10,%r13 + xorq %rax,%r15 + + xorq %r8,%rbx + xorq %rcx,%r14 + addq %r15,%r12 + movq %rdx,%r15 + + rorq $14,%r13 + andq %rcx,%rbx + andq %r8,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rbx + + addq %r12,%r9 + addq %r12,%rbx + leaq 1(%rdi),%rdi + addq %r14,%rbx + + movq 0(%rsp),%r13 + movq 104(%rsp),%r14 + movq %r13,%r12 + movq %r14,%r15 + + rorq $7,%r12 + xorq %r13,%r12 + shrq $7,%r13 + + rorq $1,%r12 + xorq %r12,%r13 + movq 64(%rsp),%r12 + + rorq $42,%r15 + xorq %r14,%r15 + shrq $6,%r14 + + rorq $19,%r15 + addq %r13,%r12 + xorq %r15,%r14 + + addq 120(%rsp),%r12 + movq %r9,%r13 + addq %r14,%r12 + movq %rbx,%r14 + rorq $23,%r13 + movq %r10,%r15 + movq %r12,120(%rsp) + + rorq $5,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + rorq $4,%r13 + addq %rax,%r12 + xorq %rbx,%r14 + + addq (%rbp,%rdi,8),%r12 + andq %r9,%r15 + movq %rcx,%rax + + rorq $6,%r14 + xorq %r9,%r13 + xorq %r11,%r15 + + xorq %rdx,%rax + xorq %rbx,%r14 + addq %r15,%r12 + movq %rcx,%r15 + + rorq $14,%r13 + andq %rbx,%rax + andq %rdx,%r15 + + rorq $28,%r14 + addq %r13,%r12 + addq %r15,%rax + + addq %r12,%r8 + addq %r12,%rax + leaq 1(%rdi),%rdi + addq %r14,%rax + + cmpq $80,%rdi + jb L$rounds_16_xx + + movq 128+0(%rsp),%rdi + leaq 128(%rsi),%rsi + + addq 0(%rdi),%rax + addq 8(%rdi),%rbx + addq 16(%rdi),%rcx + addq 24(%rdi),%rdx + addq 32(%rdi),%r8 + addq 40(%rdi),%r9 + addq 48(%rdi),%r10 + addq 56(%rdi),%r11 + + cmpq 128+16(%rsp),%rsi + + movq %rax,0(%rdi) + movq %rbx,8(%rdi) + movq %rcx,16(%rdi) + movq %rdx,24(%rdi) + movq %r8,32(%rdi) + movq %r9,40(%rdi) + movq %r10,48(%rdi) + movq %r11,56(%rdi) + jb L$loop + + movq 128+24(%rsp),%rsi + movq (%rsi),%r15 + movq 8(%rsi),%r14 + movq 16(%rsi),%r13 + movq 24(%rsi),%r12 + movq 32(%rsi),%rbp + movq 40(%rsi),%rbx + leaq 48(%rsi),%rsp +L$epilogue: + .byte 0xf3,0xc3 + +.p2align 6 + +K512: +.quad 0x428a2f98d728ae22,0x7137449123ef65cd +.quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc +.quad 0x3956c25bf348b538,0x59f111f1b605d019 +.quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 +.quad 0xd807aa98a3030242,0x12835b0145706fbe +.quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 +.quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 +.quad 0x9bdc06a725c71235,0xc19bf174cf692694 +.quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 +.quad 
0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 +.quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 +.quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 +.quad 0x983e5152ee66dfab,0xa831c66d2db43210 +.quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 +.quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 +.quad 0x06ca6351e003826f,0x142929670a0e6e70 +.quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 +.quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df +.quad 0x650a73548baf63de,0x766a0abb3c77b2a8 +.quad 0x81c2c92e47edaee6,0x92722c851482353b +.quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 +.quad 0xc24b8b70d0f89791,0xc76c51a30654be30 +.quad 0xd192e819d6ef5218,0xd69906245565a910 +.quad 0xf40e35855771202a,0x106aa07032bbd1b8 +.quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 +.quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 +.quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb +.quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 +.quad 0x748f82ee5defb2fc,0x78a5636f43172f60 +.quad 0x84c87814a1f0ab72,0x8cc702081a6439ec +.quad 0x90befffa23631e28,0xa4506cebde82bde9 +.quad 0xbef9a3f7b2c67915,0xc67178f2e372532b +.quad 0xca273eceea26619c,0xd186b8c721c0c207 +.quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 +.quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 +.quad 0x113f9804bef90dae,0x1b710b35131c471b +.quad 0x28db77f523047d84,0x32caab7b40c72493 +.quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c +.quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a +.quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 diff --git a/code/crypto/sha/sha512.c b/code/crypto/sha/sha512.c new file mode 100644 index 0000000..364c85c --- /dev/null +++ b/code/crypto/sha/sha512.c @@ -0,0 +1,544 @@ +/* $OpenBSD: sha512.c,v 1.15 2016/11/04 13:56:05 miod Exp $ */ +/* ==================================================================== + * Copyright (c) 2004 The OpenSSL Project. All rights reserved + * according to the OpenSSL license [found in ../../LICENSE]. + * ==================================================================== + */ + +#if !defined(OPENSSL_NO_SHA) && !defined(OPENSSL_NO_SHA512) +/* + * IMPLEMENTATION NOTES. + * + * As you might have noticed 32-bit hash algorithms: + * + * - permit SHA_LONG to be wider than 32-bit (case on CRAY); + * - optimized versions implement two transform functions: one operating + * on [aligned] data in host byte order and one - on data in input + * stream byte order; + * - share common byte-order neutral collector and padding function + * implementations, ../md32_common.h; + * + * Neither of the above applies to this SHA-512 implementations. Reasons + * [in reverse order] are: + * + * - it's the only 64-bit hash algorithm for the moment of this writing, + * there is no need for common collector/padding implementation [yet]; + * - by supporting only one transform function [which operates on + * *aligned* data in input stream byte order, big-endian in this case] + * we minimize burden of maintenance in two ways: a) collector/padding + * function is simpler; b) only one transform function to stare at; + * - SHA_LONG64 is required to be exactly 64-bit in order to be able to + * apply a number of optimizations to mitigate potential performance + * penalties caused by previous design decision; + * + * Caveat lector. + * + * Implementation relies on the fact that "long long" is 64-bit on + * both 32- and 64-bit platforms. If some compiler vendor comes up + * with 128-bit long long, adjustment to sha.h would be required. + * As this implementation relies on 64-bit integer type, it's totally + * inappropriate for platforms which don't support it, most notably + * 16-bit platforms. 
+ * <appro@fy.chalmers.se> + */ + +#include <machine/endian.h> + +#include <stdlib.h> +#include <string.h> + +#include <compat.h> +#include <sha.h> + +#if !defined(__STRICT_ALIGNMENT) || defined(SHA512_ASM) +#define SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA +#endif + +int SHA384_Init(SHA512_CTX *c) + { + c->h[0]=U64(0xcbbb9d5dc1059ed8); + c->h[1]=U64(0x629a292a367cd507); + c->h[2]=U64(0x9159015a3070dd17); + c->h[3]=U64(0x152fecd8f70e5939); + c->h[4]=U64(0x67332667ffc00b31); + c->h[5]=U64(0x8eb44a8768581511); + c->h[6]=U64(0xdb0c2e0d64f98fa7); + c->h[7]=U64(0x47b5481dbefa4fa4); + + c->Nl=0; c->Nh=0; + c->num=0; c->md_len=SHA384_DIGEST_LENGTH; + return 1; + } + +int SHA512_Init(SHA512_CTX *c) + { + c->h[0]=U64(0x6a09e667f3bcc908); + c->h[1]=U64(0xbb67ae8584caa73b); + c->h[2]=U64(0x3c6ef372fe94f82b); + c->h[3]=U64(0xa54ff53a5f1d36f1); + c->h[4]=U64(0x510e527fade682d1); + c->h[5]=U64(0x9b05688c2b3e6c1f); + c->h[6]=U64(0x1f83d9abfb41bd6b); + c->h[7]=U64(0x5be0cd19137e2179); + + c->Nl=0; c->Nh=0; + c->num=0; c->md_len=SHA512_DIGEST_LENGTH; + return 1; + } + +#ifndef SHA512_ASM +static +#endif +void sha512_block_data_order (SHA512_CTX *ctx, const void *in, size_t num); + +int SHA512_Final (unsigned char *md, SHA512_CTX *c) + { + unsigned char *p=(unsigned char *)c->u.p; + size_t n=c->num; + + p[n]=0x80; /* There always is a room for one */ + n++; + if (n > (sizeof(c->u)-16)) + memset (p+n,0,sizeof(c->u)-n), n=0, + sha512_block_data_order (c,p,1); + + memset (p+n,0,sizeof(c->u)-16-n); +#if BYTE_ORDER == BIG_ENDIAN + c->u.d[SHA_LBLOCK-2] = c->Nh; + c->u.d[SHA_LBLOCK-1] = c->Nl; +#else + p[sizeof(c->u)-1] = (unsigned char)(c->Nl); + p[sizeof(c->u)-2] = (unsigned char)(c->Nl>>8); + p[sizeof(c->u)-3] = (unsigned char)(c->Nl>>16); + p[sizeof(c->u)-4] = (unsigned char)(c->Nl>>24); + p[sizeof(c->u)-5] = (unsigned char)(c->Nl>>32); + p[sizeof(c->u)-6] = (unsigned char)(c->Nl>>40); + p[sizeof(c->u)-7] = (unsigned char)(c->Nl>>48); + p[sizeof(c->u)-8] = (unsigned char)(c->Nl>>56); + p[sizeof(c->u)-9] = (unsigned char)(c->Nh); + p[sizeof(c->u)-10] = (unsigned char)(c->Nh>>8); + p[sizeof(c->u)-11] = (unsigned char)(c->Nh>>16); + p[sizeof(c->u)-12] = (unsigned char)(c->Nh>>24); + p[sizeof(c->u)-13] = (unsigned char)(c->Nh>>32); + p[sizeof(c->u)-14] = (unsigned char)(c->Nh>>40); + p[sizeof(c->u)-15] = (unsigned char)(c->Nh>>48); + p[sizeof(c->u)-16] = (unsigned char)(c->Nh>>56); +#endif + + sha512_block_data_order (c,p,1); + + if (md==0) return 0; + + switch (c->md_len) + { + /* Let compiler decide if it's appropriate to unroll... */ + case SHA384_DIGEST_LENGTH: + for (n=0;n<SHA384_DIGEST_LENGTH/8;n++) + { + SHA_LONG64 t = c->h[n]; + + *(md++) = (unsigned char)(t>>56); + *(md++) = (unsigned char)(t>>48); + *(md++) = (unsigned char)(t>>40); + *(md++) = (unsigned char)(t>>32); + *(md++) = (unsigned char)(t>>24); + *(md++) = (unsigned char)(t>>16); + *(md++) = (unsigned char)(t>>8); + *(md++) = (unsigned char)(t); + } + break; + case SHA512_DIGEST_LENGTH: + for (n=0;n<SHA512_DIGEST_LENGTH/8;n++) + { + SHA_LONG64 t = c->h[n]; + + *(md++) = (unsigned char)(t>>56); + *(md++) = (unsigned char)(t>>48); + *(md++) = (unsigned char)(t>>40); + *(md++) = (unsigned char)(t>>32); + *(md++) = (unsigned char)(t>>24); + *(md++) = (unsigned char)(t>>16); + *(md++) = (unsigned char)(t>>8); + *(md++) = (unsigned char)(t); + } + break; + /* ... as well as make sure md_len is not abused. 
*/ + default: return 0; + } + + return 1; + } + +int SHA384_Final (unsigned char *md,SHA512_CTX *c) +{ return SHA512_Final (md,c); } + +int SHA512_Update (SHA512_CTX *c, const void *_data, size_t len) + { + SHA_LONG64 l; + unsigned char *p=c->u.p; + const unsigned char *data=(const unsigned char *)_data; + + if (len==0) return 1; + + l = (c->Nl+(((SHA_LONG64)len)<<3))&U64(0xffffffffffffffff); + if (l < c->Nl) c->Nh++; + if (sizeof(len)>=8) c->Nh+=(((SHA_LONG64)len)>>61); + c->Nl=l; + + if (c->num != 0) + { + size_t n = sizeof(c->u) - c->num; + + if (len < n) + { + memcpy (p+c->num,data,len), c->num += (unsigned int)len; + return 1; + } + else { + memcpy (p+c->num,data,n), c->num = 0; + len-=n, data+=n; + sha512_block_data_order (c,p,1); + } + } + + if (len >= sizeof(c->u)) + { +#ifndef SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA + if ((size_t)data%sizeof(c->u.d[0]) != 0) + while (len >= sizeof(c->u)) + memcpy (p,data,sizeof(c->u)), + sha512_block_data_order (c,p,1), + len -= sizeof(c->u), + data += sizeof(c->u); + else +#endif + sha512_block_data_order (c,data,len/sizeof(c->u)), + data += len, + len %= sizeof(c->u), + data -= len; + } + + if (len != 0) memcpy (p,data,len), c->num = (int)len; + + return 1; + } + +int SHA384_Update (SHA512_CTX *c, const void *data, size_t len) +{ return SHA512_Update (c,data,len); } + +void SHA512_Transform (SHA512_CTX *c, const unsigned char *data) + { +#ifndef SHA512_BLOCK_CAN_MANAGE_UNALIGNED_DATA + if ((size_t)data%sizeof(c->u.d[0]) != 0) + memcpy(c->u.p,data,sizeof(c->u.p)), + data = c->u.p; +#endif + sha512_block_data_order (c,data,1); + } + +unsigned char *SHA384(const unsigned char *d, size_t n, unsigned char *md) + { + SHA512_CTX c; + static unsigned char m[SHA384_DIGEST_LENGTH]; + + if (md == NULL) md=m; + SHA384_Init(&c); + SHA512_Update(&c,d,n); + SHA512_Final(md,&c); + explicit_bzero(&c,sizeof(c)); + return(md); + } + +unsigned char *SHA512(const unsigned char *d, size_t n, unsigned char *md) + { + SHA512_CTX c; + static unsigned char m[SHA512_DIGEST_LENGTH]; + + if (md == NULL) md=m; + SHA512_Init(&c); + SHA512_Update(&c,d,n); + SHA512_Final(md,&c); + explicit_bzero(&c,sizeof(c)); + return(md); + } + +#ifndef SHA512_ASM +static const SHA_LONG64 K512[80] = { + U64(0x428a2f98d728ae22),U64(0x7137449123ef65cd), + U64(0xb5c0fbcfec4d3b2f),U64(0xe9b5dba58189dbbc), + U64(0x3956c25bf348b538),U64(0x59f111f1b605d019), + U64(0x923f82a4af194f9b),U64(0xab1c5ed5da6d8118), + U64(0xd807aa98a3030242),U64(0x12835b0145706fbe), + U64(0x243185be4ee4b28c),U64(0x550c7dc3d5ffb4e2), + U64(0x72be5d74f27b896f),U64(0x80deb1fe3b1696b1), + U64(0x9bdc06a725c71235),U64(0xc19bf174cf692694), + U64(0xe49b69c19ef14ad2),U64(0xefbe4786384f25e3), + U64(0x0fc19dc68b8cd5b5),U64(0x240ca1cc77ac9c65), + U64(0x2de92c6f592b0275),U64(0x4a7484aa6ea6e483), + U64(0x5cb0a9dcbd41fbd4),U64(0x76f988da831153b5), + U64(0x983e5152ee66dfab),U64(0xa831c66d2db43210), + U64(0xb00327c898fb213f),U64(0xbf597fc7beef0ee4), + U64(0xc6e00bf33da88fc2),U64(0xd5a79147930aa725), + U64(0x06ca6351e003826f),U64(0x142929670a0e6e70), + U64(0x27b70a8546d22ffc),U64(0x2e1b21385c26c926), + U64(0x4d2c6dfc5ac42aed),U64(0x53380d139d95b3df), + U64(0x650a73548baf63de),U64(0x766a0abb3c77b2a8), + U64(0x81c2c92e47edaee6),U64(0x92722c851482353b), + U64(0xa2bfe8a14cf10364),U64(0xa81a664bbc423001), + U64(0xc24b8b70d0f89791),U64(0xc76c51a30654be30), + U64(0xd192e819d6ef5218),U64(0xd69906245565a910), + U64(0xf40e35855771202a),U64(0x106aa07032bbd1b8), + U64(0x19a4c116b8d2d0c8),U64(0x1e376c085141ab53), + 
U64(0x2748774cdf8eeb99),U64(0x34b0bcb5e19b48a8), + U64(0x391c0cb3c5c95a63),U64(0x4ed8aa4ae3418acb), + U64(0x5b9cca4f7763e373),U64(0x682e6ff3d6b2b8a3), + U64(0x748f82ee5defb2fc),U64(0x78a5636f43172f60), + U64(0x84c87814a1f0ab72),U64(0x8cc702081a6439ec), + U64(0x90befffa23631e28),U64(0xa4506cebde82bde9), + U64(0xbef9a3f7b2c67915),U64(0xc67178f2e372532b), + U64(0xca273eceea26619c),U64(0xd186b8c721c0c207), + U64(0xeada7dd6cde0eb1e),U64(0xf57d4f7fee6ed178), + U64(0x06f067aa72176fba),U64(0x0a637dc5a2c898a6), + U64(0x113f9804bef90dae),U64(0x1b710b35131c471b), + U64(0x28db77f523047d84),U64(0x32caab7b40c72493), + U64(0x3c9ebe0a15c9bebc),U64(0x431d67c49c100d4c), + U64(0x4cc5d4becb3e42b6),U64(0x597f299cfc657e2a), + U64(0x5fcb6fab3ad6faec),U64(0x6c44198c4a475817) }; + +#if defined(__GNUC__) && __GNUC__>=2 && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM) +# if defined(__x86_64) || defined(__x86_64__) +# define ROTR(a,n) ({ SHA_LONG64 ret; \ + asm ("rorq %1,%0" \ + : "=r"(ret) \ + : "J"(n),"0"(a) \ + : "cc"); ret; }) +# define PULL64(x) ({ SHA_LONG64 ret=*((const SHA_LONG64 *)(&(x))); \ + asm ("bswapq %0" \ + : "=r"(ret) \ + : "0"(ret)); ret; }) +# elif (defined(__i386) || defined(__i386__)) +# define PULL64(x) ({ const unsigned int *p=(const unsigned int *)(&(x));\ + unsigned int hi=p[0],lo=p[1]; \ + asm ("bswapl %0; bswapl %1;" \ + : "=r"(lo),"=r"(hi) \ + : "0"(lo),"1"(hi)); \ + ((SHA_LONG64)hi)<<32|lo; }) +# elif (defined(_ARCH_PPC) && defined(__64BIT__)) || defined(_ARCH_PPC64) +# define ROTR(a,n) ({ SHA_LONG64 ret; \ + asm ("rotrdi %0,%1,%2" \ + : "=r"(ret) \ + : "r"(a),"K"(n)); ret; }) +# endif +#endif + +#ifndef PULL64 +#define B(x,j) (((SHA_LONG64)(*(((const unsigned char *)(&x))+j)))<<((7-j)*8)) +#define PULL64(x) (B(x,0)|B(x,1)|B(x,2)|B(x,3)|B(x,4)|B(x,5)|B(x,6)|B(x,7)) +#endif + +#ifndef ROTR +#define ROTR(x,s) (((x)>>s) | (x)<<(64-s)) +#endif + +#define Sigma0(x) (ROTR((x),28) ^ ROTR((x),34) ^ ROTR((x),39)) +#define Sigma1(x) (ROTR((x),14) ^ ROTR((x),18) ^ ROTR((x),41)) +#define sigma0(x) (ROTR((x),1) ^ ROTR((x),8) ^ ((x)>>7)) +#define sigma1(x) (ROTR((x),19) ^ ROTR((x),61) ^ ((x)>>6)) + +#define Ch(x,y,z) (((x) & (y)) ^ ((~(x)) & (z))) +#define Maj(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z))) + + +#if defined(__i386) || defined(__i386__) || defined(_M_IX86) +/* + * This code should give better results on 32-bit CPU with less than + * ~24 registers, both size and performance wise... 
+ */ +static void sha512_block_data_order (SHA512_CTX *ctx, const void *in, size_t num) + { + const SHA_LONG64 *W=in; + SHA_LONG64 A,E,T; + SHA_LONG64 X[9+80],*F; + int i; + + while (num--) { + + F = X+80; + A = ctx->h[0]; F[1] = ctx->h[1]; + F[2] = ctx->h[2]; F[3] = ctx->h[3]; + E = ctx->h[4]; F[5] = ctx->h[5]; + F[6] = ctx->h[6]; F[7] = ctx->h[7]; + + for (i=0;i<16;i++,F--) + { + T = PULL64(W[i]); + F[0] = A; + F[4] = E; + F[8] = T; + T += F[7] + Sigma1(E) + Ch(E,F[5],F[6]) + K512[i]; + E = F[3] + T; + A = T + Sigma0(A) + Maj(A,F[1],F[2]); + } + + for (;i<80;i++,F--) + { + T = sigma0(F[8+16-1]); + T += sigma1(F[8+16-14]); + T += F[8+16] + F[8+16-9]; + + F[0] = A; + F[4] = E; + F[8] = T; + T += F[7] + Sigma1(E) + Ch(E,F[5],F[6]) + K512[i]; + E = F[3] + T; + A = T + Sigma0(A) + Maj(A,F[1],F[2]); + } + + ctx->h[0] += A; ctx->h[1] += F[1]; + ctx->h[2] += F[2]; ctx->h[3] += F[3]; + ctx->h[4] += E; ctx->h[5] += F[5]; + ctx->h[6] += F[6]; ctx->h[7] += F[7]; + + W+=SHA_LBLOCK; + } + } + +#elif defined(OPENSSL_SMALL_FOOTPRINT) + +static void sha512_block_data_order (SHA512_CTX *ctx, const void *in, size_t num) + { + const SHA_LONG64 *W=in; + SHA_LONG64 a,b,c,d,e,f,g,h,s0,s1,T1,T2; + SHA_LONG64 X[16]; + int i; + + while (num--) { + + a = ctx->h[0]; b = ctx->h[1]; c = ctx->h[2]; d = ctx->h[3]; + e = ctx->h[4]; f = ctx->h[5]; g = ctx->h[6]; h = ctx->h[7]; + + for (i=0;i<16;i++) + { +#if BYTE_ORDER == BIG_ENDIAN + T1 = X[i] = W[i]; +#else + T1 = X[i] = PULL64(W[i]); +#endif + T1 += h + Sigma1(e) + Ch(e,f,g) + K512[i]; + T2 = Sigma0(a) + Maj(a,b,c); + h = g; g = f; f = e; e = d + T1; + d = c; c = b; b = a; a = T1 + T2; + } + + for (;i<80;i++) + { + s0 = X[(i+1)&0x0f]; s0 = sigma0(s0); + s1 = X[(i+14)&0x0f]; s1 = sigma1(s1); + + T1 = X[i&0xf] += s0 + s1 + X[(i+9)&0xf]; + T1 += h + Sigma1(e) + Ch(e,f,g) + K512[i]; + T2 = Sigma0(a) + Maj(a,b,c); + h = g; g = f; f = e; e = d + T1; + d = c; c = b; b = a; a = T1 + T2; + } + + ctx->h[0] += a; ctx->h[1] += b; ctx->h[2] += c; ctx->h[3] += d; + ctx->h[4] += e; ctx->h[5] += f; ctx->h[6] += g; ctx->h[7] += h; + + W+=SHA_LBLOCK; + } + } + +#else + +#define ROUND_00_15(i,a,b,c,d,e,f,g,h) do { \ + T1 += h + Sigma1(e) + Ch(e,f,g) + K512[i]; \ + h = Sigma0(a) + Maj(a,b,c); \ + d += T1; h += T1; } while (0) + +#define ROUND_16_80(i,j,a,b,c,d,e,f,g,h,X) do { \ + s0 = X[(j+1)&0x0f]; s0 = sigma0(s0); \ + s1 = X[(j+14)&0x0f]; s1 = sigma1(s1); \ + T1 = X[(j)&0x0f] += s0 + s1 + X[(j+9)&0x0f]; \ + ROUND_00_15(i+j,a,b,c,d,e,f,g,h); } while (0) + +static void sha512_block_data_order (SHA512_CTX *ctx, const void *in, size_t num) + { + const SHA_LONG64 *W=in; + SHA_LONG64 a,b,c,d,e,f,g,h,s0,s1,T1; + SHA_LONG64 X[16]; + int i; + + while (num--) { + + a = ctx->h[0]; b = ctx->h[1]; c = ctx->h[2]; d = ctx->h[3]; + e = ctx->h[4]; f = ctx->h[5]; g = ctx->h[6]; h = ctx->h[7]; + +#if BYTE_ORDER == BIG_ENDIAN + T1 = X[0] = W[0]; ROUND_00_15(0,a,b,c,d,e,f,g,h); + T1 = X[1] = W[1]; ROUND_00_15(1,h,a,b,c,d,e,f,g); + T1 = X[2] = W[2]; ROUND_00_15(2,g,h,a,b,c,d,e,f); + T1 = X[3] = W[3]; ROUND_00_15(3,f,g,h,a,b,c,d,e); + T1 = X[4] = W[4]; ROUND_00_15(4,e,f,g,h,a,b,c,d); + T1 = X[5] = W[5]; ROUND_00_15(5,d,e,f,g,h,a,b,c); + T1 = X[6] = W[6]; ROUND_00_15(6,c,d,e,f,g,h,a,b); + T1 = X[7] = W[7]; ROUND_00_15(7,b,c,d,e,f,g,h,a); + T1 = X[8] = W[8]; ROUND_00_15(8,a,b,c,d,e,f,g,h); + T1 = X[9] = W[9]; ROUND_00_15(9,h,a,b,c,d,e,f,g); + T1 = X[10] = W[10]; ROUND_00_15(10,g,h,a,b,c,d,e,f); + T1 = X[11] = W[11]; ROUND_00_15(11,f,g,h,a,b,c,d,e); + T1 = X[12] = W[12]; ROUND_00_15(12,e,f,g,h,a,b,c,d); + T1 = 
X[13] = W[13]; ROUND_00_15(13,d,e,f,g,h,a,b,c); + T1 = X[14] = W[14]; ROUND_00_15(14,c,d,e,f,g,h,a,b); + T1 = X[15] = W[15]; ROUND_00_15(15,b,c,d,e,f,g,h,a); +#else + T1 = X[0] = PULL64(W[0]); ROUND_00_15(0,a,b,c,d,e,f,g,h); + T1 = X[1] = PULL64(W[1]); ROUND_00_15(1,h,a,b,c,d,e,f,g); + T1 = X[2] = PULL64(W[2]); ROUND_00_15(2,g,h,a,b,c,d,e,f); + T1 = X[3] = PULL64(W[3]); ROUND_00_15(3,f,g,h,a,b,c,d,e); + T1 = X[4] = PULL64(W[4]); ROUND_00_15(4,e,f,g,h,a,b,c,d); + T1 = X[5] = PULL64(W[5]); ROUND_00_15(5,d,e,f,g,h,a,b,c); + T1 = X[6] = PULL64(W[6]); ROUND_00_15(6,c,d,e,f,g,h,a,b); + T1 = X[7] = PULL64(W[7]); ROUND_00_15(7,b,c,d,e,f,g,h,a); + T1 = X[8] = PULL64(W[8]); ROUND_00_15(8,a,b,c,d,e,f,g,h); + T1 = X[9] = PULL64(W[9]); ROUND_00_15(9,h,a,b,c,d,e,f,g); + T1 = X[10] = PULL64(W[10]); ROUND_00_15(10,g,h,a,b,c,d,e,f); + T1 = X[11] = PULL64(W[11]); ROUND_00_15(11,f,g,h,a,b,c,d,e); + T1 = X[12] = PULL64(W[12]); ROUND_00_15(12,e,f,g,h,a,b,c,d); + T1 = X[13] = PULL64(W[13]); ROUND_00_15(13,d,e,f,g,h,a,b,c); + T1 = X[14] = PULL64(W[14]); ROUND_00_15(14,c,d,e,f,g,h,a,b); + T1 = X[15] = PULL64(W[15]); ROUND_00_15(15,b,c,d,e,f,g,h,a); +#endif + + for (i=16;i<80;i+=16) + { + ROUND_16_80(i, 0,a,b,c,d,e,f,g,h,X); + ROUND_16_80(i, 1,h,a,b,c,d,e,f,g,X); + ROUND_16_80(i, 2,g,h,a,b,c,d,e,f,X); + ROUND_16_80(i, 3,f,g,h,a,b,c,d,e,X); + ROUND_16_80(i, 4,e,f,g,h,a,b,c,d,X); + ROUND_16_80(i, 5,d,e,f,g,h,a,b,c,X); + ROUND_16_80(i, 6,c,d,e,f,g,h,a,b,X); + ROUND_16_80(i, 7,b,c,d,e,f,g,h,a,X); + ROUND_16_80(i, 8,a,b,c,d,e,f,g,h,X); + ROUND_16_80(i, 9,h,a,b,c,d,e,f,g,X); + ROUND_16_80(i,10,g,h,a,b,c,d,e,f,X); + ROUND_16_80(i,11,f,g,h,a,b,c,d,e,X); + ROUND_16_80(i,12,e,f,g,h,a,b,c,d,X); + ROUND_16_80(i,13,d,e,f,g,h,a,b,c,X); + ROUND_16_80(i,14,c,d,e,f,g,h,a,b,X); + ROUND_16_80(i,15,b,c,d,e,f,g,h,a,X); + } + + ctx->h[0] += a; ctx->h[1] += b; ctx->h[2] += c; ctx->h[3] += d; + ctx->h[4] += e; ctx->h[5] += f; ctx->h[6] += g; ctx->h[7] += h; + + W+=SHA_LBLOCK; + } + } + +#endif + +#endif /* SHA512_ASM */ + +#endif /* !OPENSSL_NO_SHA512 */ diff --git a/code/crypto/sha/sha_locl.h b/code/crypto/sha/sha_locl.h new file mode 100644 index 0000000..f6d305d --- /dev/null +++ b/code/crypto/sha/sha_locl.h @@ -0,0 +1,419 @@ +/* $OpenBSD: sha_locl.h,v 1.23 2016/12/23 23:22:25 patrick Exp $ */ +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. 
Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + +#include <stdlib.h> +#include <stddef.h> + +#include <compat.h> +#include <sha.h> + +#define DATA_ORDER_IS_BIG_ENDIAN + +#define HASH_LONG SHA_LONG +#define HASH_CTX SHA_CTX +#define HASH_CBLOCK SHA_CBLOCK +#define HASH_MAKE_STRING(c,s) do { \ + unsigned long ll; \ + ll=(c)->h0; HOST_l2c(ll,(s)); \ + ll=(c)->h1; HOST_l2c(ll,(s)); \ + ll=(c)->h2; HOST_l2c(ll,(s)); \ + ll=(c)->h3; HOST_l2c(ll,(s)); \ + ll=(c)->h4; HOST_l2c(ll,(s)); \ + } while (0) + +# define HASH_UPDATE SHA1_Update +# define HASH_TRANSFORM SHA1_Transform +# define HASH_FINAL SHA1_Final +# define HASH_INIT SHA1_Init +# define HASH_BLOCK_DATA_ORDER sha1_block_data_order +# define Xupdate(a,ix,ia,ib,ic,id) ( (a)=(ia^ib^ic^id), \ + ix=(a)=ROTATE((a),1) \ + ) + +__BEGIN_HIDDEN_DECLS + +#ifndef SHA1_ASM +static +#endif + +void sha1_block_data_order (SHA_CTX *c, const void *p,size_t num); + +__END_HIDDEN_DECLS + +#include "md32_common.h" + +#define INIT_DATA_h0 0x67452301UL +#define INIT_DATA_h1 0xefcdab89UL +#define INIT_DATA_h2 0x98badcfeUL +#define INIT_DATA_h3 0x10325476UL +#define INIT_DATA_h4 0xc3d2e1f0UL + +int SHA1_Init(SHA_CTX *c) + { + memset (c,0,sizeof(*c)); + c->h0=INIT_DATA_h0; + c->h1=INIT_DATA_h1; + c->h2=INIT_DATA_h2; + c->h3=INIT_DATA_h3; + c->h4=INIT_DATA_h4; + return 1; + } + +#define K_00_19 0x5a827999UL +#define K_20_39 0x6ed9eba1UL +#define K_40_59 0x8f1bbcdcUL +#define K_60_79 0xca62c1d6UL + +/* As pointed out by Wei Dai <weidai@eskimo.com>, F() below can be + * simplified to the code in F_00_19. 
Wei attributes these optimisations + * to Peter Gutmann's SHS code, and he attributes it to Rich Schroeppel. + * #define F(x,y,z) (((x) & (y)) | ((~(x)) & (z))) + * I've just become aware of another tweak to be made, again from Wei Dai, + * in F_40_59, (x&a)|(y&a) -> (x|y)&a + */ +#define F_00_19(b,c,d) ((((c) ^ (d)) & (b)) ^ (d)) +#define F_20_39(b,c,d) ((b) ^ (c) ^ (d)) +#define F_40_59(b,c,d) (((b) & (c)) | (((b)|(c)) & (d))) +#define F_60_79(b,c,d) F_20_39(b,c,d) + +#ifndef OPENSSL_SMALL_FOOTPRINT + +#define BODY_00_15(i,a,b,c,d,e,f,xi) \ + (f)=xi+(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#define BODY_16_19(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \ + Xupdate(f,xi,xa,xb,xc,xd); \ + (f)+=(e)+K_00_19+ROTATE((a),5)+F_00_19((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#define BODY_20_31(i,a,b,c,d,e,f,xi,xa,xb,xc,xd) \ + Xupdate(f,xi,xa,xb,xc,xd); \ + (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#define BODY_32_39(i,a,b,c,d,e,f,xa,xb,xc,xd) \ + Xupdate(f,xa,xa,xb,xc,xd); \ + (f)+=(e)+K_20_39+ROTATE((a),5)+F_20_39((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#define BODY_40_59(i,a,b,c,d,e,f,xa,xb,xc,xd) \ + Xupdate(f,xa,xa,xb,xc,xd); \ + (f)+=(e)+K_40_59+ROTATE((a),5)+F_40_59((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#define BODY_60_79(i,a,b,c,d,e,f,xa,xb,xc,xd) \ + Xupdate(f,xa,xa,xb,xc,xd); \ + (f)=xa+(e)+K_60_79+ROTATE((a),5)+F_60_79((b),(c),(d)); \ + (b)=ROTATE((b),30); + +#ifdef X +#undef X +#endif +#ifndef MD32_XARRAY + /* + * Originally X was an array. As it's automatic it's natural + * to expect RISC compiler to accommodate at least part of it in + * the register bank, isn't it? Unfortunately not all compilers + * "find" this expectation reasonable:-( On order to make such + * compilers generate better code I replace X[] with a bunch of + * X0, X1, etc. See the function body below... + * <appro@fy.chalmers.se> + */ +# define X(i) XX##i +#else + /* + * However! Some compilers (most notably HP C) get overwhelmed by + * that many local variables so that we have to have the way to + * fall down to the original behavior. 
+ */ +# define X(i) XX[i] +#endif + +#if !defined(SHA1_ASM) +#include <machine/endian.h> +static void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num) + { + const unsigned char *data=p; + unsigned MD32_REG_T A,B,C,D,E,T,l; +#ifndef MD32_XARRAY + unsigned MD32_REG_T XX0, XX1, XX2, XX3, XX4, XX5, XX6, XX7, + XX8, XX9,XX10,XX11,XX12,XX13,XX14,XX15; +#else + SHA_LONG XX[16]; +#endif + + A=c->h0; + B=c->h1; + C=c->h2; + D=c->h3; + E=c->h4; + + for (;;) + { + + if (BYTE_ORDER != LITTLE_ENDIAN && + sizeof(SHA_LONG)==4 && ((size_t)p%4)==0) + { + const SHA_LONG *W=(const SHA_LONG *)data; + + X( 0) = W[0]; X( 1) = W[ 1]; + BODY_00_15( 0,A,B,C,D,E,T,X( 0)); X( 2) = W[ 2]; + BODY_00_15( 1,T,A,B,C,D,E,X( 1)); X( 3) = W[ 3]; + BODY_00_15( 2,E,T,A,B,C,D,X( 2)); X( 4) = W[ 4]; + BODY_00_15( 3,D,E,T,A,B,C,X( 3)); X( 5) = W[ 5]; + BODY_00_15( 4,C,D,E,T,A,B,X( 4)); X( 6) = W[ 6]; + BODY_00_15( 5,B,C,D,E,T,A,X( 5)); X( 7) = W[ 7]; + BODY_00_15( 6,A,B,C,D,E,T,X( 6)); X( 8) = W[ 8]; + BODY_00_15( 7,T,A,B,C,D,E,X( 7)); X( 9) = W[ 9]; + BODY_00_15( 8,E,T,A,B,C,D,X( 8)); X(10) = W[10]; + BODY_00_15( 9,D,E,T,A,B,C,X( 9)); X(11) = W[11]; + BODY_00_15(10,C,D,E,T,A,B,X(10)); X(12) = W[12]; + BODY_00_15(11,B,C,D,E,T,A,X(11)); X(13) = W[13]; + BODY_00_15(12,A,B,C,D,E,T,X(12)); X(14) = W[14]; + BODY_00_15(13,T,A,B,C,D,E,X(13)); X(15) = W[15]; + BODY_00_15(14,E,T,A,B,C,D,X(14)); + BODY_00_15(15,D,E,T,A,B,C,X(15)); + + data += SHA_CBLOCK; + } + else + { + HOST_c2l(data,l); X( 0)=l; HOST_c2l(data,l); X( 1)=l; + BODY_00_15( 0,A,B,C,D,E,T,X( 0)); HOST_c2l(data,l); X( 2)=l; + BODY_00_15( 1,T,A,B,C,D,E,X( 1)); HOST_c2l(data,l); X( 3)=l; + BODY_00_15( 2,E,T,A,B,C,D,X( 2)); HOST_c2l(data,l); X( 4)=l; + BODY_00_15( 3,D,E,T,A,B,C,X( 3)); HOST_c2l(data,l); X( 5)=l; + BODY_00_15( 4,C,D,E,T,A,B,X( 4)); HOST_c2l(data,l); X( 6)=l; + BODY_00_15( 5,B,C,D,E,T,A,X( 5)); HOST_c2l(data,l); X( 7)=l; + BODY_00_15( 6,A,B,C,D,E,T,X( 6)); HOST_c2l(data,l); X( 8)=l; + BODY_00_15( 7,T,A,B,C,D,E,X( 7)); HOST_c2l(data,l); X( 9)=l; + BODY_00_15( 8,E,T,A,B,C,D,X( 8)); HOST_c2l(data,l); X(10)=l; + BODY_00_15( 9,D,E,T,A,B,C,X( 9)); HOST_c2l(data,l); X(11)=l; + BODY_00_15(10,C,D,E,T,A,B,X(10)); HOST_c2l(data,l); X(12)=l; + BODY_00_15(11,B,C,D,E,T,A,X(11)); HOST_c2l(data,l); X(13)=l; + BODY_00_15(12,A,B,C,D,E,T,X(12)); HOST_c2l(data,l); X(14)=l; + BODY_00_15(13,T,A,B,C,D,E,X(13)); HOST_c2l(data,l); X(15)=l; + BODY_00_15(14,E,T,A,B,C,D,X(14)); + BODY_00_15(15,D,E,T,A,B,C,X(15)); + } + + BODY_16_19(16,C,D,E,T,A,B,X( 0),X( 0),X( 2),X( 8),X(13)); + BODY_16_19(17,B,C,D,E,T,A,X( 1),X( 1),X( 3),X( 9),X(14)); + BODY_16_19(18,A,B,C,D,E,T,X( 2),X( 2),X( 4),X(10),X(15)); + BODY_16_19(19,T,A,B,C,D,E,X( 3),X( 3),X( 5),X(11),X( 0)); + + BODY_20_31(20,E,T,A,B,C,D,X( 4),X( 4),X( 6),X(12),X( 1)); + BODY_20_31(21,D,E,T,A,B,C,X( 5),X( 5),X( 7),X(13),X( 2)); + BODY_20_31(22,C,D,E,T,A,B,X( 6),X( 6),X( 8),X(14),X( 3)); + BODY_20_31(23,B,C,D,E,T,A,X( 7),X( 7),X( 9),X(15),X( 4)); + BODY_20_31(24,A,B,C,D,E,T,X( 8),X( 8),X(10),X( 0),X( 5)); + BODY_20_31(25,T,A,B,C,D,E,X( 9),X( 9),X(11),X( 1),X( 6)); + BODY_20_31(26,E,T,A,B,C,D,X(10),X(10),X(12),X( 2),X( 7)); + BODY_20_31(27,D,E,T,A,B,C,X(11),X(11),X(13),X( 3),X( 8)); + BODY_20_31(28,C,D,E,T,A,B,X(12),X(12),X(14),X( 4),X( 9)); + BODY_20_31(29,B,C,D,E,T,A,X(13),X(13),X(15),X( 5),X(10)); + BODY_20_31(30,A,B,C,D,E,T,X(14),X(14),X( 0),X( 6),X(11)); + BODY_20_31(31,T,A,B,C,D,E,X(15),X(15),X( 1),X( 7),X(12)); + + BODY_32_39(32,E,T,A,B,C,D,X( 0),X( 2),X( 8),X(13)); + BODY_32_39(33,D,E,T,A,B,C,X( 1),X( 3),X( 9),X(14)); + 
BODY_32_39(34,C,D,E,T,A,B,X( 2),X( 4),X(10),X(15)); + BODY_32_39(35,B,C,D,E,T,A,X( 3),X( 5),X(11),X( 0)); + BODY_32_39(36,A,B,C,D,E,T,X( 4),X( 6),X(12),X( 1)); + BODY_32_39(37,T,A,B,C,D,E,X( 5),X( 7),X(13),X( 2)); + BODY_32_39(38,E,T,A,B,C,D,X( 6),X( 8),X(14),X( 3)); + BODY_32_39(39,D,E,T,A,B,C,X( 7),X( 9),X(15),X( 4)); + + BODY_40_59(40,C,D,E,T,A,B,X( 8),X(10),X( 0),X( 5)); + BODY_40_59(41,B,C,D,E,T,A,X( 9),X(11),X( 1),X( 6)); + BODY_40_59(42,A,B,C,D,E,T,X(10),X(12),X( 2),X( 7)); + BODY_40_59(43,T,A,B,C,D,E,X(11),X(13),X( 3),X( 8)); + BODY_40_59(44,E,T,A,B,C,D,X(12),X(14),X( 4),X( 9)); + BODY_40_59(45,D,E,T,A,B,C,X(13),X(15),X( 5),X(10)); + BODY_40_59(46,C,D,E,T,A,B,X(14),X( 0),X( 6),X(11)); + BODY_40_59(47,B,C,D,E,T,A,X(15),X( 1),X( 7),X(12)); + BODY_40_59(48,A,B,C,D,E,T,X( 0),X( 2),X( 8),X(13)); + BODY_40_59(49,T,A,B,C,D,E,X( 1),X( 3),X( 9),X(14)); + BODY_40_59(50,E,T,A,B,C,D,X( 2),X( 4),X(10),X(15)); + BODY_40_59(51,D,E,T,A,B,C,X( 3),X( 5),X(11),X( 0)); + BODY_40_59(52,C,D,E,T,A,B,X( 4),X( 6),X(12),X( 1)); + BODY_40_59(53,B,C,D,E,T,A,X( 5),X( 7),X(13),X( 2)); + BODY_40_59(54,A,B,C,D,E,T,X( 6),X( 8),X(14),X( 3)); + BODY_40_59(55,T,A,B,C,D,E,X( 7),X( 9),X(15),X( 4)); + BODY_40_59(56,E,T,A,B,C,D,X( 8),X(10),X( 0),X( 5)); + BODY_40_59(57,D,E,T,A,B,C,X( 9),X(11),X( 1),X( 6)); + BODY_40_59(58,C,D,E,T,A,B,X(10),X(12),X( 2),X( 7)); + BODY_40_59(59,B,C,D,E,T,A,X(11),X(13),X( 3),X( 8)); + + BODY_60_79(60,A,B,C,D,E,T,X(12),X(14),X( 4),X( 9)); + BODY_60_79(61,T,A,B,C,D,E,X(13),X(15),X( 5),X(10)); + BODY_60_79(62,E,T,A,B,C,D,X(14),X( 0),X( 6),X(11)); + BODY_60_79(63,D,E,T,A,B,C,X(15),X( 1),X( 7),X(12)); + BODY_60_79(64,C,D,E,T,A,B,X( 0),X( 2),X( 8),X(13)); + BODY_60_79(65,B,C,D,E,T,A,X( 1),X( 3),X( 9),X(14)); + BODY_60_79(66,A,B,C,D,E,T,X( 2),X( 4),X(10),X(15)); + BODY_60_79(67,T,A,B,C,D,E,X( 3),X( 5),X(11),X( 0)); + BODY_60_79(68,E,T,A,B,C,D,X( 4),X( 6),X(12),X( 1)); + BODY_60_79(69,D,E,T,A,B,C,X( 5),X( 7),X(13),X( 2)); + BODY_60_79(70,C,D,E,T,A,B,X( 6),X( 8),X(14),X( 3)); + BODY_60_79(71,B,C,D,E,T,A,X( 7),X( 9),X(15),X( 4)); + BODY_60_79(72,A,B,C,D,E,T,X( 8),X(10),X( 0),X( 5)); + BODY_60_79(73,T,A,B,C,D,E,X( 9),X(11),X( 1),X( 6)); + BODY_60_79(74,E,T,A,B,C,D,X(10),X(12),X( 2),X( 7)); + BODY_60_79(75,D,E,T,A,B,C,X(11),X(13),X( 3),X( 8)); + BODY_60_79(76,C,D,E,T,A,B,X(12),X(14),X( 4),X( 9)); + BODY_60_79(77,B,C,D,E,T,A,X(13),X(15),X( 5),X(10)); + BODY_60_79(78,A,B,C,D,E,T,X(14),X( 0),X( 6),X(11)); + BODY_60_79(79,T,A,B,C,D,E,X(15),X( 1),X( 7),X(12)); + + c->h0=(c->h0+E)&0xffffffffL; + c->h1=(c->h1+T)&0xffffffffL; + c->h2=(c->h2+A)&0xffffffffL; + c->h3=(c->h3+B)&0xffffffffL; + c->h4=(c->h4+C)&0xffffffffL; + + if (--num == 0) break; + + A=c->h0; + B=c->h1; + C=c->h2; + D=c->h3; + E=c->h4; + + } + } +#endif + +#else /* OPENSSL_SMALL_FOOTPRINT */ + +#define BODY_00_15(xi) do { \ + T=E+K_00_19+F_00_19(B,C,D); \ + E=D, D=C, C=ROTATE(B,30), B=A; \ + A=ROTATE(A,5)+T+xi; } while(0) + +#define BODY_16_19(xa,xb,xc,xd) do { \ + Xupdate(T,xa,xa,xb,xc,xd); \ + T+=E+K_00_19+F_00_19(B,C,D); \ + E=D, D=C, C=ROTATE(B,30), B=A; \ + A=ROTATE(A,5)+T; } while(0) + +#define BODY_20_39(xa,xb,xc,xd) do { \ + Xupdate(T,xa,xa,xb,xc,xd); \ + T+=E+K_20_39+F_20_39(B,C,D); \ + E=D, D=C, C=ROTATE(B,30), B=A; \ + A=ROTATE(A,5)+T; } while(0) + +#define BODY_40_59(xa,xb,xc,xd) do { \ + Xupdate(T,xa,xa,xb,xc,xd); \ + T+=E+K_40_59+F_40_59(B,C,D); \ + E=D, D=C, C=ROTATE(B,30), B=A; \ + A=ROTATE(A,5)+T; } while(0) + +#define BODY_60_79(xa,xb,xc,xd) do { \ + Xupdate(T,xa,xa,xb,xc,xd); \ + T=E+K_60_79+F_60_79(B,C,D); \ + E=D, D=C, 
C=ROTATE(B,30), B=A; \ + A=ROTATE(A,5)+T+xa; } while(0) + +#if !defined(SHA1_ASM) +static void HASH_BLOCK_DATA_ORDER (SHA_CTX *c, const void *p, size_t num) + { + const unsigned char *data=p; + unsigned MD32_REG_T A,B,C,D,E,T,l; + int i; + SHA_LONG X[16]; + + A=c->h0; + B=c->h1; + C=c->h2; + D=c->h3; + E=c->h4; + + for (;;) + { + for (i=0;i<16;i++) + { HOST_c2l(data,l); X[i]=l; BODY_00_15(X[i]); } + for (i=0;i<4;i++) + { BODY_16_19(X[i], X[i+2], X[i+8], X[(i+13)&15]); } + for (;i<24;i++) + { BODY_20_39(X[i&15], X[(i+2)&15], X[(i+8)&15],X[(i+13)&15]); } + for (i=0;i<20;i++) + { BODY_40_59(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); } + for (i=4;i<24;i++) + { BODY_60_79(X[(i+8)&15],X[(i+10)&15],X[i&15], X[(i+5)&15]); } + + c->h0=(c->h0+A)&0xffffffffL; + c->h1=(c->h1+B)&0xffffffffL; + c->h2=(c->h2+C)&0xffffffffL; + c->h3=(c->h3+D)&0xffffffffL; + c->h4=(c->h4+E)&0xffffffffL; + + if (--num == 0) break; + + A=c->h0; + B=c->h1; + C=c->h2; + D=c->h3; + E=c->h4; + + } + } +#endif + +#endif diff --git a/code/crypto/test/Makefile b/code/crypto/test/Makefile new file mode 100644 index 0000000..b6a92f2 --- /dev/null +++ b/code/crypto/test/Makefile @@ -0,0 +1,33 @@ +CFLAGS=-I.. -I../include -O3 +aead_dep=../compat/explicit_bzero.o ../compat/timingsafe_memcmp.o ../compat/timingsafe_bcmp.o \ + ../chacha/chacha.o ../poly1305/poly1305.o ../curve25519/curve25519.o ../curve25519/curve25519-generic.o \ + ../sha/sha512.o ../e_chacha20poly1305.o +dsa_dep=../*/*.a + +%.o: %.c + $(CC) $(CFLAGS) -c $< + +all: aead aead_enc aead_dec ed25519 ed25519_sign ed25519_open + +aead: aead.o + $(CC) $(LDFLAGS) -o $@ $< $(aead_dep) + +aead_enc: aead_enc.o + $(CC) $(LDFLAGS) -o $@ $< $(aead_dep) + +aead_dec: aead_dec.o + $(CC) $(LDFLAGS) -o $@ $< $(aead_dep) + +ed25519: ed25519.o + $(CC) $(LDFLAGS) -o $@ $< $(dsa_dep) + +ed25519_sign: ed25519_sign.o + $(CC) $(LDFLAGS) -o $@ $< $(dsa_dep) + +ed25519_open: ed25519_open.o + $(CC) $(LDFLAGS) -o $@ $< $(dsa_dep) + + +clean: + rm -f *.o + rm -f aead aead_enc aead_dec ed25519 ed25519_sign ed25519_open diff --git a/code/crypto/test/aead.c b/code/crypto/test/aead.c new file mode 100644 index 0000000..a2e0da7 --- /dev/null +++ b/code/crypto/test/aead.c @@ -0,0 +1,55 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <crypto.h> +#include <curve25519.h> + +#define NONCE_LEN 8 +#define TAG_LEN 16 +#define KEY_LEN 32 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char in_msg[1024]; + unsigned char out_msg[1024]; + size_t in_msg_len; + size_t out_msg_len; + int rv; + unsigned char public1[KEY_LEN]; + unsigned char private1[KEY_LEN]; + unsigned char public2[KEY_LEN]; + unsigned char private2[KEY_LEN]; + unsigned char key1[KEY_LEN]; + unsigned char key2[KEY_LEN]; + unsigned char nonce[NONCE_LEN]; + + strcpy((char *)in_msg, "PERA JE CAR!"); + in_msg_len = strlen((char *)in_msg) + 1; + + v_rng(nonce, NONCE_LEN); + X25519_keypair(public1, private1, v_rng); + X25519_keypair(public2, private2, v_rng); + + X25519(key1, private1, public2); + rv = aead_chacha20_poly1305_seal(out_msg, &out_msg_len, 1024, key1, TAG_LEN, nonce, NONCE_LEN, in_msg, in_msg_len, NULL, 0); + printf("SEAL RV:%d ILEN:%lu OLEN:%lu\n", rv, in_msg_len, out_msg_len); + + memset(in_msg, 0, sizeof(in_msg)); + + X25519(key2, private2, public1); + rv 
= aead_chacha20_poly1305_open(in_msg, &in_msg_len, 1024, key2, TAG_LEN, nonce, NONCE_LEN, out_msg, out_msg_len, NULL, 0); + printf("OPEN RV:%d ILEN:%lu OLEN:%lu\n", rv, in_msg_len, out_msg_len); + printf("MSG: %s\n", in_msg); +}
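The aead_chacha20_poly1305_open() call above succeeds with key2 because X25519 is a Diffie-Hellman function: X25519(private1, public2) and X25519(private2, public1) produce the same 32-byte shared secret, so key1 and key2 are identical. A minimal standalone sketch of that property, assuming only the X25519() call already used in this tree (the helper name is illustrative, and plain memcmp is acceptable because both values are local test data):

#include <string.h>

#include <curve25519.h>

/* Sketch: confirms the ECDH agreement that aead.c relies on.
 * Returns 1 when both sides derive the same 32-byte shared secret. */
static int x25519_shared_secret_matches(const unsigned char public1[32],
    const unsigned char private1[32], const unsigned char public2[32],
    const unsigned char private2[32])
{
	unsigned char k1[32], k2[32];

	X25519(k1, private1, public2);	/* side 1: own private key, peer public key */
	X25519(k2, private2, public1);	/* side 2: own private key, peer public key */
	return memcmp(k1, k2, 32) == 0;
}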
\ No newline at end of file diff --git a/code/crypto/test/aead_dec.c b/code/crypto/test/aead_dec.c new file mode 100644 index 0000000..7deb587 --- /dev/null +++ b/code/crypto/test/aead_dec.c @@ -0,0 +1,46 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <crypto.h> +#include <curve25519.h> + +#define NONCE_LEN 8 +#define TAG_LEN 16 +#define KEY_LEN 32 +#define MSG_LEN 29 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char in_msg[1024]; + unsigned char out_msg[1024]; + size_t in_msg_len; + int rv; + unsigned char public[KEY_LEN]; + unsigned char private[KEY_LEN]; + unsigned char key[KEY_LEN]; + unsigned char nonce[NONCE_LEN]; + + int fd = open("msg.enc", O_RDONLY); + read(fd, private, KEY_LEN); + read(fd, public, KEY_LEN); + read(fd, nonce, NONCE_LEN); + read(fd, out_msg, MSG_LEN); + close(fd); + + X25519(key, private, public); + rv = aead_chacha20_poly1305_open(in_msg, &in_msg_len, 1024, key, TAG_LEN, nonce, NONCE_LEN, out_msg, MSG_LEN, NULL, 0); + printf("OPEN RV:%d ILEN:%lu OLEN:%d\n", rv, in_msg_len, MSG_LEN); + printf("MSG: %s\n", in_msg); +}
\ No newline at end of file diff --git a/code/crypto/test/aead_enc.c b/code/crypto/test/aead_enc.c new file mode 100644 index 0000000..a103490 --- /dev/null +++ b/code/crypto/test/aead_enc.c @@ -0,0 +1,55 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <crypto.h> +#include <curve25519.h> + +#define NONCE_LEN 8 +#define TAG_LEN 16 +#define KEY_LEN 32 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char in_msg[1024]; + unsigned char out_msg[1024]; + size_t in_msg_len; + size_t out_msg_len; + int rv; + unsigned char public1[KEY_LEN]; + unsigned char private1[KEY_LEN]; + unsigned char public2[KEY_LEN]; + unsigned char private2[KEY_LEN]; + unsigned char key[KEY_LEN]; + unsigned char nonce[NONCE_LEN]; + + strcpy((char *)in_msg, "PERA JE CAR!"); + in_msg_len = strlen((char *)in_msg) + 1; + + v_rng(nonce, NONCE_LEN); + X25519_keypair(public1, private1, v_rng); + X25519_keypair(public2, private2, v_rng); + + X25519(key, private1, public2); + rv = aead_chacha20_poly1305_seal(out_msg, &out_msg_len, 1024, key, TAG_LEN, nonce, NONCE_LEN, in_msg, in_msg_len, NULL, 0); + printf("SEAL RV:%d ILEN:%lu OLEN:%lu\n", rv, in_msg_len, out_msg_len); + + unlink("msg.enc"); + int fd = open("msg.enc", O_WRONLY | O_CREAT, 0600); + write(fd, private2, KEY_LEN); + write(fd, public1, KEY_LEN); + write(fd, nonce, NONCE_LEN); + write(fd, out_msg, out_msg_len); + close(fd); +}
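aead_dec.c reads this file back with a hard-coded MSG_LEN of 29, which lines up because the sealed output written here is the 13-byte plaintext ("PERA JE CAR!" plus its NUL terminator) followed by the 16-byte Poly1305 tag. A sketch of the msg.enc layout implied by the write() calls above; the offsets are inferred from this test pair rather than defined by the library, and the ENC_OFF_* names are illustrative only:

/* msg.enc layout assumed by aead_enc.c and aead_dec.c (byte offsets into the file) */
#define ENC_OFF_PRIVATE2	 0	/* KEY_LEN   = 32: recipient private key */
#define ENC_OFF_PUBLIC1		32	/* KEY_LEN   = 32: sender public key */
#define ENC_OFF_NONCE		64	/* NONCE_LEN =  8: nonce passed to seal and open */
#define ENC_OFF_SEALED		72	/* in_msg_len + TAG_LEN = 13 + 16 = 29 bytes (MSG_LEN in aead_dec.c) */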
\ No newline at end of file diff --git a/code/crypto/test/ed25519.c b/code/crypto/test/ed25519.c new file mode 100644 index 0000000..21334cb --- /dev/null +++ b/code/crypto/test/ed25519.c @@ -0,0 +1,37 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <curve25519.h> + +#define KEY_LEN 32 +#define SIG_LEN 64 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char msg[1024]; + size_t msg_len; + int rv; + unsigned char public[KEY_LEN]; + unsigned char private[KEY_LEN * 2]; + unsigned char signature[SIG_LEN]; + + strcpy((char *)msg, "PERA JE CAR!"); + msg_len = strlen((char *)msg) + 1; + + ED25519_keypair(public, private, v_rng); + ED25519_sign(signature, msg, msg_len, private); + rv = ED25519_verify(msg, msg_len, signature, public); + printf("OPEN rv:%d\n", rv); +}
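ED25519_verify() reports success only for the exact bytes that were signed, so a natural negative check is to corrupt the message and expect the result to drop to 0. A hypothetical extra step for the end of main() above, reusing its variables:

	/* Hypothetical tamper check: flip one byte of the signed message and verify again. */
	msg[0] ^= 0x01;
	rv = ED25519_verify(msg, msg_len, signature, public);
	printf("TAMPERED rv:%d\n", rv);	/* expected to print 0 */
	msg[0] ^= 0x01;			/* restore the original message */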
\ No newline at end of file diff --git a/code/crypto/test/ed25519_open.c b/code/crypto/test/ed25519_open.c new file mode 100644 index 0000000..66f32f5 --- /dev/null +++ b/code/crypto/test/ed25519_open.c @@ -0,0 +1,40 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <curve25519.h> + +#define KEY_LEN 32 +#define SIG_LEN 64 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char msg[1024]; + size_t msg_len; + int rv; + unsigned char public[KEY_LEN]; + unsigned char private[KEY_LEN * 2]; + unsigned char signature[SIG_LEN]; + + strcpy((char *)msg, "PERA JE CAR!"); + msg_len = strlen((char *)msg) + 1; + + int fd = open("msg.sig", O_RDONLY); + read(fd, public, KEY_LEN); + read(fd, signature, SIG_LEN); + close(fd); + + rv = ED25519_verify(msg, msg_len, signature, public); + printf("OPEN rv:%d\n", rv); +}
\ No newline at end of file diff --git a/code/crypto/test/ed25519_sign.c b/code/crypto/test/ed25519_sign.c new file mode 100644 index 0000000..da098bd --- /dev/null +++ b/code/crypto/test/ed25519_sign.c @@ -0,0 +1,40 @@ +#include <string.h> +#include <stdio.h> +#include <stdlib.h> +#include <fcntl.h> +#include <unistd.h> + +#include <curve25519.h> + +#define KEY_LEN 32 +#define SIG_LEN 64 + +static int v_rng(void *buf, size_t bufsize) { + int fd; + + if((fd = open("/dev/urandom", O_RDONLY)) < 0) return -1; + size_t nb = read(fd, buf, bufsize); + close(fd); + if (nb != bufsize) return -1; + return 0; +} + +int main(int argc, char *argv[]) { + unsigned char msg[1024]; + size_t msg_len; + unsigned char public[KEY_LEN]; + unsigned char private[KEY_LEN * 2]; + unsigned char signature[SIG_LEN]; + + strcpy((char *)msg, "PERA JE CAR!"); + msg_len = strlen((char *)msg) + 1; + + ED25519_keypair(public, private, v_rng); + ED25519_sign(signature, msg, msg_len, private); + + unlink("msg.sig"); + int fd = open("msg.sig", O_WRONLY | O_CREAT, 0644); + write(fd, public, KEY_LEN); + write(fd, signature, SIG_LEN); + close(fd); +}
\ No newline at end of file