author     George Hazan <george.hazan@gmail.com>    2024-06-21 14:29:17 +0300
committer  George Hazan <george.hazan@gmail.com>    2024-06-21 14:29:17 +0300
commit     46ea86584a9787c8b9dc3983cf23d9b5b93b5841
tree       fbaf3793ae2170f7982f08a62c028a23cd9afedd  /libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h
parent     82e75be329dd0f30c0281ef9c3c08488b89d109f
fixes #4477 (libsodium: update to 1.0.20)
Diffstat (limited to 'libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h')
-rw-r--r--  libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h  249
1 file changed, 249 insertions(+), 0 deletions(-)
diff --git a/libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h b/libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h
new file mode 100644
index 0000000000..6e503dc35a
--- /dev/null
+++ b/libs/libsodium/src/crypto_aead/aegis128l/aegis128l_common.h
@@ -0,0 +1,249 @@
+#define RATE 32
+
+static void
+aegis128l_init(const uint8_t *key, const uint8_t *nonce, aes_block_t *const state)
+{
+ static CRYPTO_ALIGN(AES_BLOCK_LENGTH)
+ const uint8_t c0_[AES_BLOCK_LENGTH] = { 0x00, 0x01, 0x01, 0x02, 0x03, 0x05, 0x08, 0x0d,
+ 0x15, 0x22, 0x37, 0x59, 0x90, 0xe9, 0x79, 0x62 };
+ static CRYPTO_ALIGN(AES_BLOCK_LENGTH)
+ const uint8_t c1_[AES_BLOCK_LENGTH] = { 0xdb, 0x3d, 0x18, 0x55, 0x6d, 0xc2, 0x2f, 0xf1,
+ 0x20, 0x11, 0x31, 0x42, 0x73, 0xb5, 0x28, 0xdd };
+
+ const aes_block_t c0 = AES_BLOCK_LOAD(c0_);
+ const aes_block_t c1 = AES_BLOCK_LOAD(c1_);
+ aes_block_t k;
+ aes_block_t n;
+ int i;
+
+ k = AES_BLOCK_LOAD(key);
+ n = AES_BLOCK_LOAD(nonce);
+
+ state[0] = AES_BLOCK_XOR(k, n);
+ state[1] = c1;
+ state[2] = c0;
+ state[3] = c1;
+ state[4] = AES_BLOCK_XOR(k, n);
+ state[5] = AES_BLOCK_XOR(k, c0);
+ state[6] = AES_BLOCK_XOR(k, c1);
+ state[7] = AES_BLOCK_XOR(k, c0);
+ for (i = 0; i < 10; i++) {
+ aegis128l_update(state, n, k);
+ }
+}
+
+static int
+aegis128l_mac(uint8_t *mac, size_t maclen, size_t adlen, size_t mlen, aes_block_t *const state)
+{
+ aes_block_t tmp;
+ int i;
+
+ tmp = AES_BLOCK_LOAD_64x2(((uint64_t) mlen) << 3, ((uint64_t) adlen) << 3);
+ tmp = AES_BLOCK_XOR(tmp, state[2]);
+
+ for (i = 0; i < 7; i++) {
+ aegis128l_update(state, tmp, tmp);
+ }
+
+ if (maclen == 16) {
+ tmp = AES_BLOCK_XOR(state[6], AES_BLOCK_XOR(state[5], state[4]));
+ tmp = AES_BLOCK_XOR(tmp, AES_BLOCK_XOR(state[3], state[2]));
+ tmp = AES_BLOCK_XOR(tmp, AES_BLOCK_XOR(state[1], state[0]));
+ AES_BLOCK_STORE(mac, tmp);
+ } else if (maclen == 32) {
+ tmp = AES_BLOCK_XOR(state[3], state[2]);
+ tmp = AES_BLOCK_XOR(tmp, AES_BLOCK_XOR(state[1], state[0]));
+ AES_BLOCK_STORE(mac, tmp);
+ tmp = AES_BLOCK_XOR(state[7], state[6]);
+ tmp = AES_BLOCK_XOR(tmp, AES_BLOCK_XOR(state[5], state[4]));
+ AES_BLOCK_STORE(mac + 16, tmp);
+ } else {
+ memset(mac, 0, maclen);
+ return -1;
+ }
+ return 0;
+}
+
+static inline void
+aegis128l_absorb(const uint8_t *const src, aes_block_t *const state)
+{
+ aes_block_t msg0, msg1;
+
+ msg0 = AES_BLOCK_LOAD(src);
+ msg1 = AES_BLOCK_LOAD(src + AES_BLOCK_LENGTH);
+ aegis128l_update(state, msg0, msg1);
+}
+
+static inline void
+aegis128l_absorb2(const uint8_t *const src, aes_block_t *const state)
+{
+ aes_block_t msg0, msg1, msg2, msg3;
+
+ msg0 = AES_BLOCK_LOAD(src + 0 * AES_BLOCK_LENGTH);
+ msg1 = AES_BLOCK_LOAD(src + 1 * AES_BLOCK_LENGTH);
+ msg2 = AES_BLOCK_LOAD(src + 2 * AES_BLOCK_LENGTH);
+ msg3 = AES_BLOCK_LOAD(src + 3 * AES_BLOCK_LENGTH);
+ aegis128l_update(state, msg0, msg1);
+ aegis128l_update(state, msg2, msg3);
+}
+
+static void
+aegis128l_enc(uint8_t *const dst, const uint8_t *const src, aes_block_t *const state)
+{
+ aes_block_t msg0, msg1;
+ aes_block_t tmp0, tmp1;
+
+ msg0 = AES_BLOCK_LOAD(src);
+ msg1 = AES_BLOCK_LOAD(src + AES_BLOCK_LENGTH);
+ tmp0 = AES_BLOCK_XOR(msg0, state[6]);
+ tmp0 = AES_BLOCK_XOR(tmp0, state[1]);
+ tmp1 = AES_BLOCK_XOR(msg1, state[5]);
+ tmp1 = AES_BLOCK_XOR(tmp1, state[2]);
+ tmp0 = AES_BLOCK_XOR(tmp0, AES_BLOCK_AND(state[2], state[3]));
+ tmp1 = AES_BLOCK_XOR(tmp1, AES_BLOCK_AND(state[6], state[7]));
+ AES_BLOCK_STORE(dst, tmp0);
+ AES_BLOCK_STORE(dst + AES_BLOCK_LENGTH, tmp1);
+
+ aegis128l_update(state, msg0, msg1);
+}
+
+static void
+aegis128l_dec(uint8_t *const dst, const uint8_t *const src, aes_block_t *const state)
+{
+ aes_block_t msg0, msg1;
+
+ msg0 = AES_BLOCK_LOAD(src);
+ msg1 = AES_BLOCK_LOAD(src + AES_BLOCK_LENGTH);
+ msg0 = AES_BLOCK_XOR(msg0, state[6]);
+ msg0 = AES_BLOCK_XOR(msg0, state[1]);
+ msg1 = AES_BLOCK_XOR(msg1, state[5]);
+ msg1 = AES_BLOCK_XOR(msg1, state[2]);
+ msg0 = AES_BLOCK_XOR(msg0, AES_BLOCK_AND(state[2], state[3]));
+ msg1 = AES_BLOCK_XOR(msg1, AES_BLOCK_AND(state[6], state[7]));
+ AES_BLOCK_STORE(dst, msg0);
+ AES_BLOCK_STORE(dst + AES_BLOCK_LENGTH, msg1);
+
+ aegis128l_update(state, msg0, msg1);
+}
+
+static void
+aegis128l_declast(uint8_t *const dst, const uint8_t *const src, size_t len,
+ aes_block_t *const state)
+{
+ uint8_t pad[RATE];
+ aes_block_t msg0, msg1;
+
+ memset(pad, 0, sizeof pad);
+ memcpy(pad, src, len);
+
+ msg0 = AES_BLOCK_LOAD(pad);
+ msg1 = AES_BLOCK_LOAD(pad + AES_BLOCK_LENGTH);
+ msg0 = AES_BLOCK_XOR(msg0, state[6]);
+ msg0 = AES_BLOCK_XOR(msg0, state[1]);
+ msg1 = AES_BLOCK_XOR(msg1, state[5]);
+ msg1 = AES_BLOCK_XOR(msg1, state[2]);
+ msg0 = AES_BLOCK_XOR(msg0, AES_BLOCK_AND(state[2], state[3]));
+ msg1 = AES_BLOCK_XOR(msg1, AES_BLOCK_AND(state[6], state[7]));
+ AES_BLOCK_STORE(pad, msg0);
+ AES_BLOCK_STORE(pad + AES_BLOCK_LENGTH, msg1);
+
+ memset(pad + len, 0, sizeof pad - len);
+ memcpy(dst, pad, len);
+
+ msg0 = AES_BLOCK_LOAD(pad);
+ msg1 = AES_BLOCK_LOAD(pad + AES_BLOCK_LENGTH);
+
+ aegis128l_update(state, msg0, msg1);
+}
+
+static int
+encrypt_detached(uint8_t *c, uint8_t *mac, size_t maclen, const uint8_t *m, size_t mlen,
+ const uint8_t *ad, size_t adlen, const uint8_t *npub, const uint8_t *k)
+{
+ aes_block_t state[8];
+ CRYPTO_ALIGN(RATE) uint8_t src[RATE];
+ CRYPTO_ALIGN(RATE) uint8_t dst[RATE];
+ size_t i;
+
+ aegis128l_init(k, npub, state);
+
+ for (i = 0; i + RATE * 2 <= adlen; i += RATE * 2) {
+ aegis128l_absorb2(ad + i, state);
+ }
+ for (; i + RATE <= adlen; i += RATE) {
+ aegis128l_absorb(ad + i, state);
+ }
+ if (adlen % RATE) {
+ memset(src, 0, RATE);
+ memcpy(src, ad + i, adlen % RATE);
+ aegis128l_absorb(src, state);
+ }
+ for (i = 0; i + RATE <= mlen; i += RATE) {
+ aegis128l_enc(c + i, m + i, state);
+ }
+ if (mlen % RATE) {
+ memset(src, 0, RATE);
+ memcpy(src, m + i, mlen % RATE);
+ aegis128l_enc(dst, src, state);
+ memcpy(c + i, dst, mlen % RATE);
+ }
+
+ return aegis128l_mac(mac, maclen, adlen, mlen, state);
+}
+
+static int
+decrypt_detached(uint8_t *m, const uint8_t *c, size_t clen, const uint8_t *mac, size_t maclen,
+ const uint8_t *ad, size_t adlen, const uint8_t *npub, const uint8_t *k)
+{
+ aes_block_t state[8];
+ CRYPTO_ALIGN(RATE) uint8_t src[RATE];
+ CRYPTO_ALIGN(RATE) uint8_t dst[RATE];
+ CRYPTO_ALIGN(16) uint8_t computed_mac[32];
+ const size_t mlen = clen;
+ size_t i;
+ int ret;
+
+ aegis128l_init(k, npub, state);
+
+ for (i = 0; i + RATE * 2 <= adlen; i += RATE * 2) {
+ aegis128l_absorb2(ad + i, state);
+ }
+ for (; i + RATE <= adlen; i += RATE) {
+ aegis128l_absorb(ad + i, state);
+ }
+ if (adlen % RATE) {
+ memset(src, 0, RATE);
+ memcpy(src, ad + i, adlen % RATE);
+ aegis128l_absorb(src, state);
+ }
+ if (m != NULL) {
+ for (i = 0; i + RATE <= mlen; i += RATE) {
+ aegis128l_dec(m + i, c + i, state);
+ }
+ } else {
+ for (i = 0; i + RATE <= mlen; i += RATE) {
+ aegis128l_dec(dst, c + i, state);
+ }
+ }
+ if (mlen % RATE) {
+ if (m != NULL) {
+ aegis128l_declast(m + i, c + i, mlen % RATE, state);
+ } else {
+ aegis128l_declast(dst, c + i, mlen % RATE, state);
+ }
+ }
+
+ COMPILER_ASSERT(sizeof computed_mac >= 32);
+ ret = -1;
+ if (aegis128l_mac(computed_mac, maclen, adlen, mlen, state) == 0) {
+ if (maclen == 16) {
+ ret = crypto_verify_16(computed_mac, mac);
+ } else if (maclen == 32) {
+ ret = crypto_verify_32(computed_mac, mac);
+ }
+ }
+ if (ret != 0 && m != NULL) {
+ memset(m, 0, mlen);
+ }
+ return ret;
+}
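
For context, the encrypt_detached()/decrypt_detached() routines added above are the internal back end behind libsodium's public crypto_aead_aegis128l_* functions, which this 1.0.20 update makes available to the tree. The following is a minimal usage sketch of that public API, not part of the patch itself; it assumes the standard combined-mode entry points crypto_aead_aegis128l_encrypt()/crypto_aead_aegis128l_decrypt(), their size constants (crypto_aead_aegis128l_KEYBYTES, _NPUBBYTES, _ABYTES) and crypto_aead_aegis128l_keygen(), with error handling kept to a minimum:

#include <sodium.h>
#include <stdio.h>

int
main(void)
{
    unsigned char       key[crypto_aead_aegis128l_KEYBYTES];
    unsigned char       nonce[crypto_aead_aegis128l_NPUBBYTES];
    const unsigned char msg[] = "test message";
    const unsigned char ad[]  = "associated data";
    unsigned char       c[sizeof msg + crypto_aead_aegis128l_ABYTES];
    unsigned char       m[sizeof msg];
    unsigned long long  clen, mlen;

    if (sodium_init() < 0) {
        return 1; /* library could not be initialized */
    }
    crypto_aead_aegis128l_keygen(key);    /* random 128-bit key */
    randombytes_buf(nonce, sizeof nonce); /* random public nonce */

    /* combined mode: the tag is appended to the ciphertext */
    crypto_aead_aegis128l_encrypt(c, &clen, msg, sizeof msg,
                                  ad, sizeof ad, NULL, nonce, key);

    /* a non-zero return means the tag did not verify; the internal
     * decrypt_detached() above zeroes the plaintext buffer in that case */
    if (crypto_aead_aegis128l_decrypt(m, &mlen, NULL, c, clen,
                                      ad, sizeof ad, nonce, key) != 0) {
        return 1;
    }
    printf("decrypted: %s\n", m);
    return 0;
}

Note that aegis128l_mac() in this header accepts either a 16- or a 32-byte tag; the public wrappers pick their tag length via crypto_aead_aegis128l_ABYTES, and the detached variants of the API map directly onto the encrypt_detached()/decrypt_detached() helpers shown above.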