Index: sys/crypto/openssl/ossl.h =================================================================== --- sys/crypto/openssl/ossl.h +++ sys/crypto/openssl/ossl.h @@ -57,6 +57,21 @@ #define CONTEXT_DUMMY_SIZE 61 #endif +typedef struct { + uint64_t hi, lo; +} uint128_t; + +struct ossl_gcm_context { + union { + uint64_t u[2]; + uint32_t d[4]; + uint8_t c[16]; + } Yi, EKi, EK0, len, Xi, H; + uint128_t Htable[1]; + uint32_t mres, ares; + void *key; +}; + struct ossl_hash_context { uint32_t dummy[CONTEXT_DUMMY_SIZE]; } __aligned(32); @@ -91,6 +106,7 @@ extern struct auth_hash ossl_hash_sha512; extern struct ossl_cipher ossl_cipher_aes_cbc; +extern struct ossl_cipher ossl_cipher_aes_gcm; extern struct ossl_cipher ossl_cipher_chacha20; #endif /* !__OSSL_H__ */ Index: sys/crypto/openssl/ossl.c =================================================================== --- sys/crypto/openssl/ossl.c +++ sys/crypto/openssl/ossl.c @@ -144,6 +144,16 @@ return (NULL); } return (&ossl_cipher_aes_cbc); + case CRYPTO_AES_NIST_GCM_16: + switch (csp->csp_cipher_klen * 8) { + case 128: + case 192: + case 256: + break; + default: + return (NULL); + } + return (&ossl_cipher_aes_gcm); case CRYPTO_CHACHA20: if (csp->csp_cipher_klen != CHACHA_KEY_SIZE) return (NULL); @@ -183,6 +193,8 @@ switch (csp->csp_cipher_alg) { case CRYPTO_CHACHA20_POLY1305: break; + case CRYPTO_AES_NIST_GCM_16: + break; default: return (EINVAL); } @@ -279,6 +291,9 @@ ossl_newsession_hash(s, csp); error = ossl_newsession_cipher(s, csp); break; + case CSP_MODE_AEAD: + error = ossl_newsession_cipher(s, csp); + break; } return (error); @@ -400,10 +415,20 @@ error = ossl_process_eta(s, crp, csp); break; case CSP_MODE_AEAD: - if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) - error = ossl_chacha20_poly1305_encrypt(crp, csp); - else - error = ossl_chacha20_poly1305_decrypt(crp, csp); + switch (csp->csp_cipher_alg) { + case CRYPTO_CHACHA20_POLY1305: + if (CRYPTO_OP_IS_ENCRYPT(crp->crp_op)) + error = + ossl_chacha20_poly1305_encrypt(crp, csp); 
+			else
+				error =
+				    ossl_chacha20_poly1305_decrypt(crp, csp);
+			break;
+		case CRYPTO_AES_NIST_GCM_16:
+			error =
+			    s->cipher.cipher->process(&s->cipher, crp, csp);
+			break;
+		}
 		break;
 	default:
 		__assert_unreachable();
Index: sys/crypto/openssl/ossl_aes.c
===================================================================
--- sys/crypto/openssl/ossl_aes.c
+++ sys/crypto/openssl/ossl_aes.c
@@ -45,6 +45,7 @@
 #endif
 
 static ossl_cipher_process_t ossl_aes_cbc;
+static ossl_cipher_process_t ossl_aes_gcm;
 
 struct ossl_cipher ossl_cipher_aes_cbc = {
 	.type = CRYPTO_AES_CBC,
@@ -57,6 +58,17 @@
 	.process = ossl_aes_cbc
 };
 
+struct ossl_cipher ossl_cipher_aes_gcm = {
+	.type = CRYPTO_AES_NIST_GCM_16,
+	.blocksize = AES_BLOCK_LEN,
+	.ivsize = AES_GCM_IV_LEN,	/* gcm_setiv() consumes exactly 12 IV bytes */
+
+	/* Filled during initialization based on CPU caps. */
+	.set_encrypt_key = NULL,
+	.set_decrypt_key = NULL,
+	.process = ossl_aes_gcm
+};
+
 static int
 ossl_aes_cbc(struct ossl_session_cipher *s, struct cryptop *crp,
     const struct crypto_session_params *csp)
@@ -153,3 +165,88 @@
 	explicit_bzero(&key, sizeof(key));
 	return (0);
 }
+
+static int
+ossl_aes_gcm(struct ossl_session_cipher *s, struct cryptop *crp,
+    const struct crypto_session_params *csp)
+{
+	struct crypto_buffer_cursor cc_in, cc_out;
+	unsigned char block[EALG_MAX_BLOCK_LEN];
+	const unsigned char *inseg;
+	unsigned char *outseg;
+	size_t alen, plen, seglen, inlen, outlen;
+	struct ossl_cipher_context key;
+	struct ossl_gcm_context ctx;
+	struct ossl_cipher *cipher;
+	bool encrypt;
+	int error;
+
+	error = 0;
+	cipher = s->cipher;
+	encrypt = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
+	plen = crp->crp_payload_length;
+	alen = crp->crp_aad_length;
+
+	/* Key preparation.
*/ + if (crp->crp_cipher_key != NULL) { + error = cipher->set_encrypt_key(crp->crp_cipher_key, + 8 * csp->csp_cipher_klen, &key); + if (error) + return (error); + } else { + key = s->enc_ctx; + } + + crypto_read_iv(crp, block); + + gcm_init(&ctx, &key); + gcm_setiv(&ctx, block); + + crypto_cursor_init(&cc_in, &crp->crp_buf); + crypto_cursor_advance(&cc_in, crp->crp_aad_start); + while (alen > 0) { + inseg = crypto_cursor_segment(&cc_in, &inlen); + seglen = MIN(alen, inlen); + + gcm_aad(&ctx, inseg, seglen); + + crypto_cursor_advance(&cc_in, seglen); + alen -= seglen; + } + + /* Derived from ossl_chacha20.c */ + crypto_cursor_init(&cc_in, &crp->crp_buf); + crypto_cursor_advance(&cc_in, crp->crp_payload_start); + if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) { + crypto_cursor_init(&cc_out, &crp->crp_obuf); + crypto_cursor_advance(&cc_out, crp->crp_payload_output_start); + } else { + cc_out = cc_in; + } + while (plen > 0) { + inseg = crypto_cursor_segment(&cc_in, &inlen); + outseg = crypto_cursor_segment(&cc_out, &outlen); + seglen = MIN(plen, MIN(inlen, outlen)); + + AES_GCM_ENCRYPT(&ctx, inseg, outseg, seglen, encrypt); + + crypto_cursor_advance(&cc_in, seglen); + crypto_cursor_advance(&cc_out, seglen); + plen -= seglen; + } + + if (encrypt) { + gcm_tag(&ctx, block, 16); + crypto_copyback(crp, crp->crp_digest_start, 16, block); + } else { + crypto_copydata(crp, crp->crp_digest_start, 16, block); + /* If tag validation fails return EBADMSG. 
*/ + error = gcm_finish(&ctx, block, 16); + if (error != 0) + error = EBADMSG; + } + + explicit_bzero(block, sizeof(block)); + explicit_bzero(&key, sizeof(key)); + return (error); +} Index: sys/crypto/openssl/ossl_arm.h =================================================================== --- sys/crypto/openssl/ossl_arm.h +++ sys/crypto/openssl/ossl_arm.h @@ -44,4 +44,273 @@ bsaes_cbc_encrypt(in, out, length, key, iv, encrypt); } -#endif + +#define BIT(x) (1ULL << (x)) +#define IS_LITTLE_ENDIAN BYTE_ORDER == LITTLE_ENDIAN + +#define GETU32(pt) (((uint32_t)(pt)[0] << 24) ^ \ + ((uint32_t)(pt)[1] << 16) ^ \ + ((uint32_t)(pt)[2] << 8) ^ \ + ((uint32_t)(pt)[3])) + +#define PUTU32(ct, st) { \ + (ct)[0] = (uint8_t)((st) >> 24); \ + (ct)[1] = (uint8_t)((st) >> 16); \ + (ct)[2] = (uint8_t)((st) >> 8); \ + (ct)[3] = (uint8_t)(st); \ + } + +void gcm_init_neon(uint128_t *Htable, const uint64_t *Xi); +void gcm_ghash_neon(uint64_t *Xi, const uint128_t *Htable, const uint8_t *inp, size_t len); +void gcm_gmult_neon(uint64_t *Xi, const uint128_t *Htable); +void AES_encrypt(void *in, void *out, const void *key); + +static void +gcm_init(struct ossl_gcm_context *ctx, void *key) +{ + + memset(ctx, 0, sizeof(*ctx)); + ctx->key = key; + + /* encrypt 0^128 => const value H */ + AES_encrypt(ctx->H.c, ctx->H.c, (const void*)key); + + if (IS_LITTLE_ENDIAN) { + uint8_t *p = ctx->H.c; + uint64_t hi, lo; + hi = (uint64_t)GETU32(p) << 32 | GETU32(p + 4); + lo = (uint64_t)GETU32(p+8) << 32 | GETU32(p + 12); + ctx->H.u[0] = hi; + ctx->H.u[1] = lo; + } + gcm_init_neon(ctx->Htable, ctx->H.u); +} + +/* According to NIST recomendation - const IV len (96 bits) */ +static void +gcm_setiv(struct ossl_gcm_context *ctx, const unsigned char *iv) +{ + uint32_t ctr; + + ctx->len.u[0] = 0; /* AAD length */ + ctx->len.u[1] = 0; /* Message length */ + ctx->ares = 0; + ctx->mres = 0; + ctx->Xi.u[0] = 0; + ctx->Xi.u[1] = 0; + + memcpy(ctx->Yi.c, iv, 12); + memset(&ctx->Yi.c[12], 0, 4); + ctx->Yi.c[15] = 1; + ctr = 
1; + + /* Encrypt first counter value */ + AES_encrypt(ctx->Yi.c, ctx->EK0.c, ctx->key); + ++ctr; + if (IS_LITTLE_ENDIAN) + PUTU32(ctx->Yi.c + 12, ctr) + else + ctx->Yi.d[3] = ctr; +} + +static int +gcm_aad(struct ossl_gcm_context *ctx, const unsigned char *aad, size_t len) +{ + size_t i; + uint32_t n; + + uint64_t alen = ctx->len.u[0]; + + if (ctx->len.u[1]) + return -2; + + alen += len; + if (alen > BIT(61) || (sizeof(len) == 8 && alen < len)) + return -1; + ctx->len.u[0] = alen; + + /* AAD's reserve - buffered data */ + n = ctx->ares; + if (n) { + while (n && len) { + ctx->Xi.c[n] ^= *(aad++); + --len; + n = (n + 1) % 16; + } + if (n == 0) + gcm_gmult_neon(ctx->Xi.u, ctx->Htable); + else { + ctx->ares = n; + return 0; + } + } + if ((i = (len & (size_t) - 16))) { + gcm_ghash_neon(ctx->Xi.u, ctx->Htable, aad, i); + aad += i; + len -= i; + } + if (len) { + n = (uint32_t)len; + for (i = 0; i< len; ++i) + ctx->Xi.c[i] ^= aad[i]; + } + ctx->ares = n; + return 0; +} + +static int +gcm_encrypt(struct ossl_gcm_context *ctx, const unsigned char *in, unsigned char *out, size_t len) +{ + uint32_t n, ctr, mres; + size_t i; + + /* Message len */ + uint64_t mlen = ctx->len.u[1] + len; + if (mlen > (BIT(36) - 32) || (sizeof(len) == 8 && mlen < len)) + return -1; + ctx->len.u[1] = mlen; + + mres = ctx->mres; + + /* Encrypt AAD first - with padding */ + if (ctx->ares) { + gcm_gmult_neon(ctx->Xi.u, ctx->Htable); + ctx->ares = 0; + } + + /* Get counter value */ + if (IS_LITTLE_ENDIAN) + ctr = GETU32(ctx->Yi.c + 12); + else + ctr = ctx->Yi.d[3]; + + n = mres % 16; + + /* Encrypt message */ + for (i = 0; i < len; ++i) { + if (n == 0) { + AES_encrypt(ctx->Yi.c, ctx->EKi.c, ctx->key); + ++ctr; + if (IS_LITTLE_ENDIAN) + PUTU32(ctx->Yi.c + 12, ctr) + else + ctx->Yi.d[3] = ctr; + } + /* XOR - ghash */ + ctx->Xi.c[n] ^= out[i] = in[i] ^ ctx->EKi.c[n]; + mres = n = (n + 1) % 16; + /* Mult to complete ghash */ + if (n == 0) + gcm_gmult_neon(ctx->Xi.u, ctx->Htable); + } + + ctx->mres = mres; 
+	return 0;
+}
+
+static int
+gcm_decrypt(struct ossl_gcm_context *ctx, const unsigned char *in, unsigned char *out, size_t len)
+{
+	uint32_t n, ctr, mres;
+	uint64_t mlen;
+	size_t i;
+
+	mlen = ctx->len.u[1] + len;
+	if (mlen > (BIT(36) - 32) || (sizeof(len) == 8 && mlen < len))
+		return -1;
+	ctx->len.u[1] = mlen;
+	mres = ctx->mres;
+
+	if (ctx->ares) {
+		gcm_gmult_neon(ctx->Xi.u, ctx->Htable);
+		ctx->ares = 0;
+	}
+
+	/* Get counter value */
+	if (IS_LITTLE_ENDIAN)
+		ctr = GETU32(ctx->Yi.c + 12);
+	else
+		ctr = ctx->Yi.d[3];
+
+	n = mres % 16;
+
+	/* Decrypt message */
+	for (i = 0; i < len; ++i) {
+		uint8_t c;
+		/* Get EKi value (encrypted counter) */
+		if (n == 0) {
+			AES_encrypt(ctx->Yi.c, ctx->EKi.c, ctx->key);
+			++ctr;
+			if (IS_LITTLE_ENDIAN)
+				PUTU32(ctx->Yi.c + 12, ctr)
+			else
+				ctx->Yi.d[3] = ctr;
+		}
+		c = in[i];
+		out[i] = c ^ ctx->EKi.c[n];
+		ctx->Xi.c[n] ^= c;
+		mres = n = (n + 1) % 16;
+		/* Mult to complete ghash */
+		if (n == 0)
+			gcm_gmult_neon(ctx->Xi.u, ctx->Htable);
+	}
+
+	ctx->mres = mres;
+	return 0;
+}
+
+static int
+gcm_finish(struct ossl_gcm_context *ctx, const unsigned char *tag, size_t len)
+{
+	/* Get bit size (<< 3 == *8) */
+	uint64_t alen = ctx->len.u[0] << 3;
+	uint64_t clen = ctx->len.u[1] << 3;
+
+	/* Process buffered data */
+	if (ctx->mres || ctx->ares)
+		gcm_gmult_neon(ctx->Xi.u, ctx->Htable);
+
+	if (IS_LITTLE_ENDIAN) {
+		uint8_t *p = ctx->len.c;
+
+		ctx->len.u[0] = alen;
+		ctx->len.u[1] = clen;
+
+		alen = (uint64_t)GETU32(p) << 32 | GETU32(p + 4);
+		clen = (uint64_t)GETU32(p + 8) << 32 | GETU32(p + 12);
+	}
+
+	/* Ghash len data */
+	ctx->Xi.u[0] ^= alen;
+	ctx->Xi.u[1] ^= clen;
+	gcm_gmult_neon(ctx->Xi.u, ctx->Htable);
+
+	/* GCTR(J0, S) - encode/decode using counter 0 */
+	ctx->Xi.u[0] ^= ctx->EK0.u[0];
+	ctx->Xi.u[1] ^= ctx->EK0.u[1];
+
+	/* Constant-time tag comparison to avoid a timing oracle */
+	if (tag && len <= sizeof(ctx->Xi))
+		return timingsafe_bcmp(ctx->Xi.c, tag, len);
+	else
+		return -1;
+}
+
+static void
+gcm_tag(struct ossl_gcm_context *ctx, unsigned char *tag, size_t len)
+{
+	gcm_finish(ctx, NULL, 0);
+	memcpy(tag, ctx->Xi.c,
+	    len <= sizeof(ctx->Xi.c) ? len : sizeof(ctx->Xi.c));
+}
+
+static int
+AES_GCM_ENCRYPT(struct ossl_gcm_context *ctx, const unsigned char *in, unsigned char *out,
+    size_t len, int encrypt)
+{
+	if (encrypt)
+		return (gcm_encrypt(ctx, in, out, len));
+	else
+		return (gcm_decrypt(ctx, in, out, len));
+}
+
+#endif
Index: sys/crypto/openssl/ossl_arm.c
===================================================================
--- sys/crypto/openssl/ossl_arm.c
+++ sys/crypto/openssl/ossl_arm.c
@@ -62,5 +62,8 @@
 		sc->has_aes = true;
 		ossl_cipher_aes_cbc.set_encrypt_key = AES_set_encrypt_key;
 		ossl_cipher_aes_cbc.set_decrypt_key = AES_set_decrypt_key;
+
+		ossl_cipher_aes_gcm.set_encrypt_key = AES_set_encrypt_key;
+		ossl_cipher_aes_gcm.set_decrypt_key = AES_set_decrypt_key;
 	}
 }
Index: sys/modules/ossl/Makefile
===================================================================
--- sys/modules/ossl/Makefile
+++ sys/modules/ossl/Makefile
@@ -25,7 +25,8 @@
 	sha256-armv4.S \
 	sha512-armv4.S \
 	aes-armv4.S \
-	bsaes-armv7.S
+	bsaes-armv7.S \
+	ghash-armv4.S
 
 SRCS.aarch64= \
 	chacha-armv8.S \