Page Menu · Home · FreeBSD

D27454.id80684.diff
Authored by: No One · Visibility: Temporary

D27454.id80684.diff

Index: sys/crypto/armv8/armv8_crypto.h
===================================================================
--- sys/crypto/armv8/armv8_crypto.h
+++ sys/crypto/armv8/armv8_crypto.h
@@ -32,21 +32,50 @@
#ifndef _ARMV8_CRYPTO_H_
#define _ARMV8_CRYPTO_H_
-#define AES128_ROUNDS 10
-#define AES192_ROUNDS 12
#define AES256_ROUNDS 14
#define AES_SCHED_LEN ((AES256_ROUNDS + 1) * AES_BLOCK_LEN)
+typedef struct {
+ uint32_t aes_key[AES_SCHED_LEN/4];
+ int aes_rounds;
+} AES_key_t;
+
+typedef union {
+ uint64_t u[2];
+ uint32_t d[4];
+ uint8_t c[16];
+ size_t t[16 / sizeof(size_t)];
+} __uint128_val_t;
+
struct armv8_crypto_session {
- uint32_t enc_schedule[AES_SCHED_LEN/4];
- uint32_t dec_schedule[AES_SCHED_LEN/4];
- int algo;
- int rounds;
+ AES_key_t enc_schedule;
+ AES_key_t dec_schedule;
+ __uint128_val_t Htable[16];
};
-void armv8_aes_encrypt_cbc(int, const void *, size_t, const uint8_t *,
+/* Prototypes for aesv8-armx.S */
+void aes_v8_encrypt(uint8_t *in, uint8_t *out, const AES_key_t *key);
+int aes_v8_set_encrypt_key(const unsigned char *userKey, const int bits, const AES_key_t *key);
+int aes_v8_set_decrypt_key(const unsigned char *userKey, const int bits, const AES_key_t *key);
+
+/* Prototypes for ghashv8-armx.S */
+void gcm_init_v8(__uint128_val_t Htable[16], const uint64_t Xi[2]);
+void gcm_gmult_v8(uint64_t Xi[2], const __uint128_val_t Htable[16]);
+void gcm_ghash_v8(uint64_t Xi[2], const __uint128_val_t Htable[16], const uint8_t *inp, size_t len);
+
+void armv8_aes_encrypt_cbc(const AES_key_t *, size_t, const uint8_t *,
uint8_t *, const uint8_t[static AES_BLOCK_LEN]);
-void armv8_aes_decrypt_cbc(int, const void *, size_t, uint8_t *,
+void armv8_aes_decrypt_cbc(const AES_key_t *, size_t, uint8_t *,
const uint8_t[static AES_BLOCK_LEN]);
+void armv8_aes_encrypt_gcm(AES_key_t *, size_t, const uint8_t *,
+ uint8_t *, size_t, const uint8_t*,
+ uint8_t tag[static GMAC_DIGEST_LEN],
+ const uint8_t[static AES_BLOCK_LEN],
+ const __uint128_val_t *);
+int armv8_aes_decrypt_gcm(AES_key_t *, size_t, const uint8_t *,
+ uint8_t *, size_t, const uint8_t*,
+ const uint8_t tag[static GMAC_DIGEST_LEN],
+ const uint8_t[static AES_BLOCK_LEN],
+ const __uint128_val_t *);
#endif /* _ARMV8_CRYPTO_H_ */
Index: sys/crypto/armv8/armv8_crypto.c
===================================================================
--- sys/crypto/armv8/armv8_crypto.c
+++ sys/crypto/armv8/armv8_crypto.c
@@ -2,6 +2,7 @@
* Copyright (c) 2005-2008 Pawel Jakub Dawidek <pjd@FreeBSD.org>
* Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
* Copyright (c) 2014,2016 The FreeBSD Foundation
+ * Copyright (c) 2020 Ampere Computing
* All rights reserved.
*
* Portions of this software were developed by John-Mark Gurney
@@ -58,14 +59,16 @@
#include <machine/vfp.h>
#include <opencrypto/cryptodev.h>
+#include <opencrypto/gmac.h>
#include <cryptodev_if.h>
-#include <crypto/armv8/armv8_crypto.h>
#include <crypto/rijndael/rijndael.h>
+#include <crypto/armv8/armv8_crypto.h>
struct armv8_crypto_softc {
int dieing;
int32_t cid;
struct rwlock lock;
+ bool has_pmul;
};
static struct mtx *ctx_mtx;
@@ -109,13 +112,17 @@
switch (ID_AA64ISAR0_AES_VAL(reg)) {
case ID_AA64ISAR0_AES_BASE:
+ ret = 0;
+ device_set_desc_copy(dev, "AES-CBC");
+ break;
case ID_AA64ISAR0_AES_PMULL:
ret = 0;
+ device_set_desc_copy(dev, "AES-CBC,AES-GCM");
break;
+ default:
+ break;
}
- device_set_desc_copy(dev, "AES-CBC");
-
/* TODO: Check more fields as we support more features */
return (ret);
@@ -125,11 +132,17 @@
armv8_crypto_attach(device_t dev)
{
struct armv8_crypto_softc *sc;
+ uint64_t reg;
int i;
sc = device_get_softc(dev);
sc->dieing = 0;
+ reg = READ_SPECIALREG(id_aa64isar0_el1);
+
+ if (ID_AA64ISAR0_AES_VAL(reg) == ID_AA64ISAR0_AES_PMULL)
+ sc->has_pmul = true;
+
sc->cid = crypto_get_driverid(dev, sizeof(struct armv8_crypto_session),
CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC | CRYPTOCAP_F_ACCEL_SOFTWARE);
if (sc->cid < 0) {
@@ -186,11 +199,34 @@
armv8_crypto_probesession(device_t dev,
const struct crypto_session_params *csp)
{
+ struct armv8_crypto_softc *sc;
- if (csp->csp_flags != 0)
- return (EINVAL);
+ sc = device_get_softc(dev);
+
switch (csp->csp_mode) {
+ case CSP_MODE_AEAD:
+ switch (csp->csp_cipher_alg) {
+ case CRYPTO_AES_NIST_GCM_16:
+ if (!sc->has_pmul)
+ return (EINVAL);
+ if (csp->csp_ivlen != AES_GCM_IV_LEN)
+ return (EINVAL);
+ switch (csp->csp_cipher_klen * 8) {
+ case 128:
+ case 192:
+ case 256:
+ break;
+ default:
+ return (EINVAL);
+ }
+ break;
+ default:
+ return (EINVAL);
+ }
+ break;
case CSP_MODE_CIPHER:
+ if (csp->csp_flags != 0)
+ return (EINVAL);
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_CBC:
if (csp->csp_ivlen != AES_BLOCK_LEN)
@@ -214,34 +250,47 @@
return (CRYPTODEV_PROBE_ACCEL_SOFTWARE);
}
-static void
+static int
armv8_crypto_cipher_setup(struct armv8_crypto_session *ses,
const struct crypto_session_params *csp)
{
- int i;
+ __uint128_val_t H;
+ struct fpu_kern_ctx *ctx;
+ int kt, i;
switch (csp->csp_cipher_klen * 8) {
case 128:
- ses->rounds = AES128_ROUNDS;
- break;
case 192:
- ses->rounds = AES192_ROUNDS;
- break;
case 256:
- ses->rounds = AES256_ROUNDS;
break;
default:
- panic("invalid CBC key length");
+ return (EINVAL);
}
- rijndaelKeySetupEnc(ses->enc_schedule, csp->csp_cipher_key,
- csp->csp_cipher_klen * 8);
- rijndaelKeySetupDec(ses->dec_schedule, csp->csp_cipher_key,
- csp->csp_cipher_klen * 8);
- for (i = 0; i < nitems(ses->enc_schedule); i++) {
- ses->enc_schedule[i] = bswap32(ses->enc_schedule[i]);
- ses->dec_schedule[i] = bswap32(ses->dec_schedule[i]);
+ kt = is_fpu_kern_thread(0);
+ if (!kt) {
+ AQUIRE_CTX(i, ctx);
+ fpu_kern_enter(curthread, ctx,
+ FPU_KERN_NORMAL | FPU_KERN_KTHR);
}
+
+ aes_v8_set_encrypt_key(csp->csp_cipher_key,
+ csp->csp_cipher_klen * 8, &ses->enc_schedule);
+ aes_v8_set_decrypt_key(csp->csp_cipher_key,
+ csp->csp_cipher_klen * 8, &ses->dec_schedule);
+
+ memset(H.c, 0, sizeof(H.c));
+ aes_v8_encrypt(H.c, H.c, &ses->enc_schedule);
+ H.u[0] = bswap64(H.u[0]);
+ H.u[1] = bswap64(H.u[1]);
+ gcm_init_v8(ses->Htable, H.u);
+
+ if (!kt) {
+ fpu_kern_leave(curthread, ctx);
+ RELEASE_CTX(i, ctx);
+ }
+
+ return (0);
}
static int
@@ -250,6 +299,7 @@
{
struct armv8_crypto_softc *sc;
struct armv8_crypto_session *ses;
+ int err;
sc = device_get_softc(dev);
rw_wlock(&sc->lock);
@@ -259,39 +309,28 @@
}
ses = crypto_get_driver_session(cses);
- armv8_crypto_cipher_setup(ses, csp);
+ err = armv8_crypto_cipher_setup(ses, csp);
rw_wunlock(&sc->lock);
- return (0);
+ return (err);
}
static int
armv8_crypto_process(device_t dev, struct cryptop *crp, int hint __unused)
{
struct armv8_crypto_session *ses;
- int error;
- /* We can only handle full blocks for now */
- if ((crp->crp_payload_length % AES_BLOCK_LEN) != 0) {
- error = EINVAL;
- goto out;
- }
-
ses = crypto_get_driver_session(crp->crp_session);
- error = armv8_crypto_cipher_process(ses, crp);
-
-out:
- crp->crp_etype = error;
+ crp->crp_etype = armv8_crypto_cipher_process(ses, crp);
crypto_done(crp);
return (0);
}
static uint8_t *
-armv8_crypto_cipher_alloc(struct cryptop *crp, int *allocated)
+armv8_crypto_cipher_alloc(struct cryptop *crp, int start, int length, int *allocated)
{
uint8_t *addr;
- addr = crypto_contiguous_subsegment(crp, crp->crp_payload_start,
- crp->crp_payload_length);
+ addr = crypto_contiguous_subsegment(crp, start, length);
if (addr != NULL) {
*allocated = 0;
return (addr);
@@ -299,8 +338,7 @@
addr = malloc(crp->crp_payload_length, M_ARMV8_CRYPTO, M_NOWAIT);
if (addr != NULL) {
*allocated = 1;
- crypto_copydata(crp, crp->crp_payload_start,
- crp->crp_payload_length, addr);
+ crypto_copydata(crp, start, length, addr);
} else
*allocated = 0;
return (addr);
@@ -312,19 +350,63 @@
{
const struct crypto_session_params *csp;
struct fpu_kern_ctx *ctx;
- uint8_t *buf;
- uint8_t iv[AES_BLOCK_LEN];
- int allocated, i;
+ uint8_t *buf, *authbuf, *outbuf;
+ uint8_t iv[AES_BLOCK_LEN], tag[GMAC_DIGEST_LEN];
+ int allocated, authallocated, outallocated, i;
int encflag;
int kt;
+ int err;
+ bool outcopy;
csp = crypto_get_params(crp->crp_session);
encflag = CRYPTO_OP_IS_ENCRYPT(crp->crp_op);
- buf = armv8_crypto_cipher_alloc(crp, &allocated);
+ allocated = 0;
+ outallocated = 0;
+ authallocated = 0;
+ authbuf = NULL;
+ kt = 1;
+
+ buf = armv8_crypto_cipher_alloc(crp, crp->crp_payload_start,
+ crp->crp_payload_length, &allocated);
if (buf == NULL)
return (ENOMEM);
+ if (csp->csp_cipher_alg == CRYPTO_AES_NIST_GCM_16) {
+ if (crp->crp_aad != NULL)
+ authbuf = crp->crp_aad;
+ else
+ authbuf = armv8_crypto_cipher_alloc(crp, crp->crp_aad_start,
+ crp->crp_aad_length, &authallocated);
+ if (authbuf == NULL) {
+ err = ENOMEM;
+ goto out;
+ }
+ }
+
+ if (CRYPTO_HAS_OUTPUT_BUFFER(crp)) {
+ outbuf = crypto_buffer_contiguous_subsegment(&crp->crp_obuf,
+ crp->crp_payload_output_start, crp->crp_payload_length);
+ if (outbuf == NULL) {
+ outcopy = true;
+ if (allocated)
+ outbuf = buf;
+ else {
+ outbuf = malloc(crp->crp_payload_length,
+ M_ARMV8_CRYPTO, M_NOWAIT);
+ if (outbuf == NULL) {
+ err = ENOMEM;
+ goto out;
+ }
+ outallocated = true;
+ }
+ } else
+ outcopy = false;
+ } else {
+ outbuf = buf;
+ outcopy = allocated;
+ }
+
kt = is_fpu_kern_thread(0);
if (!kt) {
AQUIRE_CTX(i, ctx);
@@ -339,28 +421,75 @@
crypto_read_iv(crp, iv);
/* Do work */
+ err = 0;
switch (csp->csp_cipher_alg) {
case CRYPTO_AES_CBC:
+ if ((crp->crp_payload_length % AES_BLOCK_LEN) != 0) {
+ err = EINVAL;
+ goto out;
+ }
if (encflag)
- armv8_aes_encrypt_cbc(ses->rounds, ses->enc_schedule,
+ armv8_aes_encrypt_cbc(&ses->enc_schedule,
crp->crp_payload_length, buf, buf, iv);
else
- armv8_aes_decrypt_cbc(ses->rounds, ses->dec_schedule,
+ armv8_aes_decrypt_cbc(&ses->dec_schedule,
crp->crp_payload_length, buf, iv);
break;
+ case CRYPTO_AES_NIST_GCM_16:
+ if (csp->csp_auth_mlen != 0 &&
+ csp->csp_auth_mlen != GMAC_DIGEST_LEN) {
+ err = EINVAL;
+ goto out;
+ }
+ if (csp->csp_ivlen != AES_GCM_IV_LEN) {
+ err = EINVAL;
+ goto out;
+ }
+ if (encflag) {
+ memset(tag, 0, sizeof(tag));
+ armv8_aes_encrypt_gcm(&ses->enc_schedule,
+ crp->crp_payload_length,
+ buf, outbuf,
+ crp->crp_aad_length, authbuf,
+ tag, iv, ses->Htable);
+ crypto_copyback(crp, crp->crp_digest_start, sizeof(tag),
+ tag);
+ } else {
+ crypto_copydata(crp, crp->crp_digest_start, sizeof(tag),
+ tag);
+ if (armv8_aes_decrypt_gcm(&ses->enc_schedule,
+ crp->crp_payload_length,
+ buf, outbuf,
+ crp->crp_aad_length, authbuf,
+ tag, iv, ses->Htable) != 0) {
+ err = EBADMSG;
+ goto out;
+ }
+ }
+ break;
}
- if (allocated)
- crypto_copyback(crp, crp->crp_payload_start,
- crp->crp_payload_length, buf);
+ if (outcopy && err == 0)
+ crypto_copyback(crp, CRYPTO_HAS_OUTPUT_BUFFER(crp) ?
+ crp->crp_payload_output_start : crp->crp_payload_start,
+ crp->crp_payload_length, outbuf);
+out:
if (!kt) {
fpu_kern_leave(curthread, ctx);
RELEASE_CTX(i, ctx);
}
+
if (allocated)
zfree(buf, M_ARMV8_CRYPTO);
- return (0);
+ if (authallocated)
+ zfree(authbuf, M_ARMV8_CRYPTO);
+ if (outallocated)
+ zfree(outbuf, M_ARMV8_CRYPTO);
+ explicit_bzero(iv, sizeof(iv));
+ explicit_bzero(tag, sizeof(tag));
+
+ return (err);
}
static device_method_t armv8_crypto_methods[] = {
Index: sys/crypto/armv8/armv8_crypto_wrap.c
===================================================================
--- sys/crypto/armv8/armv8_crypto_wrap.c
+++ sys/crypto/armv8/armv8_crypto_wrap.c
@@ -1,5 +1,6 @@
/*-
* Copyright (c) 2016 The FreeBSD Foundation
+ * Copyright (c) 2020 Ampere Computing
* All rights reserved.
*
* This software was developed by Andrew Turner under
@@ -41,6 +42,8 @@
#include <sys/queue.h>
#include <opencrypto/cryptodev.h>
+#include <opencrypto/gmac.h>
+#include <crypto/rijndael/rijndael.h>
#include <crypto/armv8/armv8_crypto.h>
#include <arm_neon.h>
@@ -90,7 +93,7 @@
}
void
-armv8_aes_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
+armv8_aes_encrypt_cbc(const AES_key_t *key, size_t len,
const uint8_t *from, uint8_t *to, const uint8_t iv[static AES_BLOCK_LEN])
{
uint8x16_t tot, ivreg, tmp;
@@ -100,8 +103,8 @@
ivreg = vld1q_u8(iv);
for (i = 0; i < len; i++) {
tmp = vld1q_u8(from);
- tot = armv8_aes_enc(rounds - 1, key_schedule,
- veorq_u8(tmp, ivreg));
+ tot = armv8_aes_enc(key->aes_rounds - 1,
+ (const void*)key->aes_key, veorq_u8(tmp, ivreg));
ivreg = tot;
vst1q_u8(to, tot);
from += AES_BLOCK_LEN;
@@ -110,7 +113,7 @@
}
void
-armv8_aes_decrypt_cbc(int rounds, const void *key_schedule, size_t len,
+armv8_aes_decrypt_cbc(const AES_key_t *key, size_t len,
uint8_t *buf, const uint8_t iv[static AES_BLOCK_LEN])
{
uint8x16_t ivreg, nextiv, tmp;
@@ -120,9 +123,189 @@
ivreg = vld1q_u8(iv);
for (i = 0; i < len; i++) {
nextiv = vld1q_u8(buf);
- tmp = armv8_aes_dec(rounds - 1, key_schedule, nextiv);
+ tmp = armv8_aes_dec(key->aes_rounds - 1,
+ (const void*)key->aes_key, nextiv);
vst1q_u8(buf, veorq_u8(tmp, ivreg));
ivreg = nextiv;
buf += AES_BLOCK_LEN;
}
+}
+
+#define AES_INC_COUNTER(counter) \
+ do { \
+ for (int pos = AES_BLOCK_LEN - 1; \
+ pos >= 0; pos--) \
+ if (++(counter)[pos]) \
+ break; \
+ } while (0)
+
+void
+armv8_aes_encrypt_gcm(AES_key_t *aes_key, size_t len,
+ const uint8_t *from, uint8_t *to,
+ size_t authdatalen, const uint8_t *authdata,
+ uint8_t tag[static GMAC_DIGEST_LEN],
+ const uint8_t iv[static AES_GCM_IV_LEN],
+ const __uint128_val_t *Htable)
+{
+ size_t i;
+ const uint64_t *from64;
+ uint64_t *to64;
+ uint8_t aes_counter[AES_BLOCK_LEN];
+ uint8_t block[AES_BLOCK_LEN];
+ size_t trailer;
+ __uint128_val_t EK0, EKi, Xi, lenblock;
+
+ bzero(&aes_counter, AES_BLOCK_LEN);
+ memcpy(aes_counter, iv, AES_GCM_IV_LEN);
+
+ /* Setup the counter */
+ aes_counter[AES_BLOCK_LEN - 1] = 1;
+
+ /* EK0 for a final GMAC round */
+ aes_v8_encrypt(aes_counter, EK0.c, aes_key);
+
+ /* GCM starts with 2 as counter, 1 is used for final xor of tag. */
+ aes_counter[AES_BLOCK_LEN - 1] = 2;
+
+ memset(Xi.c, 0, sizeof(Xi.c));
+ memset(block, 0, sizeof(block));
+ memcpy(block, authdata, min(authdatalen, sizeof(block)));
+ gcm_ghash_v8(Xi.u, Htable, block, AES_BLOCK_LEN);
+
+ from64 = (const uint64_t*)from;
+ to64 = (uint64_t*)to;
+ trailer = len % AES_BLOCK_LEN;
+
+ for (i = 0; i < (len - trailer); i += AES_BLOCK_LEN) {
+ aes_v8_encrypt(aes_counter, EKi.c, aes_key);
+ AES_INC_COUNTER(aes_counter);
+ to64[0] = from64[0] ^ EKi.u[0];
+ to64[1] = from64[1] ^ EKi.u[1];
+ gcm_ghash_v8(Xi.u, Htable, (uint8_t*)to64, AES_BLOCK_LEN);
+
+ to64 += 2;
+ from64 += 2;
+ }
+
+ to += (len - trailer);
+ from += (len - trailer);
+
+ if (trailer) {
+ aes_v8_encrypt(aes_counter, EKi.c, aes_key);
+ AES_INC_COUNTER(aes_counter);
+ for (i = 0; i < trailer; i++) {
+ block[i] = to[i] = from[i] ^ EKi.c[i % AES_BLOCK_LEN];
+ }
+
+ for (; i < AES_BLOCK_LEN; i++)
+ block[i] = 0;
+
+ gcm_ghash_v8(Xi.u, Htable, block, AES_BLOCK_LEN);
+ }
+
+ /* Lengths block */
+ lenblock.u[0] = lenblock.u[1] = 0;
+ lenblock.d[1] = htobe32(authdatalen * 8);
+ lenblock.d[3] = htobe32(len * 8);
+ gcm_ghash_v8(Xi.u, Htable, lenblock.c, AES_BLOCK_LEN);
+
+ Xi.u[0] ^= EK0.u[0];
+ Xi.u[1] ^= EK0.u[1];
+ memcpy(tag, Xi.c, GMAC_DIGEST_LEN);
+
+ explicit_bzero(aes_counter, sizeof(aes_counter));
+ explicit_bzero(Xi.c, sizeof(Xi.c));
+ explicit_bzero(EK0.c, sizeof(EK0.c));
+ explicit_bzero(EKi.c, sizeof(EKi.c));
+ explicit_bzero(lenblock.c, sizeof(lenblock.c));
+}
+
+int
+armv8_aes_decrypt_gcm(AES_key_t *aes_key, size_t len,
+ const uint8_t *from, uint8_t *to,
+ size_t authdatalen, const uint8_t *authdata,
+ const uint8_t tag[static GMAC_DIGEST_LEN],
+ const uint8_t iv[static AES_GCM_IV_LEN],
+ const __uint128_val_t *Htable)
+{
+ size_t i;
+ const uint64_t *from64;
+ uint64_t *to64;
+ uint8_t aes_counter[AES_BLOCK_LEN];
+ uint8_t block[AES_BLOCK_LEN];
+ size_t trailer;
+ __uint128_val_t EK0, EKi, Xi, lenblock;
+ int error;
+
+ error = 0;
+ bzero(&aes_counter, AES_BLOCK_LEN);
+ memcpy(aes_counter, iv, AES_GCM_IV_LEN);
+
+ /* Setup the counter */
+ aes_counter[AES_BLOCK_LEN - 1] = 1;
+
+ /* EK0 for a final GMAC round */
+ aes_v8_encrypt(aes_counter, EK0.c, aes_key);
+
+ memset(Xi.c, 0, sizeof(Xi.c));
+ memset(block, 0, sizeof(block));
+ memcpy(block, authdata, min(authdatalen, sizeof(block)));
+ gcm_ghash_v8(Xi.u, Htable, block, AES_BLOCK_LEN);
+ trailer = len % AES_BLOCK_LEN;
+ gcm_ghash_v8(Xi.u, Htable, from, len - trailer);
+
+ if (trailer) {
+ for (i = 0; i < trailer; i++)
+ block[i] = from[len - trailer + i];
+ for (; i < AES_BLOCK_LEN; i++)
+ block[i] = 0;
+ gcm_ghash_v8(Xi.u, Htable, block, AES_BLOCK_LEN);
+ }
+
+ /* Lengths block */
+ lenblock.u[0] = lenblock.u[1] = 0;
+ lenblock.d[1] = htobe32(authdatalen * 8);
+ lenblock.d[3] = htobe32(len * 8);
+ gcm_ghash_v8(Xi.u, Htable, lenblock.c, AES_BLOCK_LEN);
+
+ Xi.u[0] ^= EK0.u[0];
+ Xi.u[1] ^= EK0.u[1];
+ if (timingsafe_bcmp(tag, Xi.c, GMAC_DIGEST_LEN) != 0) {
+ error = EBADMSG;
+ goto out;
+ }
+
+ /* GCM starts with 2 as counter, 1 is used for final xor of tag. */
+ aes_counter[AES_BLOCK_LEN - 1] = 2;
+
+ from64 = (const uint64_t*)from;
+ to64 = (uint64_t*)to;
+
+ for (i = 0; i < (len - trailer); i += AES_BLOCK_LEN) {
+ aes_v8_encrypt(aes_counter, EKi.c, aes_key);
+ AES_INC_COUNTER(aes_counter);
+ to64[0] = from64[0] ^ EKi.u[0];
+ to64[1] = from64[1] ^ EKi.u[1];
+ to64 += 2;
+ from64 += 2;
+ }
+
+ to += (len - trailer);
+ from += (len - trailer);
+
+ if (trailer) {
+ aes_v8_encrypt(aes_counter, EKi.c, aes_key);
+ AES_INC_COUNTER(aes_counter);
+ for (i = 0; i < trailer; i++)
+ to[i] = from[i] ^ EKi.c[i % AES_BLOCK_LEN];
+ }
+
+out:
+ explicit_bzero(aes_counter, sizeof(aes_counter));
+ explicit_bzero(Xi.c, sizeof(Xi.c));
+ explicit_bzero(EK0.c, sizeof(EK0.c));
+ explicit_bzero(EKi.c, sizeof(EKi.c));
+ explicit_bzero(lenblock.c, sizeof(lenblock.c));
+
+ return (error);
}
Index: sys/modules/armv8crypto/Makefile
===================================================================
--- sys/modules/armv8crypto/Makefile
+++ sys/modules/armv8crypto/Makefile
@@ -1,12 +1,13 @@
# $FreeBSD$
.PATH: ${SRCTOP}/sys/crypto/armv8
+.PATH: ${SRCTOP}/sys/crypto/openssl/aarch64
KMOD= armv8crypto
SRCS= armv8_crypto.c
SRCS+= device_if.h bus_if.h opt_bus.h cryptodev_if.h
-OBJS+= armv8_crypto_wrap.o
+OBJS+= armv8_crypto_wrap.o aesv8-armx.o ghashv8-armx.o
# Remove -nostdinc so we can get the intrinsics.
armv8_crypto_wrap.o: armv8_crypto_wrap.c
@@ -14,6 +15,26 @@
-I${SRCTOP}/sys/crypto/armv8 \
${WERROR} ${PROF} \
-march=armv8-a+crypto ${.IMPSRC}
+ ${CTFCONVERT_CMD}
+
+aesv8-armx.o: aesv8-armx.S
+ ${CC} -c ${CFLAGS:C/^-O2$/-O3/:N-nostdinc:N-mgeneral-regs-only} \
+ -I${SRCTOP}/sys/crypto/armv8 \
+ -I${SRCTOP}/sys/crypto/openssl/crypto \
+ -I${.CURDIR} \
+ -Igonzo \
+ ${WERROR} ${PROF} \
+ -march=armv8-a+crypto ${.IMPSRC}
+ ${CTFCONVERT_CMD}
+
+ghashv8-armx.o: ghashv8-armx.S
+ ${CC} -c ${CFLAGS:C/^-O2$/-O3/:N-nostdinc:N-mgeneral-regs-only} \
+ -I${SRCTOP}/sys/crypto/armv8 \
+ -I${SRCTOP}/sys/crypto/openssl/crypto \
+ -I${.CURDIR} \
+ -Igonzo \
+ ${WERROR} ${PROF} \
+ -march=armv8-a+crypto ${.IMPSRC}
${CTFCONVERT_CMD}
armv8_crypto_wrap.o: armv8_crypto.h
Index: sys/modules/armv8crypto/arm_arch.h
===================================================================
--- /dev/null
+++ sys/modules/armv8crypto/arm_arch.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2011-2018 The OpenSSL Project Authors. All Rights Reserved.
+ *
+ * Licensed under the OpenSSL license (the "License"). You may not use
+ * this file except in compliance with the License. You can obtain a copy
+ * in the file LICENSE in the source distribution or at
+ * https://www.openssl.org/source/license.html
+ */
+
+#ifndef OSSL_CRYPTO_ARM_ARCH_H
+# define OSSL_CRYPTO_ARM_ARCH_H
+
+# if !defined(__ARM_ARCH__)
+# if defined(__CC_ARM)
+# define __ARM_ARCH__ __TARGET_ARCH_ARM
+# if defined(__BIG_ENDIAN)
+# define __ARMEB__
+# else
+# define __ARMEL__
+# endif
+# elif defined(__GNUC__)
+# if defined(__aarch64__)
+# define __ARM_ARCH__ 8
+# if __BYTE_ORDER__==__ORDER_BIG_ENDIAN__
+# define __ARMEB__
+# else
+# define __ARMEL__
+# endif
+ /*
+ * Why doesn't gcc define __ARM_ARCH__? Instead it defines
+ * bunch of below macros. See all_architectures[] table in
+ * gcc/config/arm/arm.c. On a side note it defines
+ * __ARMEL__/__ARMEB__ for little-/big-endian.
+ */
+# elif defined(__ARM_ARCH)
+# define __ARM_ARCH__ __ARM_ARCH
+# elif defined(__ARM_ARCH_8A__)
+# define __ARM_ARCH__ 8
+# elif defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || \
+ defined(__ARM_ARCH_7R__)|| defined(__ARM_ARCH_7M__) || \
+ defined(__ARM_ARCH_7EM__)
+# define __ARM_ARCH__ 7
+# elif defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
+ defined(__ARM_ARCH_6K__)|| defined(__ARM_ARCH_6M__) || \
+ defined(__ARM_ARCH_6Z__)|| defined(__ARM_ARCH_6ZK__) || \
+ defined(__ARM_ARCH_6T2__)
+# define __ARM_ARCH__ 6
+# elif defined(__ARM_ARCH_5__) || defined(__ARM_ARCH_5T__) || \
+ defined(__ARM_ARCH_5E__)|| defined(__ARM_ARCH_5TE__) || \
+ defined(__ARM_ARCH_5TEJ__)
+# define __ARM_ARCH__ 5
+# elif defined(__ARM_ARCH_4__) || defined(__ARM_ARCH_4T__)
+# define __ARM_ARCH__ 4
+# else
+# error "unsupported ARM architecture"
+# endif
+# endif
+# endif
+
+# if !defined(__ARM_MAX_ARCH__)
+# define __ARM_MAX_ARCH__ __ARM_ARCH__
+# endif
+
+# if __ARM_MAX_ARCH__<__ARM_ARCH__
+# error "__ARM_MAX_ARCH__ can't be less than __ARM_ARCH__"
+# elif __ARM_MAX_ARCH__!=__ARM_ARCH__
+# if __ARM_ARCH__<7 && __ARM_MAX_ARCH__>=7 && defined(__ARMEB__)
+# error "can't build universal big-endian binary"
+# endif
+# endif
+
+# ifndef __ASSEMBLER__
+extern unsigned int OPENSSL_armcap_P;
+# endif
+
+# define ARMV7_NEON (1<<0)
+# define ARMV7_TICK (1<<1)
+# define ARMV8_AES (1<<2)
+# define ARMV8_SHA1 (1<<3)
+# define ARMV8_SHA256 (1<<4)
+# define ARMV8_PMULL (1<<5)
+# define ARMV8_SHA512 (1<<6)
+
+#endif

File Metadata

Mime Type
text/plain
Expires
Thu, Mar 19, 6:33 AM (15 h, 33 m)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
29952018
Default Alt Text
D27454.id80684.diff (22 KB)

Event Timeline