Index: sys/crypto/armv8/armv8_crypto.h
===================================================================
--- sys/crypto/armv8/armv8_crypto.h
+++ sys/crypto/armv8/armv8_crypto.h
@@ -40,6 +40,7 @@
 struct armv8_crypto_session {
 	uint32_t enc_schedule[AES_SCHED_LEN/4];
 	uint32_t dec_schedule[AES_SCHED_LEN/4];
+	uint32_t xts_schedule[AES_SCHED_LEN/4];
 	int algo;
 	int rounds;
 };
@@ -49,4 +50,9 @@
 void armv8_aes_decrypt_cbc(int, const void *, size_t, uint8_t *,
     const uint8_t[static AES_BLOCK_LEN]);
 
+void armv8_aes_encrypt_xts(int, const void *, const void *, size_t,
+    const uint8_t *, uint8_t *, const uint8_t[AES_BLOCK_LEN]);
+void armv8_aes_decrypt_xts(int, const void *, const void *, size_t,
+    const uint8_t *, uint8_t *, const uint8_t[AES_BLOCK_LEN]);
+
 #endif /* _ARMV8_CRYPTO_H_ */
Index: sys/crypto/armv8/armv8_crypto.c
===================================================================
--- sys/crypto/armv8/armv8_crypto.c
+++ sys/crypto/armv8/armv8_crypto.c
@@ -114,7 +114,7 @@
 		break;
 	}
 
-	device_set_desc_copy(dev, "AES-CBC");
+	device_set_desc_copy(dev, "AES-CBC,AES-XTS");
 
 	/* TODO: Check more fields as we support more features */
 
@@ -150,6 +150,7 @@
 	}
 
 	crypto_register(sc->cid, CRYPTO_AES_CBC, 0, 0);
+	crypto_register(sc->cid, CRYPTO_AES_XTS, 0, 0);
 
 	return (0);
 }
@@ -186,13 +187,13 @@
 
 static int
 armv8_crypto_cipher_setup(struct armv8_crypto_session *ses,
-    struct cryptoini *encini)
+    const uint8_t *key, int keylen)
 {
 	int i;
 
 	switch (ses->algo) {
 	case CRYPTO_AES_CBC:
-		switch (encini->cri_klen) {
+		switch (keylen) {
 		case 128:
 			ses->rounds = AES128_ROUNDS;
 			break;
@@ -207,17 +208,34 @@
 			return (EINVAL);
 		}
 		break;
+	case CRYPTO_AES_XTS:
+		keylen /= 2;	/* NOTE: the halved length is also used for the key setup below */
+		switch (keylen) {
+		case 128:
+			ses->rounds = AES128_ROUNDS;
+			break;
+		case 256:
+			ses->rounds = AES256_ROUNDS;
+			break;
+		default:
+			CRYPTDEB("invalid XTS key length");
+			return (EINVAL);
+		}
+		break;
 	default:
 		return (EINVAL);
 	}
 
-	rijndaelKeySetupEnc(ses->enc_schedule, encini->cri_key,
-	    encini->cri_klen);
-	rijndaelKeySetupDec(ses->dec_schedule, encini->cri_key,
-	    encini->cri_klen);
+	rijndaelKeySetupEnc(ses->enc_schedule, key, keylen);
+	rijndaelKeySetupDec(ses->dec_schedule, key, keylen);
+	if (ses->algo == CRYPTO_AES_XTS)
+		rijndaelKeySetupEnc(ses->xts_schedule, key + (keylen * 2) / 16,
+		    keylen);
 
 	for (i = 0; i < nitems(ses->enc_schedule); i++) {
 		ses->enc_schedule[i] = bswap32(ses->enc_schedule[i]);
 		ses->dec_schedule[i] = bswap32(ses->dec_schedule[i]);
+		if (ses->algo == CRYPTO_AES_XTS)
+			ses->xts_schedule[i] = bswap32(ses->xts_schedule[i]);
 	}
 
 	return (0);
@@ -246,6 +264,7 @@
 	for (; cri != NULL; cri = cri->cri_next) {
 		switch (cri->cri_alg) {
 		case CRYPTO_AES_CBC:
+		case CRYPTO_AES_XTS:
 			if (encini != NULL) {
 				CRYPTDEB("encini already set");
 				return (EINVAL);
@@ -271,7 +290,8 @@
 	ses = crypto_get_driver_session(cses);
 	ses->algo = encini->cri_alg;
 
-	error = armv8_crypto_cipher_setup(ses, encini);
+	error = armv8_crypto_cipher_setup(ses, encini->cri_key,
+	    encini->cri_klen);
 	if (error != 0) {
 		CRYPTDEB("setup failed");
 		rw_wunlock(&sc->lock);
@@ -304,6 +324,7 @@
 	for (crd = crp->crp_desc; crd != NULL; crd = crd->crd_next) {
 		switch (crd->crd_alg) {
 		case CRYPTO_AES_CBC:
+		case CRYPTO_AES_XTS:
 			if (enccrd != NULL) {
 				error = EINVAL;
 				goto out;
@@ -383,6 +404,9 @@
 	int allocated, i;
 	int encflag, ivlen;
 	int kt;
+	int error;
+
+	error = 0;
 
 	encflag = (enccrd->crd_flags & CRD_F_ENCRYPT) == CRD_F_ENCRYPT;
 
@@ -398,13 +422,21 @@
 	}
 
 	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
-		panic("CRD_F_KEY_EXPLICIT");
+		error = armv8_crypto_cipher_setup(ses, enccrd->crd_key,
+		    enccrd->crd_klen);
+		if (error != 0) {
+			CRYPTDEB("explicit setup failed");
+			return (error);
+		}
 	}
 
 	switch (enccrd->crd_alg) {
 	case CRYPTO_AES_CBC:
 		ivlen = AES_BLOCK_LEN;
 		break;
+	case CRYPTO_AES_XTS:
+		ivlen = 8;	/* XTS takes a 64-bit block number as IV */
+		break;
 	}
 
 	/* Setup iv */
@@ -435,6 +467,16 @@
 		armv8_aes_decrypt_cbc(ses->rounds, ses->dec_schedule,
 		    enccrd->crd_len, buf, iv);
 		break;
+	case CRYPTO_AES_XTS:
+		if (encflag)
+			armv8_aes_encrypt_xts(ses->rounds, ses->enc_schedule,
+			    ses->xts_schedule, enccrd->crd_len, buf, buf,
+			    iv);
+		else
+			armv8_aes_decrypt_xts(ses->rounds, ses->dec_schedule,
+			    ses->xts_schedule, enccrd->crd_len, buf, buf,
+			    iv);
+		break;
 	}
 
 	if (allocated)
Index: sys/crypto/armv8/armv8_crypto_wrap.c
===================================================================
--- sys/crypto/armv8/armv8_crypto_wrap.c
+++ sys/crypto/armv8/armv8_crypto_wrap.c
@@ -126,3 +126,95 @@
 		buf += AES_BLOCK_LEN;
 	}
 }
+
+#define	AES_XTS_BLOCKSIZE	16
+#define	AES_XTS_IVSIZE		8
+#define	AES_XTS_ALPHA		0x87	/* GF(2^128) generator polynomial */
+
+static inline int32x4_t
+xts_crank_lfsr(int32x4_t inp)
+{
+	const int32x4_t alphamask = {AES_XTS_ALPHA, 1, 1, 1};
+	int32x4_t xtweak, ret;
+
+	/* set up xor mask */
+	xtweak = vextq_s32(inp, inp, 3);
+	xtweak = vshrq_n_s32(xtweak, 31);
+	xtweak &= alphamask;
+
+	/* next term */
+	ret = vshlq_n_s32(inp, 1);
+	ret ^= xtweak;
+
+	return (ret);
+}
+
+static void
+armv8_aes_crypt_xts_block(int rounds, const uint8x16_t *key_schedule,
+    uint8x16_t *tweak, const uint8_t *from, uint8_t *to, int do_encrypt)
+{
+	uint8x16_t block;
+
+	block = vld1q_u8(from) ^ *tweak;
+
+	if (do_encrypt)
+		block = armv8_aes_enc(rounds - 1, key_schedule, block);
+	else
+		block = armv8_aes_dec(rounds - 1, key_schedule, block);
+
+	vst1q_u8(to, block ^ *tweak);
+
+	*tweak = vreinterpretq_u8_s32(
+	    xts_crank_lfsr(vreinterpretq_s32_u8(*tweak)));
+}
+
+static void
+armv8_aes_crypt_xts(int rounds, const uint8x16_t *data_schedule,
+    const uint8x16_t *tweak_schedule, size_t len, const uint8_t *from,
+    uint8_t *to, const uint8_t iv[static AES_BLOCK_LEN], int do_encrypt)
+{
+	uint8x16_t tweakreg;
+	uint8_t tweak[AES_XTS_BLOCKSIZE] __aligned(16);
+	size_t i, cnt;
+
+	/*
+	 * Prepare tweak as E_k2(IV). IV is specified as the LE
+	 * representation of a 64-bit block number, which we allow to be
+	 * passed in directly.
+	 */
+#if BYTE_ORDER == LITTLE_ENDIAN
+	bcopy(iv, tweak, AES_XTS_IVSIZE);
+	/* Last 64 bits of IV are always zero. */
+	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
+#else
+#error Only LITTLE_ENDIAN architectures are supported.
+#endif
+	tweakreg = vld1q_u8(tweak);
+	tweakreg = armv8_aes_enc(rounds - 1, tweak_schedule, tweakreg);
+
+	cnt = len / AES_XTS_BLOCKSIZE;
+	for (i = 0; i < cnt; i++) {
+		armv8_aes_crypt_xts_block(rounds, data_schedule, &tweakreg,
+		    from, to, do_encrypt);
+		from += AES_XTS_BLOCKSIZE;
+		to += AES_XTS_BLOCKSIZE;
+	}
+}
+
+void
+armv8_aes_encrypt_xts(int rounds, const void *data_schedule,
+    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
+    const uint8_t iv[static AES_BLOCK_LEN])
+{
+
+	armv8_aes_crypt_xts(rounds, data_schedule, tweak_schedule, len, from,
+	    to, iv, 1);
+}
+
+void
+armv8_aes_decrypt_xts(int rounds, const void *data_schedule,
+    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
+    const uint8_t iv[static AES_BLOCK_LEN])
+{
+
+	armv8_aes_crypt_xts(rounds, data_schedule, tweak_schedule, len, from,
+	    to, iv, 0);
+}
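
Note on the tweak update: xts_crank_lfsr() above is the IEEE P1619 multiply-by-alpha step, carried out four 32-bit lanes at a time (each lane's carry bit is rotated into the next lane, and an overall carry folds 0x87 back into the low byte). The following stand-alone user-space sketch is not part of the patch; xts_mul_alpha() is a hypothetical name for a byte-wise reference of the same GF(2^128) multiplication, useful for checking the NEON version against a scalar result.

/*
 * Scalar reference for the XTS tweak update.  Treats the 16-byte tweak
 * as a little-endian 128-bit value: shift left by one bit and, on
 * carry-out, reduce by XORing 0x87 into the low byte.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define	AES_XTS_BLOCKSIZE	16
#define	AES_XTS_ALPHA		0x87

static void
xts_mul_alpha(uint8_t tweak[AES_XTS_BLOCKSIZE])
{
	unsigned carry_in, carry_out;
	int i;

	/* Left-shift the 128-bit value, byte 0 being least significant. */
	carry_in = 0;
	for (i = 0; i < AES_XTS_BLOCKSIZE; i++) {
		carry_out = tweak[i] >> 7;
		tweak[i] = (tweak[i] << 1) | carry_in;
		carry_in = carry_out;
	}
	/* Reduce modulo the XTS polynomial on overflow. */
	if (carry_in)
		tweak[0] ^= AES_XTS_ALPHA;
}

int
main(void)
{
	uint8_t tweak[AES_XTS_BLOCKSIZE];
	int i;

	memset(tweak, 0, sizeof(tweak));
	tweak[15] = 0x80;	/* MSB set: forces a reduction on shift */
	xts_mul_alpha(tweak);
	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
		printf("%02x", tweak[i]);
	printf("\n");		/* expect 87 followed by 30 zeros */
	return (0);
}

Feeding the same starting tweak through repeated xts_crank_lfsr() calls (reinterpreting the uint8x16_t lanes as bytes) should yield the identical sequence of 16-byte values.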
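For end-to-end validation, the driver's output can be compared against an independent XTS implementation. Below is a minimal user-space sketch, not part of the patch, assuming OpenSSL 1.1 or later; the key and block number are arbitrary demo values. Two caveats: OpenSSL rejects XTS keys whose two halves are equal, and armv8_aes_crypt_xts() above processes only whole 16-byte blocks (no ciphertext stealing), so comparisons should use lengths that are a multiple of AES_BLOCK_LEN.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#include <openssl/evp.h>

int
main(void)
{
	uint8_t key[32];	/* K1 || K2 for AES-128-XTS */
	uint8_t iv[16];		/* 64-bit LE block number, zero-padded */
	uint8_t in[32], out[32 + 16];
	EVP_CIPHER_CTX *ctx;
	int i, len, total;

	/* Arbitrary demo key; the halves must differ. */
	for (i = 0; i < (int)sizeof(key); i++)
		key[i] = i;
	memset(in, 0, sizeof(in));
	/* Same IV layout the driver expects: block number 1, rest zero. */
	memset(iv, 0, sizeof(iv));
	iv[0] = 1;

	ctx = EVP_CIPHER_CTX_new();
	if (ctx == NULL)
		return (1);
	total = 0;
	if (EVP_EncryptInit_ex(ctx, EVP_aes_128_xts(), NULL, key, iv) != 1 ||
	    EVP_EncryptUpdate(ctx, out, &len, in, sizeof(in)) != 1)
		return (1);
	total += len;
	if (EVP_EncryptFinal_ex(ctx, out + total, &len) != 1)
		return (1);
	total += len;
	EVP_CIPHER_CTX_free(ctx);

	for (i = 0; i < total; i++)
		printf("%02x", out[i]);
	printf("\n");
	return (0);
}

Build with "cc -o xtscheck xtscheck.c -lcrypto". Encrypting the same 32 zero bytes through the driver with this key and block number 1 should produce the same ciphertext, since both compute the tweak as E_k2 of the zero-padded little-endian block number.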