-rw-r--r--	arch/arm/crypto/aes-neonbs-glue.c	| 65
1 file changed, 65 insertions, 0 deletions
diff --git a/arch/arm/crypto/aes-neonbs-glue.c b/arch/arm/crypto/aes-neonbs-glue.c
index 8c22d10bd909..45cd9818791e 100644
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -6,8 +6,10 @@
  */
 
 #include <asm/neon.h>
+#include <asm/simd.h>
 #include <crypto/aes.h>
 #include <crypto/cbc.h>
+#include <crypto/ctr.h>
 #include <crypto/internal/simd.h>
 #include <crypto/internal/skcipher.h>
 #include <crypto/xts.h>
@@ -54,6 +56,11 @@ struct aesbs_xts_ctx {
 	struct crypto_cipher	*tweak_tfm;
 };
 
+struct aesbs_ctr_ctx {
+	struct aesbs_ctx	key;		/* must be first member */
+	struct crypto_aes_ctx	fallback;
+};
+
 static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			unsigned int key_len)
 {
@@ -189,6 +196,25 @@ static void cbc_exit(struct crypto_tfm *tfm)
 	crypto_free_cipher(ctx->enc_tfm);
 }
 
+static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
+				 unsigned int key_len)
+{
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	int err;
+
+	err = aes_expandkey(&ctx->fallback, in_key, key_len);
+	if (err)
+		return err;
+
+	ctx->key.rounds = 6 + key_len / 4;
+
+	kernel_neon_begin();
+	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
+	kernel_neon_end();
+
+	return 0;
+}
+
 static int ctr_encrypt(struct skcipher_request *req)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
@@ -231,6 +257,29 @@ static int ctr_encrypt(struct skcipher_request *req)
 	return err;
 }
 
+static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
+{
+	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
+	unsigned long flags;
+
+	/*
+	 * Temporarily disable interrupts to avoid races where
+	 * cachelines are evicted when the CPU is interrupted
+	 * to do something else.
+	 */
+	local_irq_save(flags);
+	aes_encrypt(&ctx->fallback, dst, src);
+	local_irq_restore(flags);
+}
+
+static int ctr_encrypt_sync(struct skcipher_request *req)
+{
+	if (!crypto_simd_usable())
+		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);
+
+	return ctr_encrypt(req);
+}
+
 static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
 			    unsigned int key_len)
 {
@@ -359,6 +408,22 @@ static struct skcipher_alg aes_algs[] = { {
 	.encrypt		= ctr_encrypt,
 	.decrypt		= ctr_encrypt,
 }, {
+	.base.cra_name		= "ctr(aes)",
+	.base.cra_driver_name	= "ctr-aes-neonbs-sync",
+	.base.cra_priority	= 250 - 1,
+	.base.cra_blocksize	= 1,
+	.base.cra_ctxsize	= sizeof(struct aesbs_ctr_ctx),
+	.base.cra_module	= THIS_MODULE,
+
+	.min_keysize		= AES_MIN_KEY_SIZE,
+	.max_keysize		= AES_MAX_KEY_SIZE,
+	.chunksize		= AES_BLOCK_SIZE,
+	.walksize		= 8 * AES_BLOCK_SIZE,
+	.ivsize			= AES_BLOCK_SIZE,
+	.setkey			= aesbs_ctr_setkey_sync,
+	.encrypt		= ctr_encrypt_sync,
+	.decrypt		= ctr_encrypt_sync,
+}, {
 	.base.cra_name		= "__xts(aes)",
 	.base.cra_driver_name	= "__xts-aes-neonbs",
 	.base.cra_priority	= 250,
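
The new "ctr-aes-neonbs-sync" entry only matters to callers that insist on a
synchronous skcipher, e.g. via crypto_alloc_sync_skcipher(), which masks out
CRYPTO_ALG_ASYNC when looking up the algorithm. The sketch below is a
hypothetical demo module, not part of the patch: the module and function names
and the test values are invented for illustration. It allocates a synchronous
ctr(aes) transform and encrypts one block; on an ARM kernel with this patch
applied, that request could be satisfied by the new algorithm.

/*
 * Hypothetical demo (not part of the patch above): allocate a
 * synchronous ctr(aes) skcipher and encrypt one block with it.
 */
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>

static int __init ctr_sync_demo_init(void)
{
	struct crypto_sync_skcipher *tfm;
	struct scatterlist sg;
	u8 key[AES_KEYSIZE_128] = { 0 };
	u8 iv[AES_BLOCK_SIZE] = { 0 };
	u8 *buf;
	int err;

	/*
	 * crypto_alloc_sync_skcipher() masks out CRYPTO_ALG_ASYNC, so only
	 * synchronous implementations qualify; on ARM with the patch above,
	 * "ctr-aes-neonbs-sync" can satisfy this request.
	 */
	tfm = crypto_alloc_sync_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Scatterlists must not point at the stack, so use kmalloc memory. */
	buf = kzalloc(AES_BLOCK_SIZE, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	err = crypto_sync_skcipher_setkey(tfm, key, sizeof(key));
	if (!err) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, tfm);

		sg_init_one(&sg, buf, AES_BLOCK_SIZE);
		skcipher_request_set_sync_tfm(req, tfm);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, AES_BLOCK_SIZE, iv);

		/* Synchronous tfm: this returns only once the block is done. */
		err = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}

	kfree(buf);
out_free_tfm:
	crypto_free_sync_skcipher(tfm);
	return err;
}

static void __exit ctr_sync_demo_exit(void)
{
}

module_init(ctr_sync_demo_init);
module_exit(ctr_sync_demo_exit);
MODULE_LICENSE("GPL");

Note the priority of 250 - 1 in the patch: ordinary "ctr(aes)" lookups still
prefer the async NEON implementation at priority 250, while sync-only users
get this new entry, which falls back to scalar aes_encrypt() only when
crypto_simd_usable() is false (e.g. when NEON is unavailable in the current
context).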