author		Ard Biesheuvel <ard.biesheuvel@linaro.org>	2019-10-14 18:16:45 +0200
committer	Herbert Xu <herbert@gondor.apana.org.au>	2019-10-25 17:06:05 +0200
commit		528282630ca961d6284f68f5f2f0057c86873bb6 (patch)
tree		307324123ac73bba9d91116fdcecd7a24161dffc /crypto/aegis128-core.c
parent		crypto: aegis128 - avoid function pointers for parameterization (diff)
crypto: aegis128 - duplicate init() and final() hooks in SIMD code
In order to speed up aegis128 processing even more, duplicate the init()
and final() routines as SIMD versions in their entirety. This results in
a 2x speedup on ARM Cortex-A57 for ~1500 byte packets (using AES
instructions).

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
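The patch itself only touches the generic core: it declares the two new SIMD entry points and calls them from the fast path, while their bodies live in the per-architecture SIMD code. As a rough illustration (not part of this patch), an arm64 glue file could wrap each whole routine in a single kernel-mode NEON section along the lines below; the *_neon() inner helpers are assumed names for the actual SIMD implementations, and struct aegis_state / union aegis_block come from the aegis128 headers.

#include <asm/neon.h>		/* kernel_neon_begin() / kernel_neon_end() */

void crypto_aegis128_init_simd(struct aegis_state *state,
			       const union aegis_block *key,
			       const u8 *iv)
{
	kernel_neon_begin();
	crypto_aegis128_init_neon(state, key, iv);	/* assumed SIMD helper */
	kernel_neon_end();
}

void crypto_aegis128_final_simd(struct aegis_state *state,
				union aegis_block *tag_xor,
				u64 assoclen, u64 cryptlen)
{
	kernel_neon_begin();
	crypto_aegis128_final_neon(state, tag_xor,	/* assumed SIMD helper */
				   assoclen, cryptlen);
	kernel_neon_end();
}

Keeping init() and final() entirely inside one kernel_neon_begin()/kernel_neon_end() pair avoids bouncing the cipher state between the scalar and vector register files around every state update, which is presumably what the quoted 2x figure reflects.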
Diffstat (limited to 'crypto/aegis128-core.c')
-rw-r--r--	crypto/aegis128-core.c	38
1 file changed, 26 insertions, 12 deletions
diff --git a/crypto/aegis128-core.c b/crypto/aegis128-core.c
index fe7ab66dd8f9..71c11cb5bad1 100644
--- a/crypto/aegis128-core.c
+++ b/crypto/aegis128-core.c
@@ -60,10 +60,16 @@ static bool aegis128_do_simd(void)
 
 bool crypto_aegis128_have_simd(void);
 void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
+void crypto_aegis128_init_simd(struct aegis_state *state,
+			       const union aegis_block *key,
+			       const u8 *iv);
 void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
 					const u8 *src, unsigned int size);
 void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
 					const u8 *src, unsigned int size);
+void crypto_aegis128_final_simd(struct aegis_state *state,
+				union aegis_block *tag_xor,
+				u64 assoclen, u64 cryptlen);
 
 static void crypto_aegis128_update(struct aegis_state *state)
 {
@@ -395,17 +401,21 @@ static int crypto_aegis128_encrypt(struct aead_request *req)
 	struct skcipher_walk walk;
 	struct aegis_state state;
 
-	crypto_aegis128_init(&state, &ctx->key, req->iv);
-	crypto_aegis128_process_ad(&state, req->src, req->assoclen);
-
 	skcipher_walk_aead_encrypt(&walk, req, false);
-	if (aegis128_do_simd())
+	if (aegis128_do_simd()) {
+		crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
+		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
 		crypto_aegis128_process_crypt(&state, req, &walk,
 					      crypto_aegis128_encrypt_chunk_simd);
-	else
+		crypto_aegis128_final_simd(&state, &tag, req->assoclen,
+					   cryptlen);
+	} else {
+		crypto_aegis128_init(&state, &ctx->key, req->iv);
+		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
 		crypto_aegis128_process_crypt(&state, req, &walk,
 					      crypto_aegis128_encrypt_chunk);
-	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+		crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+	}
 
 	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
 				 authsize, 1);
@@ -426,17 +436,21 @@ static int crypto_aegis128_decrypt(struct aead_request *req)
 	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
 				 authsize, 0);
 
-	crypto_aegis128_init(&state, &ctx->key, req->iv);
-	crypto_aegis128_process_ad(&state, req->src, req->assoclen);
-
 	skcipher_walk_aead_decrypt(&walk, req, false);
-	if (aegis128_do_simd())
+	if (aegis128_do_simd()) {
+		crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
+		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
 		crypto_aegis128_process_crypt(&state, req, &walk,
 					      crypto_aegis128_decrypt_chunk_simd);
-	else
+		crypto_aegis128_final_simd(&state, &tag, req->assoclen,
+					   cryptlen);
+	} else {
+		crypto_aegis128_init(&state, &ctx->key, req->iv);
+		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
 		crypto_aegis128_process_crypt(&state, req, &walk,
 					      crypto_aegis128_decrypt_chunk);
-	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+		crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
+	}
 
 	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
 }
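For context, aegis128_do_simd() is the predicate that selects between the two branches in the hunks above. Below is a minimal sketch of that style of runtime gate, not the exact code in aegis128-core.c: it assumes a static key named have_simd that the module enables at init time when crypto_aegis128_have_simd() reports support.

#include <crypto/internal/simd.h>	/* crypto_simd_usable() */
#include <linux/jump_label.h>		/* static key machinery */

static DEFINE_STATIC_KEY_FALSE(have_simd);	/* assumed name, enabled at module init */

static bool aegis128_do_simd(void)
{
	/* SIMD only when built in, detected on this CPU, and usable in this context. */
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    static_branch_likely(&have_simd))
		return crypto_simd_usable();

	return false;
}

With the static key patched out, a scalar-only configuration pays essentially nothing for the check, while crypto_simd_usable() keeps the SIMD branch out of contexts where the vector registers cannot safely be touched.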