author    | Pascal van Leeuwen <pascalvanl@gmail.com> | 2019-08-30 09:52:30 +0200
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2019-09-05 06:37:29 +0200
commit    | 3e450886ec573cb9d7cb1758317b5e4e0f308b52 (patch)
tree      | 7299e7a6ee67da108c6c56b0d6ac6ac39c6986a2 /drivers/crypto/inside-secure/safexcel_cipher.c
parent    | crypto: inside-secure - Minor code cleanup and optimizations (diff)
crypto: inside-secure - Added support for basic AES-GCM
This patch adds support for the basic AES-GCM AEAD cipher suite.
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
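
For background on what the new setkey path computes: GCM's authentication subkey H is the AES encryption of an all-zero block under the cipher key. The safexcel_aead_gcm_setkey() function added below derives H with a software "aes" fallback cipher (ctx->hkaes) so it can program H into the engine context and detect key changes for cache invalidation. A minimal stand-alone sketch of that derivation follows; the helper name derive_ghash_subkey is illustrative and not part of the patch.

#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/string.h>

/* Illustrative only: compute the GHASH subkey H = AES_K(0^128) with a
 * synchronous software AES cipher, mirroring what the driver does with
 * its ctx->hkaes fallback in safexcel_aead_gcm_setkey().
 */
static int derive_ghash_subkey(const u8 *key, unsigned int keylen,
			       u8 hashkey[AES_BLOCK_SIZE])
{
	struct crypto_cipher *aes;
	int ret;

	aes = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(aes))
		return PTR_ERR(aes);

	ret = crypto_cipher_setkey(aes, key, keylen);
	if (!ret) {
		/* Encrypt one all-zero block in place to obtain H */
		memset(hashkey, 0, AES_BLOCK_SIZE);
		crypto_cipher_encrypt_one(aes, hashkey, hashkey);
	}

	crypto_free_cipher(aes);
	return ret;
}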
Diffstat (limited to 'drivers/crypto/inside-secure/safexcel_cipher.c')
-rw-r--r-- | drivers/crypto/inside-secure/safexcel_cipher.c | 226
1 file changed, 188 insertions(+), 38 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 3c2b1f759dad..7091b6086e2e 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -14,6 +14,8 @@
 #include <crypto/authenc.h>
 #include <crypto/ctr.h>
 #include <crypto/internal/des.h>
+#include <crypto/gcm.h>
+#include <crypto/ghash.h>
 #include <crypto/sha.h>
 #include <crypto/xts.h>
 #include <crypto/skcipher.h>
@@ -40,6 +42,7 @@ struct safexcel_cipher_ctx {
 	u32 mode;
 	enum safexcel_cipher_alg alg;
 	bool aead;
+	int xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
 
 	__le32 key[16];
 	u32 nonce;
@@ -50,6 +53,8 @@ struct safexcel_cipher_ctx {
 	u32 state_sz;
 	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
 	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+
+	struct crypto_cipher *hkaes;
 };
 
 struct safexcel_cipher_req {
@@ -76,6 +81,15 @@ static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 		cdesc->control_data.token[3] = cpu_to_be32(1);
 
 		return;
+	} else if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_XCM) {
+		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
+
+		/* 96 bit IV part */
+		memcpy(&cdesc->control_data.token[0], iv, 12);
+		/* 32 bit counter, start at 1 (big endian!) */
+		cdesc->control_data.token[3] = cpu_to_be32(1);
+
+		return;
 	}
 
 	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
@@ -129,56 +143,68 @@ static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 	if (direction == SAFEXCEL_ENCRYPT) {
 		/* align end of instruction sequence to end of token */
 		token = (struct safexcel_token *)(cdesc->control_data.token +
-			EIP197_MAX_TOKENS - 3);
+			EIP197_MAX_TOKENS - 5);
 
-		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
-		token[2].packet_length = digestsize;
-		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
+		token[4].opcode = EIP197_TOKEN_OPCODE_INSERT;
+		token[4].packet_length = digestsize;
+		token[4].stat = EIP197_TOKEN_STAT_LAST_HASH |
 				EIP197_TOKEN_STAT_LAST_PACKET;
-		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
+		token[4].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 	} else {
 		cryptlen -= digestsize;
 
 		/* align end of instruction sequence to end of token */
 		token = (struct safexcel_token *)(cdesc->control_data.token +
-			EIP197_MAX_TOKENS - 4);
+			EIP197_MAX_TOKENS - 6);
 
-		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
-		token[2].packet_length = digestsize;
-		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
+		token[4].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
+		token[4].packet_length = digestsize;
+		token[4].stat = EIP197_TOKEN_STAT_LAST_HASH |
 				EIP197_TOKEN_STAT_LAST_PACKET;
-		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
+		token[4].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 
-		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
-		token[3].packet_length = digestsize |
+		token[5].opcode = EIP197_TOKEN_OPCODE_VERIFY;
+		token[5].packet_length = digestsize |
 					 EIP197_TOKEN_HASH_RESULT_VERIFY;
-		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
+		token[5].stat = EIP197_TOKEN_STAT_LAST_HASH |
 				EIP197_TOKEN_STAT_LAST_PACKET;
-		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
+		token[5].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
 	}
 
+	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+	token[0].packet_length = assoclen;
+
 	if (likely(cryptlen)) {
-		if (likely(assoclen)) {
-			token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
-			token[0].packet_length = assoclen;
-			token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
-		}
+		token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 
-		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
-		token[1].packet_length = cryptlen;
-		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
-		token[1].instructions = EIP197_TOKEN_INS_LAST |
+		token[3].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+		token[3].packet_length = cryptlen;
+		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH;
+		token[3].instructions = EIP197_TOKEN_INS_LAST |
 					EIP197_TOKEN_INS_TYPE_CRYPTO |
 					EIP197_TOKEN_INS_TYPE_HASH |
 					EIP197_TOKEN_INS_TYPE_OUTPUT;
 	} else {
-		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
-		token[1].packet_length = assoclen;
-		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
-		token[1].instructions = EIP197_TOKEN_INS_LAST |
+		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
+		token[0].instructions = EIP197_TOKEN_INS_LAST |
 					EIP197_TOKEN_INS_TYPE_HASH;
 	}
+
+	if (ctx->xcm) {
+		token[0].instructions = EIP197_TOKEN_INS_LAST |
+					EIP197_TOKEN_INS_TYPE_HASH;
+
+		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
+		token[1].packet_length = 0;
+		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
+		token[1].instructions = AES_BLOCK_SIZE;
+
+		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
+		token[2].packet_length = AES_BLOCK_SIZE;
+		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
+					EIP197_TOKEN_INS_TYPE_CRYPTO;
+	}
 }
 
 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
@@ -330,22 +356,27 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 
 	if (ctx->aead) {
 		/* Take in account the ipad+opad digests */
-		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
-
-		if (sreq->direction == SAFEXCEL_ENCRYPT)
+		if (ctx->xcm) {
+			ctrl_size += ctx->state_sz / sizeof(u32);
 			cdesc->control_data.control0 =
-				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT |
-				CONTEXT_CONTROL_DIGEST_HMAC |
 				CONTEXT_CONTROL_KEY_EN |
+				CONTEXT_CONTROL_DIGEST_XCM |
 				ctx->hash_alg |
 				CONTEXT_CONTROL_SIZE(ctrl_size);
-		else
+		} else {
+			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
 			cdesc->control_data.control0 =
-				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN |
-				CONTEXT_CONTROL_DIGEST_HMAC |
 				CONTEXT_CONTROL_KEY_EN |
+				CONTEXT_CONTROL_DIGEST_HMAC |
 				ctx->hash_alg |
 				CONTEXT_CONTROL_SIZE(ctrl_size);
+		}
+		if (sreq->direction == SAFEXCEL_ENCRYPT)
+			cdesc->control_data.control0 |=
+				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
+		else
+			cdesc->control_data.control0 |=
+				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
 	} else {
 		if (sreq->direction == SAFEXCEL_ENCRYPT)
 			cdesc->control_data.control0 =
@@ -485,9 +516,10 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
 			memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
 			       ctx->ipad, ctx->state_sz);
-			memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) /
-			       sizeof(u32),
-			       ctx->opad, ctx->state_sz);
+			if (!ctx->xcm)
+				memcpy(ctx->base.ctxr->data + (ctx->key_len +
+				       ctx->state_sz) / sizeof(u32), ctx->opad,
+				       ctx->state_sz);
 		} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 			   (sreq->direction == SAFEXCEL_DECRYPT)) {
 			/*
@@ -1893,3 +1925,121 @@ struct safexcel_alg_template safexcel_alg_xts_aes = {
 		},
 	},
 };
+
+static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
+				    unsigned int len)
+{
+	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct safexcel_crypto_priv *priv = ctx->priv;
+	struct crypto_aes_ctx aes;
+	u32 hashkey[AES_BLOCK_SIZE >> 2];
+	int ret, i;
+
+	ret = aes_expandkey(&aes, key, len);
+	if (ret) {
+		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		memzero_explicit(&aes, sizeof(aes));
+		return ret;
+	}
+
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+		for (i = 0; i < len / sizeof(u32); i++) {
+			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+				ctx->base.needs_inv = true;
+				break;
+			}
+		}
+	}
+
+	for (i = 0; i < len / sizeof(u32); i++)
+		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+	ctx->key_len = len;
+
+	/* Compute hash key by encrypting zeroes with cipher key */
+	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
+	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
+				CRYPTO_TFM_REQ_MASK);
+	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
+	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
+			      CRYPTO_TFM_RES_MASK);
+	if (ret)
+		return ret;
+
+	memset(hashkey, 0, AES_BLOCK_SIZE);
+	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
+
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
+			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
+				ctx->base.needs_inv = true;
+				break;
+			}
+		}
+	}
+
+	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
+		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
+
+	memzero_explicit(hashkey, AES_BLOCK_SIZE);
+	memzero_explicit(&aes, sizeof(aes));
+	return 0;
+}
+
+static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_aead_cra_init(tfm);
+	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
+	ctx->state_sz = GHASH_BLOCK_SIZE;
+	ctx->xcm = 1; /* GCM */
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
+
+	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
+	if (IS_ERR(ctx->hkaes))
+		return PTR_ERR(ctx->hkaes);
+
+	return 0;
+}
+
+static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	crypto_free_cipher(ctx->hkaes);
+	safexcel_aead_cra_exit(tfm);
+}
+
+static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
+					 unsigned int authsize)
+{
+	return crypto_gcm_check_authsize(authsize);
+}
+
+struct safexcel_alg_template safexcel_alg_gcm = {
+	.type = SAFEXCEL_ALG_TYPE_AEAD,
+	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
+	.alg.aead = {
+		.setkey = safexcel_aead_gcm_setkey,
+		.setauthsize = safexcel_aead_gcm_setauthsize,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
+		.ivsize = GCM_AES_IV_SIZE,
+		.maxauthsize = GHASH_DIGEST_SIZE,
+		.base = {
+			.cra_name = "gcm(aes)",
+			.cra_driver_name = "safexcel-gcm-aes",
+			.cra_priority = SAFEXCEL_CRA_PRIORITY,
+			.cra_flags = CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = 1,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_aead_gcm_cra_init,
+			.cra_exit = safexcel_aead_gcm_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
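
The safexcel_alg_gcm template registers the engine under the generic "gcm(aes)" name, so in-kernel users of the AEAD API pick it up purely by priority. As a rough usage illustration (not part of the patch; the function name and the in-place buffer layout are assumptions for the example), a one-shot encryption through that interface could look like this:

#include <crypto/aead.h>
#include <crypto/gcm.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Illustrative only: encrypt 'len' bytes in 'buf' in place with AES-GCM
 * and append the 16-byte tag ('buf' must have room for len + 16 bytes).
 * "gcm(aes)" resolves to safexcel-gcm-aes when this driver is loaded and
 * wins the priority comparison, otherwise to another implementation.
 */
static int gcm_encrypt_example(const u8 *key, unsigned int keylen,
			       u8 iv[GCM_AES_IV_SIZE], u8 *buf,
			       unsigned int len)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	ret = crypto_aead_setauthsize(tfm, 16);	/* full GHASH tag */
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* No AAD in this example; ciphertext + tag are written back in place */
	sg_init_one(&sg, buf, len + 16);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, 0);
	aead_request_set_crypt(req, &sg, &sg, len, iv);

	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}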