| author | Harald Freudenberger <freude@linux.ibm.com> | 2019-07-19 15:22:26 +0200 |
|---|---|---|
| committer | Vasily Gorbik <gor@linux.ibm.com> | 2019-08-21 12:58:54 +0200 |
| commit | 416f79c23dbe47e0e223efc06d3487e1d90a92ee (patch) | |
| tree | 8dbf1a73746fb3e9657f9033bf807f25acfc647b /arch/s390/crypto | |
| parent | s390/pkey: add CCA AES cipher key support (diff) | |
| download | linux-416f79c23dbe47e0e223efc06d3487e1d90a92ee.tar.xz linux-416f79c23dbe47e0e223efc06d3487e1d90a92ee.zip | |
s390/paes: Prepare paes functions for large key blobs
The context used to store the key blob in a fixed 80-byte buffer, and none of
the set_key functions checked the given key size. With CCA variable-length AES
cipher keys, key blobs of about 136 bytes show up, and even larger key blobs
may need to be stored in the future.

This patch reworks the paes set_key functions and the context buffers so that
small key blobs (<= 128 bytes) are kept directly in the context buffer, while
larger blobs are handled by allocating additional memory and storing the
pointer in the context buffer. If memory has been allocated for a key blob, it
also needs to be freed when the tfm is released, so all the paes ciphers now
implement an init and an exit function for this job.
Signed-off-by: Harald Freudenberger <freude@linux.ibm.com>
Reviewed-by: Ingo Franzki <ifranzki@linux.ibm.com>
Signed-off-by: Vasily Gorbik <gor@linux.ibm.com>
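
The scheme the commit message describes, small blobs stored inline and larger blobs spilled to allocated memory, can be distilled into a short standalone sketch. The following userspace C analogue mirrors the patched `struct key_blob` and its helpers; the names `copy_key_to_kb`/`free_kb_keybuf`, the use of malloc/free instead of kmalloc/kfree, and the `-1` error return are illustrative substitutions, not the kernel code itself.

```c
#include <stdlib.h>
#include <string.h>

/*
 * Illustrative userspace analogue of the patched struct key_blob:
 * blobs up to sizeof(keybuf) live inline in the context, larger ones
 * go to separately allocated memory. kb->key always points at the blob.
 */
struct key_blob {
	unsigned char *key;          /* points at keybuf or heap memory */
	unsigned char keybuf[128];   /* inline storage for small blobs */
	unsigned int keylen;
};

static int copy_key_to_kb(struct key_blob *kb,
			  const unsigned char *key, unsigned int keylen)
{
	if (keylen <= sizeof(kb->keybuf)) {
		kb->key = kb->keybuf;       /* small blob: use inline buffer */
	} else {
		kb->key = malloc(keylen);   /* large blob: allocate extra memory */
		if (!kb->key)
			return -1;          /* kernel code returns -ENOMEM here */
	}
	memcpy(kb->key, key, keylen);
	kb->keylen = keylen;
	return 0;
}

static void free_kb_keybuf(struct key_blob *kb)
{
	/*
	 * Only heap-allocated blobs need freeing; the kernel helper
	 * additionally checks keylen > sizeof(keybuf).
	 */
	if (kb->key && kb->key != kb->keybuf) {
		free(kb->key);
		kb->key = NULL;
	}
}
```

Because the heap path can leave memory behind in the context, every cipher also gains an exit callback that calls the free helper, which is exactly what the init/exit functions added by the patch do.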
Diffstat (limited to 'arch/s390/crypto')
-rw-r--r-- | arch/s390/crypto/paes_s390.c | 184 |
1 file changed, 160 insertions(+), 24 deletions(-)
```diff
diff --git a/arch/s390/crypto/paes_s390.c b/arch/s390/crypto/paes_s390.c
index e8d9fa54569c..6184dceed340 100644
--- a/arch/s390/crypto/paes_s390.c
+++ b/arch/s390/crypto/paes_s390.c
@@ -5,7 +5,7 @@
  * s390 implementation of the AES Cipher Algorithm with protected keys.
  *
  * s390 Version:
- *   Copyright IBM Corp. 2017
+ *   Copyright IBM Corp. 2017,2019
  * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
  *            Harald Freudenberger <freude@de.ibm.com>
  */
@@ -25,16 +25,59 @@
 #include <asm/cpacf.h>
 #include <asm/pkey.h>
 
+/*
+ * Key blobs smaller/bigger than these defines are rejected
+ * by the common code even before the individual setkey function
+ * is called. As paes can handle different kinds of key blobs
+ * and padding is also possible, the limits need to be generous.
+ */
+#define PAES_MIN_KEYSIZE 64
+#define PAES_MAX_KEYSIZE 256
+
 static u8 *ctrblk;
 static DEFINE_SPINLOCK(ctrblk_lock);
 
 static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;
 
 struct key_blob {
-	__u8 key[MAXKEYBLOBSIZE];
+	/*
+	 * Small keys will be stored in the keybuf. Larger keys are
+	 * stored in extra allocated memory. In both cases does
+	 * key point to the memory where the key is stored.
+	 * The code distinguishes by checking keylen against
+	 * sizeof(keybuf). See the two following helper functions.
+	 */
+	u8 *key;
+	u8 keybuf[128];
 	unsigned int keylen;
 };
 
+static inline int _copy_key_to_kb(struct key_blob *kb,
+				  const u8 *key,
+				  unsigned int keylen)
+{
+	if (keylen <= sizeof(kb->keybuf))
+		kb->key = kb->keybuf;
+	else {
+		kb->key = kmalloc(keylen, GFP_KERNEL);
+		if (!kb->key)
+			return -ENOMEM;
+	}
+	memcpy(kb->key, key, keylen);
+	kb->keylen = keylen;
+
+	return 0;
+}
+
+static inline void _free_kb_keybuf(struct key_blob *kb)
+{
+	if (kb->key && kb->key != kb->keybuf
+	    && kb->keylen > sizeof(kb->keybuf)) {
+		kfree(kb->key);
+		kb->key = NULL;
+	}
+}
+
 struct s390_paes_ctx {
 	struct key_blob kb;
 	struct pkey_protkey pk;
@@ -80,13 +123,33 @@ static int __paes_set_key(struct s390_paes_ctx *ctx)
 	return ctx->fc ? 0 : -EINVAL;
 }
 
+static int ecb_paes_init(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	ctx->kb.key = NULL;
+
+	return 0;
+}
+
+static void ecb_paes_exit(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	_free_kb_keybuf(&ctx->kb);
+}
+
 static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			    unsigned int key_len)
 {
+	int rc;
 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	memcpy(ctx->kb.key, in_key, key_len);
-	ctx->kb.keylen = key_len;
+	_free_kb_keybuf(&ctx->kb);
+	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+	if (rc)
+		return rc;
+
 	if (__paes_set_key(ctx)) {
 		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
@@ -148,10 +211,12 @@ static struct crypto_alg ecb_paes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(ecb_paes_alg.cra_list),
+	.cra_init = ecb_paes_init,
+	.cra_exit = ecb_paes_exit,
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = MINKEYBLOBSIZE,
-			.max_keysize = MAXKEYBLOBSIZE,
+			.min_keysize = PAES_MIN_KEYSIZE,
+			.max_keysize = PAES_MAX_KEYSIZE,
 			.setkey = ecb_paes_set_key,
 			.encrypt = ecb_paes_encrypt,
 			.decrypt = ecb_paes_decrypt,
@@ -159,6 +224,22 @@ static struct crypto_alg ecb_paes_alg = {
 	}
 };
 
+static int cbc_paes_init(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	ctx->kb.key = NULL;
+
+	return 0;
+}
+
+static void cbc_paes_exit(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	_free_kb_keybuf(&ctx->kb);
+}
+
 static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
 {
 	unsigned long fc;
@@ -180,10 +261,14 @@ static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
 static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			    unsigned int key_len)
 {
+	int rc;
 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	memcpy(ctx->kb.key, in_key, key_len);
-	ctx->kb.keylen = key_len;
+	_free_kb_keybuf(&ctx->kb);
+	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+	if (rc)
+		return rc;
+
 	if (__cbc_paes_set_key(ctx)) {
 		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
@@ -252,10 +337,12 @@ static struct crypto_alg cbc_paes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(cbc_paes_alg.cra_list),
+	.cra_init = cbc_paes_init,
+	.cra_exit = cbc_paes_exit,
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = MINKEYBLOBSIZE,
-			.max_keysize = MAXKEYBLOBSIZE,
+			.min_keysize = PAES_MIN_KEYSIZE,
+			.max_keysize = PAES_MAX_KEYSIZE,
 			.ivsize = AES_BLOCK_SIZE,
 			.setkey = cbc_paes_set_key,
 			.encrypt = cbc_paes_encrypt,
@@ -264,6 +351,24 @@ static struct crypto_alg cbc_paes_alg = {
 	}
 };
 
+static int xts_paes_init(struct crypto_tfm *tfm)
+{
+	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	ctx->kb[0].key = NULL;
+	ctx->kb[1].key = NULL;
+
+	return 0;
+}
+
+static void xts_paes_exit(struct crypto_tfm *tfm)
+{
+	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	_free_kb_keybuf(&ctx->kb[0]);
+	_free_kb_keybuf(&ctx->kb[1]);
+}
+
 static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
 {
 	unsigned long fc;
@@ -287,20 +392,27 @@ static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
 }
 
 static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
-			    unsigned int key_len)
+			    unsigned int xts_key_len)
 {
+	int rc;
 	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
 	u8 ckey[2 * AES_MAX_KEY_SIZE];
-	unsigned int ckey_len, keytok_len;
+	unsigned int ckey_len, key_len;
 
-	if (key_len % 2)
+	if (xts_key_len % 2)
 		return -EINVAL;
 
-	keytok_len = key_len / 2;
-	memcpy(ctx->kb[0].key, in_key, keytok_len);
-	ctx->kb[0].keylen = keytok_len;
-	memcpy(ctx->kb[1].key, in_key + keytok_len, keytok_len);
-	ctx->kb[1].keylen = keytok_len;
+	key_len = xts_key_len / 2;
+
+	_free_kb_keybuf(&ctx->kb[0]);
+	_free_kb_keybuf(&ctx->kb[1]);
+	rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
+	if (rc)
+		return rc;
+	rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
+	if (rc)
+		return rc;
+
 	if (__xts_paes_set_key(ctx)) {
 		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
@@ -394,10 +506,12 @@ static struct crypto_alg xts_paes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(xts_paes_alg.cra_list),
+	.cra_init = xts_paes_init,
+	.cra_exit = xts_paes_exit,
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = 2 * MINKEYBLOBSIZE,
-			.max_keysize = 2 * MAXKEYBLOBSIZE,
+			.min_keysize = 2 * PAES_MIN_KEYSIZE,
+			.max_keysize = 2 * PAES_MAX_KEYSIZE,
 			.ivsize = AES_BLOCK_SIZE,
 			.setkey = xts_paes_set_key,
 			.encrypt = xts_paes_encrypt,
@@ -406,6 +520,22 @@ static struct crypto_alg xts_paes_alg = {
 	}
 };
 
+static int ctr_paes_init(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	ctx->kb.key = NULL;
+
+	return 0;
+}
+
+static void ctr_paes_exit(struct crypto_tfm *tfm)
+{
+	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	_free_kb_keybuf(&ctx->kb);
+}
+
 static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
 {
 	unsigned long fc;
@@ -428,10 +558,14 @@ static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
 static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 			    unsigned int key_len)
 {
+	int rc;
 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
 
-	memcpy(ctx->kb.key, in_key, key_len);
-	ctx->kb.keylen = key_len;
+	_free_kb_keybuf(&ctx->kb);
+	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
+	if (rc)
+		return rc;
+
 	if (__ctr_paes_set_key(ctx)) {
 		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 		return -EINVAL;
@@ -541,10 +675,12 @@ static struct crypto_alg ctr_paes_alg = {
 	.cra_type = &crypto_blkcipher_type,
 	.cra_module = THIS_MODULE,
 	.cra_list = LIST_HEAD_INIT(ctr_paes_alg.cra_list),
+	.cra_init = ctr_paes_init,
+	.cra_exit = ctr_paes_exit,
 	.cra_u = {
 		.blkcipher = {
-			.min_keysize = MINKEYBLOBSIZE,
-			.max_keysize = MAXKEYBLOBSIZE,
+			.min_keysize = PAES_MIN_KEYSIZE,
+			.max_keysize = PAES_MAX_KEYSIZE,
 			.ivsize = AES_BLOCK_SIZE,
 			.setkey = ctr_paes_set_key,
 			.encrypt = ctr_paes_encrypt,
```
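
For orientation, here is a hedged in-kernel sketch (not part of the patch) of how the widened key-size limits get exercised: a caller hands an opaque CCA key blob, possibly larger than 128 bytes, to the "cbc(paes)" algorithm through the skcipher API, which at this kernel version could still wrap the legacy blkcipher implementation. The function name `try_paes_setkey` and the idea of passing a pre-built blob buffer are assumptions for illustration; a real blob would come from the pkey interface.

```c
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/types.h>

/*
 * Hypothetical caller: hand a (possibly >128 byte) CCA key blob to the
 * paes cbc mode and report whether setkey accepted it.
 */
static int try_paes_setkey(const u8 *blob, unsigned int bloblen)
{
	struct crypto_skcipher *tfm;
	int rc;

	tfm = crypto_alloc_skcipher("cbc(paes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * With this patch, bloblen may be anywhere between PAES_MIN_KEYSIZE
	 * and PAES_MAX_KEYSIZE; blobs larger than the 128-byte inline buffer
	 * are copied into kmalloc'ed memory by _copy_key_to_kb() and freed
	 * again by the cipher's exit function when the tfm goes away.
	 */
	rc = crypto_skcipher_setkey(tfm, blob, bloblen);

	crypto_free_skcipher(tfm);
	return rc;
}
```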