author | Herbert Xu <herbert@gondor.apana.org.au> | 2023-09-14 10:28:27 +0200
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2023-09-20 07:15:29 +0200
commit | 32a8dc4afcfb098ef4e8b465c90db17d22d90107
tree | 88c6658cb8ddd19441c307cc65ef7db4a82810af /crypto/ecb.c
parent | crypto: testmgr - Add support for lskcipher algorithms
download | linux-32a8dc4afcfb098ef4e8b465c90db17d22d90107.tar.xz, linux-32a8dc4afcfb098ef4e8b465c90db17d22d90107.zip
crypto: ecb - Convert from skcipher to lskcipher
This patch adds two different implementations of ECB. First, an
lskcipher wrapper around existing ciphers is introduced as a
temporary transition aid.

Second, a permanent lskcipher template is added. It is simply a
wrapper around the underlying lskcipher algorithm.
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
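As background for the diff below, the reworked crypto_ecb_crypt() drops the skcipher_walk and instead processes whatever contiguous buffer it is handed: it encrypts or decrypts every full block, returns the number of leftover bytes so the caller can carry them into the next call, and only treats a trailing partial block as an error when the request is marked final. The following is a minimal, self-contained userspace C sketch of that calling convention; the block_fn type, toy_xor_block() cipher and ecb_crypt_sketch() name are illustrative stand-ins, not kernel APIs.

#include <stdio.h>
#include <stdint.h>

#define BLOCK_SIZE 16

/* Stand-in for the kernel's single-block cia_encrypt/cia_decrypt hook. */
typedef void (*block_fn)(uint8_t *dst, const uint8_t *src, const uint8_t *key);

/* Toy "cipher": XOR the block with the key (illustration only, not secure). */
static void toy_xor_block(uint8_t *dst, const uint8_t *src, const uint8_t *key)
{
	for (int i = 0; i < BLOCK_SIZE; i++)
		dst[i] = src[i] ^ key[i];
}

/*
 * ECB loop in the style of the new crypto_ecb_crypt(): handle every full
 * block, then return the leftover byte count.  A trailing partial block is
 * only an error (-1 here, -EINVAL in the kernel) when this is the final
 * chunk of the stream.
 */
static int ecb_crypt_sketch(block_fn fn, const uint8_t *key, const uint8_t *src,
			    uint8_t *dst, unsigned int nbytes, int final)
{
	while (nbytes >= BLOCK_SIZE) {
		fn(dst, src, key);
		src += BLOCK_SIZE;
		dst += BLOCK_SIZE;
		nbytes -= BLOCK_SIZE;
	}
	return nbytes && final ? -1 : (int)nbytes;
}

int main(void)
{
	uint8_t key[BLOCK_SIZE] = { 0x2b, 0x7e, 0x15, 0x16 };
	uint8_t buf[3 * BLOCK_SIZE] = "three blocks of data, zero padded";
	uint8_t out[sizeof(buf)];

	/* Non-final call: leftover bytes are reported back, not rejected. */
	int left = ecb_crypt_sketch(toy_xor_block, key, buf, out,
				    sizeof(buf) - 2, 0);
	printf("leftover bytes after non-final call: %d\n", left);

	/* Final call with a partial block fails, as in the kernel helper. */
	int err = ecb_crypt_sketch(toy_xor_block, key, buf, out,
				   BLOCK_SIZE + 3, 1);
	printf("final call with partial block: %d\n", err);
	return 0;
}

Returning the leftover count rather than failing outright is what lets the lskcipher front end split a large request into chunks and only enforce block alignment on the last one.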
Diffstat (limited to 'crypto/ecb.c')
-rw-r--r-- | crypto/ecb.c | 190
1 file changed, 156 insertions(+), 34 deletions(-)
diff --git a/crypto/ecb.c b/crypto/ecb.c
index 71fbb0543d64..cc7625d1a475 100644
--- a/crypto/ecb.c
+++ b/crypto/ecb.c
@@ -5,75 +5,196 @@
  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  */
 
-#include <crypto/algapi.h>
 #include <crypto/internal/cipher.h>
 #include <crypto/internal/skcipher.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
+#include <linux/slab.h>
 
-static int crypto_ecb_crypt(struct skcipher_request *req,
-			    struct crypto_cipher *cipher,
+static int crypto_ecb_crypt(struct crypto_cipher *cipher, const u8 *src,
+			    u8 *dst, unsigned nbytes, bool final,
 			    void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
 {
 	const unsigned int bsize = crypto_cipher_blocksize(cipher);
-	struct skcipher_walk walk;
-	unsigned int nbytes;
-	int err;
-
-	err = skcipher_walk_virt(&walk, req, false);
 
-	while ((nbytes = walk.nbytes) != 0) {
-		const u8 *src = walk.src.virt.addr;
-		u8 *dst = walk.dst.virt.addr;
+	while (nbytes >= bsize) {
+		fn(crypto_cipher_tfm(cipher), dst, src);
 
-		do {
-			fn(crypto_cipher_tfm(cipher), dst, src);
+		src += bsize;
+		dst += bsize;
 
-			src += bsize;
-			dst += bsize;
-		} while ((nbytes -= bsize) >= bsize);
-
-		err = skcipher_walk_done(&walk, nbytes);
+		nbytes -= bsize;
 	}
 
-	return err;
+	return nbytes && final ? -EINVAL : nbytes;
 }
 
-static int crypto_ecb_encrypt(struct skcipher_request *req)
+static int crypto_ecb_encrypt2(struct crypto_lskcipher *tfm, const u8 *src,
+			       u8 *dst, unsigned len, u8 *iv, bool final)
 {
-	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
+	struct crypto_cipher **ctx = crypto_lskcipher_ctx(tfm);
+	struct crypto_cipher *cipher = *ctx;
 
-	return crypto_ecb_crypt(req, cipher,
+	return crypto_ecb_crypt(cipher, src, dst, len, final,
 				crypto_cipher_alg(cipher)->cia_encrypt);
 }
 
-static int crypto_ecb_decrypt(struct skcipher_request *req)
+static int crypto_ecb_decrypt2(struct crypto_lskcipher *tfm, const u8 *src,
+			       u8 *dst, unsigned len, u8 *iv, bool final)
 {
-	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
-	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
+	struct crypto_cipher **ctx = crypto_lskcipher_ctx(tfm);
+	struct crypto_cipher *cipher = *ctx;
 
-	return crypto_ecb_crypt(req, cipher,
+	return crypto_ecb_crypt(cipher, src, dst, len, final,
 				crypto_cipher_alg(cipher)->cia_decrypt);
 }
 
-static int crypto_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
+static int lskcipher_setkey_simple2(struct crypto_lskcipher *tfm,
+				    const u8 *key, unsigned int keylen)
+{
+	struct crypto_cipher **ctx = crypto_lskcipher_ctx(tfm);
+	struct crypto_cipher *cipher = *ctx;
+
+	crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
+	crypto_cipher_set_flags(cipher, crypto_lskcipher_get_flags(tfm) &
+				CRYPTO_TFM_REQ_MASK);
+	return crypto_cipher_setkey(cipher, key, keylen);
+}
+
+static int lskcipher_init_tfm_simple2(struct crypto_lskcipher *tfm)
+{
+	struct lskcipher_instance *inst = lskcipher_alg_instance(tfm);
+	struct crypto_cipher **ctx = crypto_lskcipher_ctx(tfm);
+	struct crypto_cipher_spawn *spawn;
+	struct crypto_cipher *cipher;
+
+	spawn = lskcipher_instance_ctx(inst);
+	cipher = crypto_spawn_cipher(spawn);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	*ctx = cipher;
+	return 0;
+}
+
+static void lskcipher_exit_tfm_simple2(struct crypto_lskcipher *tfm)
+{
+	struct crypto_cipher **ctx = crypto_lskcipher_ctx(tfm);
+
+	crypto_free_cipher(*ctx);
+}
+
+static void lskcipher_free_instance_simple2(struct lskcipher_instance *inst)
+{
+	crypto_drop_cipher(lskcipher_instance_ctx(inst));
+	kfree(inst);
+}
+
+static struct lskcipher_instance *lskcipher_alloc_instance_simple2(
+	struct crypto_template *tmpl, struct rtattr **tb)
+{
+	struct crypto_cipher_spawn *spawn;
+	struct lskcipher_instance *inst;
+	struct crypto_alg *cipher_alg;
+	u32 mask;
+	int err;
+
+	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_LSKCIPHER, &mask);
+	if (err)
+		return ERR_PTR(err);
+
+	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+	if (!inst)
+		return ERR_PTR(-ENOMEM);
+	spawn = lskcipher_instance_ctx(inst);
+
+	err = crypto_grab_cipher(spawn, lskcipher_crypto_instance(inst),
+				 crypto_attr_alg_name(tb[1]), 0, mask);
+	if (err)
+		goto err_free_inst;
+	cipher_alg = crypto_spawn_cipher_alg(spawn);
+
+	err = crypto_inst_setname(lskcipher_crypto_instance(inst), tmpl->name,
+				  cipher_alg);
+	if (err)
+		goto err_free_inst;
+
+	inst->free = lskcipher_free_instance_simple2;
+
+	/* Default algorithm properties, can be overridden */
+	inst->alg.co.base.cra_blocksize = cipher_alg->cra_blocksize;
+	inst->alg.co.base.cra_alignmask = cipher_alg->cra_alignmask;
+	inst->alg.co.base.cra_priority = cipher_alg->cra_priority;
+	inst->alg.co.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
+	inst->alg.co.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
+	inst->alg.co.ivsize = cipher_alg->cra_blocksize;
+
+	/* Use struct crypto_cipher * by default, can be overridden */
+	inst->alg.co.base.cra_ctxsize = sizeof(struct crypto_cipher *);
+	inst->alg.setkey = lskcipher_setkey_simple2;
+	inst->alg.init = lskcipher_init_tfm_simple2;
+	inst->alg.exit = lskcipher_exit_tfm_simple2;
+
+	return inst;
+
+err_free_inst:
+	lskcipher_free_instance_simple2(inst);
+	return ERR_PTR(err);
+}
+
+static int crypto_ecb_create2(struct crypto_template *tmpl, struct rtattr **tb)
 {
-	struct skcipher_instance *inst;
+	struct lskcipher_instance *inst;
 	int err;
 
-	inst = skcipher_alloc_instance_simple(tmpl, tb);
+	inst = lskcipher_alloc_instance_simple2(tmpl, tb);
 	if (IS_ERR(inst))
 		return PTR_ERR(inst);
 
-	inst->alg.ivsize = 0; /* ECB mode doesn't take an IV */
+	/* ECB mode doesn't take an IV */
+	inst->alg.co.ivsize = 0;
+
+	inst->alg.encrypt = crypto_ecb_encrypt2;
+	inst->alg.decrypt = crypto_ecb_decrypt2;
+
+	err = lskcipher_register_instance(tmpl, inst);
+	if (err)
+		inst->free(inst);
+
+	return err;
+}
+
+static int crypto_ecb_create(struct crypto_template *tmpl, struct rtattr **tb)
+{
+	struct crypto_lskcipher_spawn *spawn;
+	struct lskcipher_alg *cipher_alg;
+	struct lskcipher_instance *inst;
+	int err;
+
+	inst = lskcipher_alloc_instance_simple(tmpl, tb);
+	if (IS_ERR(inst)) {
+		err = crypto_ecb_create2(tmpl, tb);
+		return err;
+	}
+
+	spawn = lskcipher_instance_ctx(inst);
+	cipher_alg = crypto_lskcipher_spawn_alg(spawn);
+
+	/* ECB mode doesn't take an IV */
+	inst->alg.co.ivsize = 0;
+	if (cipher_alg->co.ivsize)
+		return -EINVAL;
 
-	inst->alg.encrypt = crypto_ecb_encrypt;
-	inst->alg.decrypt = crypto_ecb_decrypt;
+	inst->alg.co.base.cra_ctxsize = cipher_alg->co.base.cra_ctxsize;
+	inst->alg.setkey = cipher_alg->setkey;
+	inst->alg.encrypt = cipher_alg->encrypt;
+	inst->alg.decrypt = cipher_alg->decrypt;
+	inst->alg.init = cipher_alg->init;
+	inst->alg.exit = cipher_alg->exit;
 
-	err = skcipher_register_instance(tmpl, inst);
+	err = lskcipher_register_instance(tmpl, inst);
 	if (err)
 		inst->free(inst);
 
@@ -102,3 +223,4 @@ module_exit(crypto_ecb_module_exit);
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("ECB block cipher mode of operation");
 MODULE_ALIAS_CRYPTO("ecb");
+MODULE_IMPORT_NS(CRYPTO_INTERNAL);