author     Eric Biggers <ebiggers@google.com>        2018-02-20 08:48:02 +0100
committer  Herbert Xu <herbert@gondor.apana.org.au>  2018-03-02 17:03:19 +0100
commit     8bab4e3cd51f2143a10d5ca6b0ae5b8fc08c72bd (patch)
tree       dab24e89c4d4730c0db56a380bd06725ef893a64
parent     crypto: x86/serpent-sse2 - remove LRW algorithm (diff)
download   linux-8bab4e3cd51f2143a10d5ca6b0ae5b8fc08c72bd.tar.xz
           linux-8bab4e3cd51f2143a10d5ca6b0ae5b8fc08c72bd.zip
crypto: x86/serpent-sse2 - remove XTS algorithm
The XTS template now wraps an ECB mode algorithm rather than the block
cipher directly.  Therefore it is now redundant for crypto modules to
wrap their ECB code with generic XTS code themselves via xts_crypt().

Remove the xts-serpent-sse2 algorithm which did this.  Users who request
xts(serpent) and previously would have gotten xts-serpent-sse2 will now
get xts(ecb-serpent-sse2) instead, which is just as fast.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
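As a rough illustration (not part of this patch), a caller that allocates the
generic "xts(serpent)" skcipher through the normal crypto API is unaffected by
the removal; the xts template simply composes over the accelerated ECB driver.
The module below is a minimal, hypothetical sketch using only standard crypto
API calls; the name serpent_xts_demo and the printout are illustrative only.

/*
 * Minimal sketch (not from the patch): after this change, a request for
 * "xts(serpent)" is served by the xts template wrapped around
 * ecb-serpent-sse2 instead of a dedicated xts-serpent-sse2 driver.
 */
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/skcipher.h>

static int __init serpent_xts_demo_init(void)
{
	struct crypto_skcipher *tfm;

	/* Ask for the generic algorithm name; the API picks the best driver. */
	tfm = crypto_alloc_skcipher("xts(serpent)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Expected to report something like "xts(ecb-serpent-sse2)". */
	pr_info("xts(serpent) resolved to driver: %s\n",
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)));

	crypto_free_skcipher(tfm);
	return 0;
}

static void __exit serpent_xts_demo_exit(void)
{
}

module_init(serpent_xts_demo_init);
module_exit(serpent_xts_demo_exit);
MODULE_LICENSE("GPL");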
-rw-r--r--  arch/x86/crypto/serpent_sse2_glue.c  172
-rw-r--r--  crypto/Kconfig                         2
2 files changed, 0 insertions, 174 deletions
diff --git a/arch/x86/crypto/serpent_sse2_glue.c b/arch/x86/crypto/serpent_sse2_glue.c
index e9de69b45184..9e2734384ce5 100644
--- a/arch/x86/crypto/serpent_sse2_glue.c
+++ b/arch/x86/crypto/serpent_sse2_glue.c
@@ -40,7 +40,6 @@
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
-#include <crypto/xts.h>
#include <asm/crypto/serpent-sse2.h>
#include <asm/crypto/glue_helper.h>
@@ -170,134 +169,6 @@ static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}
-static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
-{
- return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
- NULL, fpu_enabled, nbytes);
-}
-
-static inline void serpent_fpu_end(bool fpu_enabled)
-{
- glue_fpu_end(fpu_enabled);
-}
-
-struct crypt_priv {
- struct serpent_ctx *ctx;
- bool fpu_enabled;
-};
-
-static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
- const unsigned int bsize = SERPENT_BLOCK_SIZE;
- struct crypt_priv *ctx = priv;
- int i;
-
- ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
-
- if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
- serpent_enc_blk_xway(ctx->ctx, srcdst, srcdst);
- return;
- }
-
- for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
- __serpent_encrypt(ctx->ctx, srcdst, srcdst);
-}
-
-static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
-{
- const unsigned int bsize = SERPENT_BLOCK_SIZE;
- struct crypt_priv *ctx = priv;
- int i;
-
- ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
-
- if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
- serpent_dec_blk_xway(ctx->ctx, srcdst, srcdst);
- return;
- }
-
- for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
- __serpent_decrypt(ctx->ctx, srcdst, srcdst);
-}
-
-struct serpent_xts_ctx {
- struct serpent_ctx tweak_ctx;
- struct serpent_ctx crypt_ctx;
-};
-
-static int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
- int err;
-
- err = xts_check_key(tfm, key, keylen);
- if (err)
- return err;
-
- /* first half of xts-key is for crypt */
- err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
- if (err)
- return err;
-
- /* second half of xts-key is for tweak */
- return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
-}
-
-static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
- struct scatterlist *src, unsigned int nbytes)
-{
- struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
- le128 buf[SERPENT_PARALLEL_BLOCKS];
- struct crypt_priv crypt_ctx = {
- .ctx = &ctx->crypt_ctx,
- .fpu_enabled = false,
- };
- struct xts_crypt_req req = {
- .tbuf = buf,
- .tbuflen = sizeof(buf),
-
- .tweak_ctx = &ctx->tweak_ctx,
- .tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
- .crypt_ctx = &crypt_ctx,
- .crypt_fn = encrypt_callback,
- };
- int ret;
-
- desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
- ret = xts_crypt(desc, dst, src, nbytes, &req);
- serpent_fpu_end(crypt_ctx.fpu_enabled);
-
- return ret;
-}
-
-static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
- struct scatterlist *src, unsigned int nbytes)
-{
- struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
- le128 buf[SERPENT_PARALLEL_BLOCKS];
- struct crypt_priv crypt_ctx = {
- .ctx = &ctx->crypt_ctx,
- .fpu_enabled = false,
- };
- struct xts_crypt_req req = {
- .tbuf = buf,
- .tbuflen = sizeof(buf),
-
- .tweak_ctx = &ctx->tweak_ctx,
- .tweak_fn = XTS_TWEAK_CAST(__serpent_encrypt),
- .crypt_ctx = &crypt_ctx,
- .crypt_fn = decrypt_callback,
- };
- int ret;
-
- desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
- ret = xts_crypt(desc, dst, src, nbytes, &req);
- serpent_fpu_end(crypt_ctx.fpu_enabled);
-
- return ret;
-}
-
static struct crypto_alg serpent_algs[] = { {
.cra_name = "__ecb-serpent-sse2",
.cra_driver_name = "__driver-ecb-serpent-sse2",
@@ -360,27 +231,6 @@ static struct crypto_alg serpent_algs[] = { {
},
},
}, {
- .cra_name = "__xts-serpent-sse2",
- .cra_driver_name = "__driver-xts-serpent-sse2",
- .cra_priority = 0,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
- CRYPTO_ALG_INTERNAL,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct serpent_xts_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_blkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_u = {
- .blkcipher = {
- .min_keysize = SERPENT_MIN_KEY_SIZE * 2,
- .max_keysize = SERPENT_MAX_KEY_SIZE * 2,
- .ivsize = SERPENT_BLOCK_SIZE,
- .setkey = xts_serpent_setkey,
- .encrypt = xts_encrypt,
- .decrypt = xts_decrypt,
- },
- },
-}, {
.cra_name = "ecb(serpent)",
.cra_driver_name = "ecb-serpent-sse2",
.cra_priority = 400,
@@ -446,28 +296,6 @@ static struct crypto_alg serpent_algs[] = { {
.geniv = "chainiv",
},
},
-}, {
- .cra_name = "xts(serpent)",
- .cra_driver_name = "xts-serpent-sse2",
- .cra_priority = 400,
- .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct async_helper_ctx),
- .cra_alignmask = 0,
- .cra_type = &crypto_ablkcipher_type,
- .cra_module = THIS_MODULE,
- .cra_init = ablk_init,
- .cra_exit = ablk_exit,
- .cra_u = {
- .ablkcipher = {
- .min_keysize = SERPENT_MIN_KEY_SIZE * 2,
- .max_keysize = SERPENT_MAX_KEY_SIZE * 2,
- .ivsize = SERPENT_BLOCK_SIZE,
- .setkey = ablk_set_key,
- .encrypt = ablk_encrypt,
- .decrypt = ablk_decrypt,
- },
- },
} };
static int __init serpent_sse2_init(void)
diff --git a/crypto/Kconfig b/crypto/Kconfig
index ecc4e56e80ac..4b2026e07bac 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1427,7 +1427,6 @@ config CRYPTO_SERPENT_SSE2_X86_64
select CRYPTO_ABLK_HELPER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SERPENT
- select CRYPTO_XTS
help
Serpent cipher algorithm, by Anderson, Biham & Knudsen.
@@ -1448,7 +1447,6 @@ config CRYPTO_SERPENT_SSE2_586
select CRYPTO_ABLK_HELPER
select CRYPTO_GLUE_HELPER_X86
select CRYPTO_SERPENT
- select CRYPTO_XTS
help
Serpent cipher algorithm, by Anderson, Biham & Knudsen.