author     Jiri Kosina <jkosina@suse.cz>  2011-11-13 20:55:35 +0100
committer  Jiri Kosina <jkosina@suse.cz>  2011-11-13 20:55:53 +0100
commit     2290c0d06d82faee87b1ab2d9d4f7bf81ef64379 (patch)
tree       e075e4d5534193f28e6059904f61e5ca03958d3c /crypto
parent     crypto: drop selects of bogus Kconfig symbol (diff)
parent     Merge branch 'drm-fixes' of git://people.freedesktop.org/~airlied/linux (diff)
download   linux-2290c0d06d82faee87b1ab2d9d4f7bf81ef64379.tar.xz
download   linux-2290c0d06d82faee87b1ab2d9d4f7bf81ef64379.zip
Merge branch 'master' into for-next
Sync with Linus' tree to have 157550ff ("mtd: add GPMI-NAND driver in the config and Makefile") as I have a patch depending on that one.
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/Kconfig                                             |  63
-rw-r--r--  crypto/Makefile                                            |   4
-rw-r--r--  crypto/ablkcipher.c                                        |  62
-rw-r--r--  crypto/aead.c                                              |  62
-rw-r--r--  crypto/ahash.c                                             |  28
-rw-r--r--  crypto/algapi.c                                            |  12
-rw-r--r--  crypto/async_tx/async_memcpy.c                             |   1
-rw-r--r--  crypto/async_tx/async_pq.c                                 |   1
-rw-r--r--  crypto/async_tx/async_raid6_recov.c                        |   1
-rw-r--r--  crypto/async_tx/async_tx.c                                 |   1
-rw-r--r--  crypto/async_tx/async_xor.c                                |   1
-rw-r--r--  crypto/async_tx/raid6test.c                                |   1
-rw-r--r--  crypto/blkcipher.c                                         |  32
-rw-r--r--  crypto/blowfish_common.c (renamed from crypto/blowfish.c)  |  98
-rw-r--r--  crypto/blowfish_generic.c                                  | 142
-rw-r--r--  crypto/cryptd.c                                            |   2
-rw-r--r--  crypto/crypto_user.c                                       | 435
-rw-r--r--  crypto/crypto_wq.c                                         |   1
-rw-r--r--  crypto/internal.h                                          |   3
-rw-r--r--  crypto/md4.c                                               |   1
-rw-r--r--  crypto/pcompress.c                                         |  25
-rw-r--r--  crypto/proc.c                                              |   1
-rw-r--r--  crypto/rng.c                                               |  27
-rw-r--r--  crypto/sha1_generic.c                                      |   9
-rw-r--r--  crypto/shash.c                                             |  28
-rw-r--r--  crypto/tcrypt.c                                            |  10
-rw-r--r--  crypto/testmgr.c                                           |  30
-rw-r--r--  crypto/testmgr.h                                           | 398
-rw-r--r--  crypto/vmac.c                                              |   1
-rw-r--r--  crypto/wp512.c                                             |  18
-rw-r--r--  crypto/xcbc.c                                              |   1
31 files changed, 1384 insertions(+), 115 deletions(-)
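
The largest single addition below is crypto/crypto_user.c, which exposes algorithm configuration to userspace over a new NETLINK_CRYPTO socket. As a rough orientation, a minimal userspace sketch of a CRYPTO_MSG_GETALG query might look as follows; it assumes the <linux/cryptouser.h> header added alongside this series, uses only the crypto_user_alg fields visible in the diff below, and the driver name "cbc(aes-generic)" is merely a hypothetical example of an instantiated template (error handling trimmed).

/* Sketch only: query one algorithm over the new NETLINK_CRYPTO socket.
 * Needs CAP_NET_ADMIN (see crypto_user_rcv_msg() in the diff below). */
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/netlink.h>
#include <linux/cryptouser.h>

int main(void)
{
	struct {
		struct nlmsghdr nlh;
		struct crypto_user_alg alg;
	} req;
	char buf[4096];
	ssize_t n;
	int fd;

	fd = socket(AF_NETLINK, SOCK_RAW, NETLINK_CRYPTO);
	if (fd < 0)
		return 1;

	memset(&req, 0, sizeof(req));
	req.nlh.nlmsg_len   = NLMSG_LENGTH(sizeof(req.alg));
	req.nlh.nlmsg_type  = CRYPTO_MSG_GETALG;
	req.nlh.nlmsg_flags = NLM_F_REQUEST;
	/* Hypothetical driver name; must match an instantiated algorithm's
	 * cra_driver_name exactly (see crypto_alg_match() below). */
	strcpy(req.alg.cru_driver_name, "cbc(aes-generic)");

	if (send(fd, &req, req.nlh.nlmsg_len, 0) < 0)
		return 1;

	/* The reply carries a crypto_user_alg plus CRYPTOCFGA_* attributes. */
	n = recv(fd, buf, sizeof(buf), 0);
	printf("received %zd bytes\n", n);

	close(fd);
	return 0;
}
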
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 82d4d454e1ff..ae9c3ceb2867 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -100,6 +100,14 @@ config CRYPTO_MANAGER2
select CRYPTO_BLKCIPHER2
select CRYPTO_PCOMP2
+config CRYPTO_USER
+ tristate "Userspace cryptographic algorithm configuration"
+ depends on NET
+ select CRYPTO_MANAGER
+ help
+ Userspace configuration for cryptographic instantiations such as
+ cbc(aes).
+
config CRYPTO_MANAGER_DISABLE_TESTS
bool "Disable run-time self tests"
default y
@@ -406,6 +414,16 @@ config CRYPTO_SHA1
help
SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2).
+config CRYPTO_SHA1_SSSE3
+ tristate "SHA1 digest algorithm (SSSE3/AVX)"
+ depends on X86 && 64BIT
+ select CRYPTO_SHA1
+ select CRYPTO_HASH
+ help
+ SHA-1 secure hash standard (FIPS 180-1/DFIPS 180-2) implemented
+ using Supplemental SSE3 (SSSE3) instructions or Advanced Vector
+ Extensions (AVX), when available.
+
config CRYPTO_SHA256
tristate "SHA224 and SHA256 digest algorithm"
select CRYPTO_HASH
@@ -588,6 +606,7 @@ config CRYPTO_ARC4
config CRYPTO_BLOWFISH
tristate "Blowfish cipher algorithm"
select CRYPTO_ALGAPI
+ select CRYPTO_BLOWFISH_COMMON
help
Blowfish cipher algorithm, by Bruce Schneier.
@@ -598,6 +617,30 @@ config CRYPTO_BLOWFISH
See also:
<http://www.schneier.com/blowfish.html>
+config CRYPTO_BLOWFISH_COMMON
+ tristate
+ help
+ Common parts of the Blowfish cipher algorithm shared by the
+ generic C and the assembler implementations.
+
+ See also:
+ <http://www.schneier.com/blowfish.html>
+
+config CRYPTO_BLOWFISH_X86_64
+ tristate "Blowfish cipher algorithm (x86_64)"
+ depends on (X86 || UML_X86) && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_BLOWFISH_COMMON
+ help
+ Blowfish cipher algorithm (x86_64), by Bruce Schneier.
+
+ This is a variable key length cipher which can use keys from 32
+ bits to 448 bits in length. It's fast, simple and specifically
+ designed for use on "large microprocessors".
+
+ See also:
+ <http://www.schneier.com/blowfish.html>
+
config CRYPTO_CAMELLIA
tristate "Camellia cipher algorithms"
depends on CRYPTO
@@ -791,6 +834,26 @@ config CRYPTO_TWOFISH_X86_64
See also:
<http://www.schneier.com/twofish.html>
+config CRYPTO_TWOFISH_X86_64_3WAY
+ tristate "Twofish cipher algorithm (x86_64, 3-way parallel)"
+ depends on (X86 || UML_X86) && 64BIT
+ select CRYPTO_ALGAPI
+ select CRYPTO_TWOFISH_COMMON
+ select CRYPTO_TWOFISH_X86_64
+ help
+ Twofish cipher algorithm (x86_64, 3-way parallel).
+
+ Twofish was submitted as an AES (Advanced Encryption Standard)
+ candidate cipher by researchers at CounterPane Systems. It is a
+ 16 round block cipher supporting key sizes of 128, 192, and 256
+ bits.
+
+ This module provides the Twofish cipher algorithm that processes three
+ blocks in parallel, utilizing resources of out-of-order CPUs better.
+
+ See also:
+ <http://www.schneier.com/twofish.html>
+
comment "Compression"
config CRYPTO_DEFLATE
diff --git a/crypto/Makefile b/crypto/Makefile
index ce5a813d3639..9e6eee2c05db 100644
--- a/crypto/Makefile
+++ b/crypto/Makefile
@@ -31,6 +31,7 @@ obj-$(CONFIG_CRYPTO_PCOMP2) += pcompress.o
cryptomgr-y := algboss.o testmgr.o
obj-$(CONFIG_CRYPTO_MANAGER2) += cryptomgr.o
+obj-$(CONFIG_CRYPTO_USER) += crypto_user.o
obj-$(CONFIG_CRYPTO_HMAC) += hmac.o
obj-$(CONFIG_CRYPTO_VMAC) += vmac.o
obj-$(CONFIG_CRYPTO_XCBC) += xcbc.o
@@ -60,7 +61,8 @@ obj-$(CONFIG_CRYPTO_PCRYPT) += pcrypt.o
obj-$(CONFIG_CRYPTO_CRYPTD) += cryptd.o
obj-$(CONFIG_CRYPTO_DES) += des_generic.o
obj-$(CONFIG_CRYPTO_FCRYPT) += fcrypt.o
-obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish.o
+obj-$(CONFIG_CRYPTO_BLOWFISH) += blowfish_generic.o
+obj-$(CONFIG_CRYPTO_BLOWFISH_COMMON) += blowfish_common.o
obj-$(CONFIG_CRYPTO_TWOFISH) += twofish_generic.o
obj-$(CONFIG_CRYPTO_TWOFISH_COMMON) += twofish_common.o
obj-$(CONFIG_CRYPTO_SERPENT) += serpent.o
diff --git a/crypto/ablkcipher.c b/crypto/ablkcipher.c
index fdc67d38660b..a0f768c1d9aa 100644
--- a/crypto/ablkcipher.c
+++ b/crypto/ablkcipher.c
@@ -23,6 +23,8 @@
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include <crypto/scatterwalk.h>
@@ -381,6 +383,35 @@ static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "ablkcipher");
+ snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
+ alg->cra_ablkcipher.geniv ?: "<default>");
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_ablkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -403,6 +434,7 @@ const struct crypto_type crypto_ablkcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_ablkcipher_show,
#endif
+ .report = crypto_ablkcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);
@@ -432,6 +464,35 @@ static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type,
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "givcipher");
+ snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
+ alg->cra_ablkcipher.geniv ?: "<built-in>");
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_ablkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_ablkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_ablkcipher.ivsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_givcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -454,6 +515,7 @@ const struct crypto_type crypto_givcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_givcipher_show,
#endif
+ .report = crypto_givcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_givcipher_type);
diff --git a/crypto/aead.c b/crypto/aead.c
index 6729e8ff68e7..04add3dca6fe 100644
--- a/crypto/aead.c
+++ b/crypto/aead.c
@@ -21,6 +21,8 @@
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -109,6 +111,35 @@ static int crypto_init_aead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_aead raead;
+ struct aead_alg *aead = &alg->cra_aead;
+
+ snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "aead");
+ snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s",
+ aead->geniv ?: "<built-in>");
+
+ raead.blocksize = alg->cra_blocksize;
+ raead.maxauthsize = aead->maxauthsize;
+ raead.ivsize = aead->ivsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_AEAD,
+ sizeof(struct crypto_report_aead), &raead);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_aead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_aead_show(struct seq_file *m, struct crypto_alg *alg)
@@ -130,6 +161,7 @@ const struct crypto_type crypto_aead_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_aead_show,
#endif
+ .report = crypto_aead_report,
};
EXPORT_SYMBOL_GPL(crypto_aead_type);
@@ -165,6 +197,35 @@ static int crypto_init_nivaead_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_aead raead;
+ struct aead_alg *aead = &alg->cra_aead;
+
+ snprintf(raead.type, CRYPTO_MAX_ALG_NAME, "%s", "nivaead");
+ snprintf(raead.geniv, CRYPTO_MAX_ALG_NAME, "%s", aead->geniv);
+
+ raead.blocksize = alg->cra_blocksize;
+ raead.maxauthsize = aead->maxauthsize;
+ raead.ivsize = aead->ivsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_AEAD,
+ sizeof(struct crypto_report_aead), &raead);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_nivaead_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
+
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_nivaead_show(struct seq_file *m, struct crypto_alg *alg)
@@ -186,6 +247,7 @@ const struct crypto_type crypto_nivaead_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_nivaead_show,
#endif
+ .report = crypto_nivaead_report,
};
EXPORT_SYMBOL_GPL(crypto_nivaead_type);
diff --git a/crypto/ahash.c b/crypto/ahash.c
index f669822a7a44..ac93c99cfae8 100644
--- a/crypto/ahash.c
+++ b/crypto/ahash.c
@@ -21,6 +21,8 @@
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -397,6 +399,31 @@ static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
return sizeof(struct crypto_shash *);
}
+#ifdef CONFIG_NET
+static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_hash rhash;
+
+ snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "ahash");
+
+ rhash.blocksize = alg->cra_blocksize;
+ rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_HASH,
+ sizeof(struct crypto_report_hash), &rhash);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
@@ -415,6 +442,7 @@ const struct crypto_type crypto_ahash_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_ahash_show,
#endif
+ .report = crypto_ahash_report,
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
.type = CRYPTO_ALG_TYPE_AHASH,
diff --git a/crypto/algapi.c b/crypto/algapi.c
index c3cf1a69a47a..54dd4e33b5d6 100644
--- a/crypto/algapi.c
+++ b/crypto/algapi.c
@@ -22,8 +22,6 @@
#include "internal.h"
-static void crypto_remove_final(struct list_head *list);
-
static LIST_HEAD(crypto_template_list);
void crypto_larval_error(const char *name, u32 type, u32 mask)
@@ -129,9 +127,8 @@ static void crypto_remove_spawn(struct crypto_spawn *spawn,
BUG_ON(!list_empty(&inst->alg.cra_users));
}
-static void crypto_remove_spawns(struct crypto_alg *alg,
- struct list_head *list,
- struct crypto_alg *nalg)
+void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
+ struct crypto_alg *nalg)
{
u32 new_type = (nalg ?: alg)->cra_flags;
struct crypto_spawn *spawn, *n;
@@ -177,6 +174,7 @@ static void crypto_remove_spawns(struct crypto_alg *alg,
crypto_remove_spawn(spawn, list);
}
}
+EXPORT_SYMBOL_GPL(crypto_remove_spawns);
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
@@ -321,7 +319,7 @@ unlock:
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
-static void crypto_remove_final(struct list_head *list)
+void crypto_remove_final(struct list_head *list)
{
struct crypto_alg *alg;
struct crypto_alg *n;
@@ -331,6 +329,7 @@ static void crypto_remove_final(struct list_head *list)
crypto_alg_put(alg);
}
}
+EXPORT_SYMBOL_GPL(crypto_remove_final);
static void crypto_wait_for_test(struct crypto_larval *larval)
{
@@ -493,6 +492,7 @@ int crypto_register_instance(struct crypto_template *tmpl,
goto err;
inst->alg.cra_module = tmpl->module;
+ inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
down_write(&crypto_alg_sem);
diff --git a/crypto/async_tx/async_memcpy.c b/crypto/async_tx/async_memcpy.c
index 518c22bd9562..0d5a90ca6501 100644
--- a/crypto/async_tx/async_memcpy.c
+++ b/crypto/async_tx/async_memcpy.c
@@ -25,6 +25,7 @@
*/
#include <linux/kernel.h>
#include <linux/highmem.h>
+#include <linux/module.h>
#include <linux/mm.h>
#include <linux/dma-mapping.h>
#include <linux/async_tx.h>
diff --git a/crypto/async_tx/async_pq.c b/crypto/async_tx/async_pq.c
index fdd8257d35d9..91d5d385899e 100644
--- a/crypto/async_tx/async_pq.c
+++ b/crypto/async_tx/async_pq.c
@@ -21,6 +21,7 @@
*/
#include <linux/kernel.h>
#include <linux/interrupt.h>
+#include <linux/module.h>
#include <linux/dma-mapping.h>
#include <linux/raid/pq.h>
#include <linux/async_tx.h>
diff --git a/crypto/async_tx/async_raid6_recov.c b/crypto/async_tx/async_raid6_recov.c
index ce038d861eb9..a9f08a6a582e 100644
--- a/crypto/async_tx/async_raid6_recov.c
+++ b/crypto/async_tx/async_raid6_recov.c
@@ -22,6 +22,7 @@
*/
#include <linux/kernel.h>
#include <linux/interrupt.h>
+#include <linux/module.h>
#include <linux/dma-mapping.h>
#include <linux/raid/pq.h>
#include <linux/async_tx.h>
diff --git a/crypto/async_tx/async_tx.c b/crypto/async_tx/async_tx.c
index 7f2c00a45205..842120979374 100644
--- a/crypto/async_tx/async_tx.c
+++ b/crypto/async_tx/async_tx.c
@@ -24,6 +24,7 @@
*
*/
#include <linux/rculist.h>
+#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/async_tx.h>
diff --git a/crypto/async_tx/async_xor.c b/crypto/async_tx/async_xor.c
index bc28337fded2..154cc84381c2 100644
--- a/crypto/async_tx/async_xor.c
+++ b/crypto/async_tx/async_xor.c
@@ -25,6 +25,7 @@
*/
#include <linux/kernel.h>
#include <linux/interrupt.h>
+#include <linux/module.h>
#include <linux/mm.h>
#include <linux/dma-mapping.h>
#include <linux/raid/xor.h>
diff --git a/crypto/async_tx/raid6test.c b/crypto/async_tx/raid6test.c
index c88ff9e3fd30..aa2b0270ed16 100644
--- a/crypto/async_tx/raid6test.c
+++ b/crypto/async_tx/raid6test.c
@@ -23,6 +23,7 @@
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/random.h>
+#include <linux/module.h>
#undef pr
#define pr(fmt, args...) pr_info("raid6test: " fmt, ##args)
diff --git a/crypto/blkcipher.c b/crypto/blkcipher.c
index 7a7219266e3c..1e61d1a888b2 100644
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -24,6 +24,8 @@
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -492,6 +494,35 @@ static int crypto_init_blkcipher_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return crypto_init_blkcipher_ops_async(tfm);
}
+#ifdef CONFIG_NET
+static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_blkcipher rblkcipher;
+
+ snprintf(rblkcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "blkcipher");
+ snprintf(rblkcipher.geniv, CRYPTO_MAX_ALG_NAME, "%s",
+ alg->cra_blkcipher.geniv ?: "<default>");
+
+ rblkcipher.blocksize = alg->cra_blocksize;
+ rblkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
+ rblkcipher.max_keysize = alg->cra_blkcipher.max_keysize;
+ rblkcipher.ivsize = alg->cra_blkcipher.ivsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
+ sizeof(struct crypto_report_blkcipher), &rblkcipher);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_blkcipher_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_blkcipher_show(struct seq_file *m, struct crypto_alg *alg)
@@ -511,6 +542,7 @@ const struct crypto_type crypto_blkcipher_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_blkcipher_show,
#endif
+ .report = crypto_blkcipher_report,
};
EXPORT_SYMBOL_GPL(crypto_blkcipher_type);
diff --git a/crypto/blowfish.c b/crypto/blowfish_common.c
index a67d52ee0580..f636aab0209f 100644
--- a/crypto/blowfish.c
+++ b/crypto/blowfish_common.c
@@ -1,6 +1,9 @@
/*
* Cryptographic API.
*
+ * Common Blowfish algorithm parts shared between the c and assembler
+ * implementations.
+ *
* Blowfish Cipher Algorithm, by Bruce Schneier.
* http://www.counterpane.com/blowfish.html
*
@@ -22,15 +25,7 @@
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>
-
-#define BF_BLOCK_SIZE 8
-#define BF_MIN_KEY_SIZE 4
-#define BF_MAX_KEY_SIZE 56
-
-struct bf_ctx {
- u32 p[18];
- u32 s[1024];
-};
+#include <crypto/blowfish.h>
static const u32 bf_pbox[16 + 2] = {
0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344,
@@ -309,9 +304,9 @@ static const u32 bf_sbox[256 * 4] = {
#define GET32_0(x) (((x) >> (24)) & (0xff))
#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \
- S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
+ S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
-#define ROUND(a, b, n) b ^= P[n]; a ^= bf_F (b)
+#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); })
/*
* The blowfish encipher, processes 64-bit blocks.
@@ -348,57 +343,10 @@ static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src)
dst[1] = yl;
}
-static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const __be32 *in_blk = (const __be32 *)src;
- __be32 *const out_blk = (__be32 *)dst;
- u32 in32[2], out32[2];
-
- in32[0] = be32_to_cpu(in_blk[0]);
- in32[1] = be32_to_cpu(in_blk[1]);
- encrypt_block(crypto_tfm_ctx(tfm), out32, in32);
- out_blk[0] = cpu_to_be32(out32[0]);
- out_blk[1] = cpu_to_be32(out32[1]);
-}
-
-static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
- const __be32 *in_blk = (const __be32 *)src;
- __be32 *const out_blk = (__be32 *)dst;
- const u32 *P = ctx->p;
- const u32 *S = ctx->s;
- u32 yl = be32_to_cpu(in_blk[0]);
- u32 yr = be32_to_cpu(in_blk[1]);
-
- ROUND(yr, yl, 17);
- ROUND(yl, yr, 16);
- ROUND(yr, yl, 15);
- ROUND(yl, yr, 14);
- ROUND(yr, yl, 13);
- ROUND(yl, yr, 12);
- ROUND(yr, yl, 11);
- ROUND(yl, yr, 10);
- ROUND(yr, yl, 9);
- ROUND(yl, yr, 8);
- ROUND(yr, yl, 7);
- ROUND(yl, yr, 6);
- ROUND(yr, yl, 5);
- ROUND(yl, yr, 4);
- ROUND(yr, yl, 3);
- ROUND(yl, yr, 2);
-
- yl ^= P[1];
- yr ^= P[0];
-
- out_blk[0] = cpu_to_be32(yr);
- out_blk[1] = cpu_to_be32(yl);
-}
-
/*
* Calculates the blowfish S and P boxes for encryption and decryption.
*/
-static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
+int blowfish_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
{
struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
u32 *P = ctx->p;
@@ -448,35 +396,7 @@ static int bf_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
/* Bruce says not to bother with the weak key check. */
return 0;
}
-
-static struct crypto_alg alg = {
- .cra_name = "blowfish",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = BF_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct bf_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_list = LIST_HEAD_INIT(alg.cra_list),
- .cra_u = { .cipher = {
- .cia_min_keysize = BF_MIN_KEY_SIZE,
- .cia_max_keysize = BF_MAX_KEY_SIZE,
- .cia_setkey = bf_setkey,
- .cia_encrypt = bf_encrypt,
- .cia_decrypt = bf_decrypt } }
-};
-
-static int __init blowfish_mod_init(void)
-{
- return crypto_register_alg(&alg);
-}
-
-static void __exit blowfish_mod_fini(void)
-{
- crypto_unregister_alg(&alg);
-}
-
-module_init(blowfish_mod_init);
-module_exit(blowfish_mod_fini);
+EXPORT_SYMBOL_GPL(blowfish_setkey);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Blowfish Cipher Algorithm");
+MODULE_DESCRIPTION("Blowfish Cipher common functions");
diff --git a/crypto/blowfish_generic.c b/crypto/blowfish_generic.c
new file mode 100644
index 000000000000..6f269b5cfa3b
--- /dev/null
+++ b/crypto/blowfish_generic.c
@@ -0,0 +1,142 @@
+/*
+ * Cryptographic API.
+ *
+ * Blowfish Cipher Algorithm, by Bruce Schneier.
+ * http://www.counterpane.com/blowfish.html
+ *
+ * Adapted from Kerneli implementation.
+ *
+ * Copyright (c) Herbert Valerio Riedel <hvr@hvrlab.org>
+ * Copyright (c) Kyle McMartin <kyle@debian.org>
+ * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ */
+#include <linux/init.h>
+#include <linux/module.h>
+#include <linux/mm.h>
+#include <asm/byteorder.h>
+#include <linux/crypto.h>
+#include <linux/types.h>
+#include <crypto/blowfish.h>
+
+/*
+ * Round loop unrolling macros, S is a pointer to a S-Box array
+ * organized in 4 unsigned longs at a row.
+ */
+#define GET32_3(x) (((x) & 0xff))
+#define GET32_2(x) (((x) >> (8)) & (0xff))
+#define GET32_1(x) (((x) >> (16)) & (0xff))
+#define GET32_0(x) (((x) >> (24)) & (0xff))
+
+#define bf_F(x) (((S[GET32_0(x)] + S[256 + GET32_1(x)]) ^ \
+ S[512 + GET32_2(x)]) + S[768 + GET32_3(x)])
+
+#define ROUND(a, b, n) ({ b ^= P[n]; a ^= bf_F(b); })
+
+static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
+ const __be32 *in_blk = (const __be32 *)src;
+ __be32 *const out_blk = (__be32 *)dst;
+ const u32 *P = ctx->p;
+ const u32 *S = ctx->s;
+ u32 yl = be32_to_cpu(in_blk[0]);
+ u32 yr = be32_to_cpu(in_blk[1]);
+
+ ROUND(yr, yl, 0);
+ ROUND(yl, yr, 1);
+ ROUND(yr, yl, 2);
+ ROUND(yl, yr, 3);
+ ROUND(yr, yl, 4);
+ ROUND(yl, yr, 5);
+ ROUND(yr, yl, 6);
+ ROUND(yl, yr, 7);
+ ROUND(yr, yl, 8);
+ ROUND(yl, yr, 9);
+ ROUND(yr, yl, 10);
+ ROUND(yl, yr, 11);
+ ROUND(yr, yl, 12);
+ ROUND(yl, yr, 13);
+ ROUND(yr, yl, 14);
+ ROUND(yl, yr, 15);
+
+ yl ^= P[16];
+ yr ^= P[17];
+
+ out_blk[0] = cpu_to_be32(yr);
+ out_blk[1] = cpu_to_be32(yl);
+}
+
+static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
+{
+ struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
+ const __be32 *in_blk = (const __be32 *)src;
+ __be32 *const out_blk = (__be32 *)dst;
+ const u32 *P = ctx->p;
+ const u32 *S = ctx->s;
+ u32 yl = be32_to_cpu(in_blk[0]);
+ u32 yr = be32_to_cpu(in_blk[1]);
+
+ ROUND(yr, yl, 17);
+ ROUND(yl, yr, 16);
+ ROUND(yr, yl, 15);
+ ROUND(yl, yr, 14);
+ ROUND(yr, yl, 13);
+ ROUND(yl, yr, 12);
+ ROUND(yr, yl, 11);
+ ROUND(yl, yr, 10);
+ ROUND(yr, yl, 9);
+ ROUND(yl, yr, 8);
+ ROUND(yr, yl, 7);
+ ROUND(yl, yr, 6);
+ ROUND(yr, yl, 5);
+ ROUND(yl, yr, 4);
+ ROUND(yr, yl, 3);
+ ROUND(yl, yr, 2);
+
+ yl ^= P[1];
+ yr ^= P[0];
+
+ out_blk[0] = cpu_to_be32(yr);
+ out_blk[1] = cpu_to_be32(yl);
+}
+
+static struct crypto_alg alg = {
+ .cra_name = "blowfish",
+ .cra_driver_name = "blowfish-generic",
+ .cra_priority = 100,
+ .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
+ .cra_blocksize = BF_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct bf_ctx),
+ .cra_alignmask = 3,
+ .cra_module = THIS_MODULE,
+ .cra_list = LIST_HEAD_INIT(alg.cra_list),
+ .cra_u = { .cipher = {
+ .cia_min_keysize = BF_MIN_KEY_SIZE,
+ .cia_max_keysize = BF_MAX_KEY_SIZE,
+ .cia_setkey = blowfish_setkey,
+ .cia_encrypt = bf_encrypt,
+ .cia_decrypt = bf_decrypt } }
+};
+
+static int __init blowfish_mod_init(void)
+{
+ return crypto_register_alg(&alg);
+}
+
+static void __exit blowfish_mod_fini(void)
+{
+ crypto_unregister_alg(&alg);
+}
+
+module_init(blowfish_mod_init);
+module_exit(blowfish_mod_fini);
+
+MODULE_LICENSE("GPL");
+MODULE_DESCRIPTION("Blowfish Cipher Algorithm");
+MODULE_ALIAS("blowfish");
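
The point of the blowfish.c split above is that blowfish_common.c keeps the key schedule while blowfish_generic.c keeps the C cipher, so an architecture-specific module can reuse blowfish_setkey() through the new <crypto/blowfish.h> header and register with a higher priority. A rough sketch of such a glue module follows; the assembler entry points __blowfish_enc_blk()/__blowfish_dec_blk() and the driver name "blowfish-asm-example" are hypothetical and not part of this series.

/* Sketch of an arch glue module reusing the shared Blowfish key schedule.
 * The asm routines declared here are placeholders, not real symbols. */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/blowfish.h>

/* Hypothetical assembler implementations, one block per call. */
void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);
void __blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

static void bf_asm_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	__blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void bf_asm_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	__blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

static struct crypto_alg bf_asm_alg = {
	.cra_name		= "blowfish",
	.cra_driver_name	= "blowfish-asm-example",	/* hypothetical */
	.cra_priority		= 200,	/* preferred over blowfish-generic (100) */
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(bf_asm_alg.cra_list),
	.cra_u = { .cipher = {
			.cia_min_keysize	= BF_MIN_KEY_SIZE,
			.cia_max_keysize	= BF_MAX_KEY_SIZE,
			.cia_setkey		= blowfish_setkey,	/* shared key schedule */
			.cia_encrypt		= bf_asm_encrypt,
			.cia_decrypt		= bf_asm_decrypt } }
};

static int __init bf_asm_init(void)
{
	return crypto_register_alg(&bf_asm_alg);
}

static void __exit bf_asm_fini(void)
{
	crypto_unregister_alg(&bf_asm_alg);
}

module_init(bf_asm_init);
module_exit(bf_asm_fini);
MODULE_LICENSE("GPL");
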
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index e46d21ae26bc..671d4d6d14df 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -945,7 +945,7 @@ static void __exit cryptd_exit(void)
crypto_unregister_template(&cryptd_tmpl);
}
-module_init(cryptd_init);
+subsys_initcall(cryptd_init);
module_exit(cryptd_exit);
MODULE_LICENSE("GPL");
diff --git a/crypto/crypto_user.c b/crypto/crypto_user.c
new file mode 100644
index 000000000000..0605a2bbba75
--- /dev/null
+++ b/crypto/crypto_user.c
@@ -0,0 +1,435 @@
+/*
+ * Crypto user configuration API.
+ *
+ * Copyright (C) 2011 secunet Security Networks AG
+ * Copyright (C) 2011 Steffen Klassert <steffen.klassert@secunet.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License along with
+ * this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+ */
+
+#include <linux/module.h>
+#include <linux/crypto.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
+#include <linux/security.h>
+#include <net/net_namespace.h>
+#include "internal.h"
+
+DEFINE_MUTEX(crypto_cfg_mutex);
+
+/* The crypto netlink socket */
+static struct sock *crypto_nlsk;
+
+struct crypto_dump_info {
+ struct sk_buff *in_skb;
+ struct sk_buff *out_skb;
+ u32 nlmsg_seq;
+ u16 nlmsg_flags;
+};
+
+static struct crypto_alg *crypto_alg_match(struct crypto_user_alg *p, int exact)
+{
+ struct crypto_alg *q, *alg = NULL;
+
+ down_read(&crypto_alg_sem);
+
+ list_for_each_entry(q, &crypto_alg_list, cra_list) {
+ int match = 0;
+
+ if ((q->cra_flags ^ p->cru_type) & p->cru_mask)
+ continue;
+
+ if (strlen(p->cru_driver_name))
+ match = !strcmp(q->cra_driver_name,
+ p->cru_driver_name);
+ else if (!exact)
+ match = !strcmp(q->cra_name, p->cru_name);
+
+ if (match) {
+ alg = q;
+ break;
+ }
+ }
+
+ up_read(&crypto_alg_sem);
+
+ return alg;
+}
+
+static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_cipher rcipher;
+
+ snprintf(rcipher.type, CRYPTO_MAX_ALG_NAME, "%s", "cipher");
+
+ rcipher.blocksize = alg->cra_blocksize;
+ rcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
+ rcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_CIPHER,
+ sizeof(struct crypto_report_cipher), &rcipher);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_comp rcomp;
+
+ snprintf(rcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "compression");
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_COMPRESS,
+ sizeof(struct crypto_report_comp), &rcomp);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_one(struct crypto_alg *alg,
+ struct crypto_user_alg *ualg, struct sk_buff *skb)
+{
+ memcpy(&ualg->cru_name, &alg->cra_name, sizeof(ualg->cru_name));
+ memcpy(&ualg->cru_driver_name, &alg->cra_driver_name,
+ sizeof(ualg->cru_driver_name));
+ memcpy(&ualg->cru_module_name, module_name(alg->cra_module),
+ CRYPTO_MAX_ALG_NAME);
+
+ ualg->cru_flags = alg->cra_flags;
+ ualg->cru_refcnt = atomic_read(&alg->cra_refcnt);
+
+ NLA_PUT_U32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority);
+
+ if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
+ struct crypto_report_larval rl;
+
+ snprintf(rl.type, CRYPTO_MAX_ALG_NAME, "%s", "larval");
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_LARVAL,
+ sizeof(struct crypto_report_larval), &rl);
+
+ goto out;
+ }
+
+ if (alg->cra_type && alg->cra_type->report) {
+ if (alg->cra_type->report(skb, alg))
+ goto nla_put_failure;
+
+ goto out;
+ }
+
+ switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
+ case CRYPTO_ALG_TYPE_CIPHER:
+ if (crypto_report_cipher(skb, alg))
+ goto nla_put_failure;
+
+ break;
+ case CRYPTO_ALG_TYPE_COMPRESS:
+ if (crypto_report_comp(skb, alg))
+ goto nla_put_failure;
+
+ break;
+ }
+
+out:
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+
+static int crypto_report_alg(struct crypto_alg *alg,
+ struct crypto_dump_info *info)
+{
+ struct sk_buff *in_skb = info->in_skb;
+ struct sk_buff *skb = info->out_skb;
+ struct nlmsghdr *nlh;
+ struct crypto_user_alg *ualg;
+ int err = 0;
+
+ nlh = nlmsg_put(skb, NETLINK_CB(in_skb).pid, info->nlmsg_seq,
+ CRYPTO_MSG_GETALG, sizeof(*ualg), info->nlmsg_flags);
+ if (!nlh) {
+ err = -EMSGSIZE;
+ goto out;
+ }
+
+ ualg = nlmsg_data(nlh);
+
+ err = crypto_report_one(alg, ualg, skb);
+ if (err) {
+ nlmsg_cancel(skb, nlh);
+ goto out;
+ }
+
+ nlmsg_end(skb, nlh);
+
+out:
+ return err;
+}
+
+static int crypto_report(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_user_alg *p = nlmsg_data(in_nlh);
+ struct crypto_alg *alg;
+ struct sk_buff *skb;
+ struct crypto_dump_info info;
+ int err;
+
+ if (!p->cru_driver_name)
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
+ if (!skb)
+ return -ENOMEM;
+
+ info.in_skb = in_skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = in_nlh->nlmsg_seq;
+ info.nlmsg_flags = 0;
+
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ return err;
+
+ return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).pid);
+}
+
+static int crypto_dump_report(struct sk_buff *skb, struct netlink_callback *cb)
+{
+ struct crypto_alg *alg;
+ struct crypto_dump_info info;
+ int err;
+
+ if (cb->args[0])
+ goto out;
+
+ cb->args[0] = 1;
+
+ info.in_skb = cb->skb;
+ info.out_skb = skb;
+ info.nlmsg_seq = cb->nlh->nlmsg_seq;
+ info.nlmsg_flags = NLM_F_MULTI;
+
+ list_for_each_entry(alg, &crypto_alg_list, cra_list) {
+ err = crypto_report_alg(alg, &info);
+ if (err)
+ goto out_err;
+ }
+
+out:
+ return skb->len;
+out_err:
+ return err;
+}
+
+static int crypto_dump_report_done(struct netlink_callback *cb)
+{
+ return 0;
+}
+
+static int crypto_update_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+ struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
+ LIST_HEAD(list);
+
+ if (priority && !strlen(p->cru_driver_name))
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ down_write(&crypto_alg_sem);
+
+ crypto_remove_spawns(alg, &list, NULL);
+
+ if (priority)
+ alg->cra_priority = nla_get_u32(priority);
+
+ up_write(&crypto_alg_sem);
+
+ crypto_remove_final(&list);
+
+ return 0;
+}
+
+static int crypto_del_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+
+ alg = crypto_alg_match(p, 1);
+ if (!alg)
+ return -ENOENT;
+
+ /* We cannot unregister core algorithms such as aes-generic.
+ * We would lose the reference in the crypto_alg_list to this algorithm
+ * if we try to unregister. Unregistering such an algorithm without
+ * removing the module is not possible, so we restrict to crypto
+ * instances that are built from templates. */
+ if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
+ return -EINVAL;
+
+ if (atomic_read(&alg->cra_refcnt) != 1)
+ return -EBUSY;
+
+ return crypto_unregister_alg(alg);
+}
+
+static int crypto_add_alg(struct sk_buff *skb, struct nlmsghdr *nlh,
+ struct nlattr **attrs)
+{
+ int exact;
+ const char *name;
+ struct crypto_alg *alg;
+ struct crypto_user_alg *p = nlmsg_data(nlh);
+ struct nlattr *priority = attrs[CRYPTOCFGA_PRIORITY_VAL];
+
+ if (strlen(p->cru_driver_name))
+ exact = 1;
+
+ if (priority && !exact)
+ return -EINVAL;
+
+ alg = crypto_alg_match(p, exact);
+ if (alg)
+ return -EEXIST;
+
+ if (strlen(p->cru_driver_name))
+ name = p->cru_driver_name;
+ else
+ name = p->cru_name;
+
+ alg = crypto_alg_mod_lookup(name, p->cru_type, p->cru_mask);
+ if (IS_ERR(alg))
+ return PTR_ERR(alg);
+
+ down_write(&crypto_alg_sem);
+
+ if (priority)
+ alg->cra_priority = nla_get_u32(priority);
+
+ up_write(&crypto_alg_sem);
+
+ crypto_mod_put(alg);
+
+ return 0;
+}
+
+#define MSGSIZE(type) sizeof(struct type)
+
+static const int crypto_msg_min[CRYPTO_NR_MSGTYPES] = {
+ [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+ [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = MSGSIZE(crypto_user_alg),
+};
+
+static const struct nla_policy crypto_policy[CRYPTOCFGA_MAX+1] = {
+ [CRYPTOCFGA_PRIORITY_VAL] = { .type = NLA_U32},
+};
+
+#undef MSGSIZE
+
+static struct crypto_link {
+ int (*doit)(struct sk_buff *, struct nlmsghdr *, struct nlattr **);
+ int (*dump)(struct sk_buff *, struct netlink_callback *);
+ int (*done)(struct netlink_callback *);
+} crypto_dispatch[CRYPTO_NR_MSGTYPES] = {
+ [CRYPTO_MSG_NEWALG - CRYPTO_MSG_BASE] = { .doit = crypto_add_alg},
+ [CRYPTO_MSG_DELALG - CRYPTO_MSG_BASE] = { .doit = crypto_del_alg},
+ [CRYPTO_MSG_UPDATEALG - CRYPTO_MSG_BASE] = { .doit = crypto_update_alg},
+ [CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE] = { .doit = crypto_report,
+ .dump = crypto_dump_report,
+ .done = crypto_dump_report_done},
+};
+
+static int crypto_user_rcv_msg(struct sk_buff *skb, struct nlmsghdr *nlh)
+{
+ struct nlattr *attrs[CRYPTOCFGA_MAX+1];
+ struct crypto_link *link;
+ int type, err;
+
+ type = nlh->nlmsg_type;
+ if (type > CRYPTO_MSG_MAX)
+ return -EINVAL;
+
+ type -= CRYPTO_MSG_BASE;
+ link = &crypto_dispatch[type];
+
+ if (security_netlink_recv(skb, CAP_NET_ADMIN))
+ return -EPERM;
+
+ if ((type == (CRYPTO_MSG_GETALG - CRYPTO_MSG_BASE) &&
+ (nlh->nlmsg_flags & NLM_F_DUMP))) {
+ if (link->dump == NULL)
+ return -EINVAL;
+
+ return netlink_dump_start(crypto_nlsk, skb, nlh,
+ link->dump, link->done, 0);
+ }
+
+ err = nlmsg_parse(nlh, crypto_msg_min[type], attrs, CRYPTOCFGA_MAX,
+ crypto_policy);
+ if (err < 0)
+ return err;
+
+ if (link->doit == NULL)
+ return -EINVAL;
+
+ return link->doit(skb, nlh, attrs);
+}
+
+static void crypto_netlink_rcv(struct sk_buff *skb)
+{
+ mutex_lock(&crypto_cfg_mutex);
+ netlink_rcv_skb(skb, &crypto_user_rcv_msg);
+ mutex_unlock(&crypto_cfg_mutex);
+}
+
+static int __init crypto_user_init(void)
+{
+ crypto_nlsk = netlink_kernel_create(&init_net, NETLINK_CRYPTO,
+ 0, crypto_netlink_rcv,
+ NULL, THIS_MODULE);
+ if (!crypto_nlsk)
+ return -ENOMEM;
+
+ return 0;
+}
+
+static void __exit crypto_user_exit(void)
+{
+ netlink_kernel_release(crypto_nlsk);
+}
+
+module_init(crypto_user_init);
+module_exit(crypto_user_exit);
+MODULE_LICENSE("GPL");
+MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
+MODULE_DESCRIPTION("Crypto userspace configuration API");
diff --git a/crypto/crypto_wq.c b/crypto/crypto_wq.c
index b980ee1af459..adad92a44ba2 100644
--- a/crypto/crypto_wq.c
+++ b/crypto/crypto_wq.c
@@ -12,6 +12,7 @@
*/
#include <linux/workqueue.h>
+#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/crypto_wq.h>
diff --git a/crypto/internal.h b/crypto/internal.h
index d4384b08ab29..b865ca1a8613 100644
--- a/crypto/internal.h
+++ b/crypto/internal.h
@@ -86,6 +86,9 @@ struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask);
void crypto_larval_error(const char *name, u32 type, u32 mask);
void crypto_alg_tested(const char *name, int err);
+void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
+ struct crypto_alg *nalg);
+void crypto_remove_final(struct list_head *list);
void crypto_shoot_alg(struct crypto_alg *alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
u32 mask);
diff --git a/crypto/md4.c b/crypto/md4.c
index 7fca1f59a4f5..0477a6a01d58 100644
--- a/crypto/md4.c
+++ b/crypto/md4.c
@@ -23,6 +23,7 @@
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/kernel.h>
+#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>
#include <asm/byteorder.h>
diff --git a/crypto/pcompress.c b/crypto/pcompress.c
index f7c4a7d7412e..2e458e5482d0 100644
--- a/crypto/pcompress.c
+++ b/crypto/pcompress.c
@@ -24,6 +24,8 @@
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include <crypto/compress.h>
#include <crypto/internal/compress.h>
@@ -46,6 +48,28 @@ static int crypto_pcomp_init_tfm(struct crypto_tfm *tfm)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_comp rpcomp;
+
+ snprintf(rpcomp.type, CRYPTO_MAX_ALG_NAME, "%s", "pcomp");
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_COMPRESS,
+ sizeof(struct crypto_report_comp), &rpcomp);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_pcomp_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_pcomp_show(struct seq_file *m, struct crypto_alg *alg)
@@ -60,6 +84,7 @@ static const struct crypto_type crypto_pcomp_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_pcomp_show,
#endif
+ .report = crypto_pcomp_report,
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
.maskset = CRYPTO_ALG_TYPE_MASK,
.type = CRYPTO_ALG_TYPE_PCOMPRESS,
diff --git a/crypto/proc.c b/crypto/proc.c
index 3808697814d7..4a0a7aad2204 100644
--- a/crypto/proc.c
+++ b/crypto/proc.c
@@ -16,6 +16,7 @@
#include <linux/atomic.h>
#include <linux/init.h>
#include <linux/crypto.h>
+#include <linux/module.h> /* for module_name() */
#include <linux/rwsem.h>
#include <linux/proc_fs.h>
#include <linux/seq_file.h>
diff --git a/crypto/rng.c b/crypto/rng.c
index 45229ae782be..64f864fa8043 100644
--- a/crypto/rng.c
+++ b/crypto/rng.c
@@ -21,6 +21,8 @@
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
static DEFINE_MUTEX(crypto_default_rng_lock);
struct crypto_rng *crypto_default_rng;
@@ -58,6 +60,30 @@ static int crypto_init_rng_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
return 0;
}
+#ifdef CONFIG_NET
+static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_rng rrng;
+
+ snprintf(rrng.type, CRYPTO_MAX_ALG_NAME, "%s", "rng");
+
+ rrng.seedsize = alg->cra_rng.seedsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_RNG,
+ sizeof(struct crypto_report_rng), &rrng);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_rng_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_rng_show(struct seq_file *m, struct crypto_alg *alg)
@@ -78,6 +104,7 @@ const struct crypto_type crypto_rng_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_rng_show,
#endif
+ .report = crypto_rng_report,
};
EXPORT_SYMBOL_GPL(crypto_rng_type);
diff --git a/crypto/sha1_generic.c b/crypto/sha1_generic.c
index 00ae60eb9254..42794803c480 100644
--- a/crypto/sha1_generic.c
+++ b/crypto/sha1_generic.c
@@ -36,7 +36,7 @@ static int sha1_init(struct shash_desc *desc)
return 0;
}
-static int sha1_update(struct shash_desc *desc, const u8 *data,
+int crypto_sha1_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
struct sha1_state *sctx = shash_desc_ctx(desc);
@@ -71,6 +71,7 @@ static int sha1_update(struct shash_desc *desc, const u8 *data,
return 0;
}
+EXPORT_SYMBOL(crypto_sha1_update);
/* Add padding and return the message digest. */
@@ -87,10 +88,10 @@ static int sha1_final(struct shash_desc *desc, u8 *out)
/* Pad out to 56 mod 64 */
index = sctx->count & 0x3f;
padlen = (index < 56) ? (56 - index) : ((64+56) - index);
- sha1_update(desc, padding, padlen);
+ crypto_sha1_update(desc, padding, padlen);
/* Append length */
- sha1_update(desc, (const u8 *)&bits, sizeof(bits));
+ crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
/* Store state in digest */
for (i = 0; i < 5; i++)
@@ -121,7 +122,7 @@ static int sha1_import(struct shash_desc *desc, const void *in)
static struct shash_alg alg = {
.digestsize = SHA1_DIGEST_SIZE,
.init = sha1_init,
- .update = sha1_update,
+ .update = crypto_sha1_update,
.final = sha1_final,
.export = sha1_export,
.import = sha1_import,
diff --git a/crypto/shash.c b/crypto/shash.c
index 76f74b963151..9100912716ae 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -17,6 +17,8 @@
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
+#include <linux/cryptouser.h>
+#include <net/netlink.h>
#include "internal.h"
@@ -522,6 +524,31 @@ static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
return alg->cra_ctxsize;
}
+#ifdef CONFIG_NET
+static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ struct crypto_report_hash rhash;
+ struct shash_alg *salg = __crypto_shash_alg(alg);
+
+ snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash");
+ rhash.blocksize = alg->cra_blocksize;
+ rhash.digestsize = salg->digestsize;
+
+ NLA_PUT(skb, CRYPTOCFGA_REPORT_HASH,
+ sizeof(struct crypto_report_hash), &rhash);
+
+ return 0;
+
+nla_put_failure:
+ return -EMSGSIZE;
+}
+#else
+static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
+{
+ return -ENOSYS;
+}
+#endif
+
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
@@ -541,6 +568,7 @@ static const struct crypto_type crypto_shash_type = {
#ifdef CONFIG_PROC_FS
.show = crypto_shash_show,
#endif
+ .report = crypto_shash_report,
.maskclear = ~CRYPTO_ALG_TYPE_MASK,
.maskset = CRYPTO_ALG_TYPE_MASK,
.type = CRYPTO_ALG_TYPE_SHASH,
diff --git a/crypto/tcrypt.c b/crypto/tcrypt.c
index 2222617b3bed..0c4e80f34651 100644
--- a/crypto/tcrypt.c
+++ b/crypto/tcrypt.c
@@ -782,11 +782,13 @@ static int do_test(int m)
case 7:
ret += tcrypt_test("ecb(blowfish)");
ret += tcrypt_test("cbc(blowfish)");
+ ret += tcrypt_test("ctr(blowfish)");
break;
case 8:
ret += tcrypt_test("ecb(twofish)");
ret += tcrypt_test("cbc(twofish)");
+ ret += tcrypt_test("ctr(twofish)");
break;
case 9:
@@ -1039,6 +1041,10 @@ static int do_test(int m)
speed_template_16_24_32);
test_cipher_speed("cbc(twofish)", DECRYPT, sec, NULL, 0,
speed_template_16_24_32);
+ test_cipher_speed("ctr(twofish)", ENCRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
+ test_cipher_speed("ctr(twofish)", DECRYPT, sec, NULL, 0,
+ speed_template_16_24_32);
break;
case 203:
@@ -1050,6 +1056,10 @@ static int do_test(int m)
speed_template_8_32);
test_cipher_speed("cbc(blowfish)", DECRYPT, sec, NULL, 0,
speed_template_8_32);
+ test_cipher_speed("ctr(blowfish)", ENCRYPT, sec, NULL, 0,
+ speed_template_8_32);
+ test_cipher_speed("ctr(blowfish)", DECRYPT, sec, NULL, 0,
+ speed_template_8_32);
break;
case 204:
diff --git a/crypto/testmgr.c b/crypto/testmgr.c
index b6b93d416351..e91c1eb1722a 100644
--- a/crypto/testmgr.c
+++ b/crypto/testmgr.c
@@ -1756,6 +1756,36 @@ static const struct alg_test_desc alg_test_descs[] = {
}
}
}, {
+ .alg = "ctr(blowfish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = bf_ctr_enc_tv_template,
+ .count = BF_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = bf_ctr_dec_tv_template,
+ .count = BF_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
+ .alg = "ctr(twofish)",
+ .test = alg_test_skcipher,
+ .suite = {
+ .cipher = {
+ .enc = {
+ .vecs = tf_ctr_enc_tv_template,
+ .count = TF_CTR_ENC_TEST_VECTORS
+ },
+ .dec = {
+ .vecs = tf_ctr_dec_tv_template,
+ .count = TF_CTR_DEC_TEST_VECTORS
+ }
+ }
+ }
+ }, {
.alg = "cts(cbc(aes))",
.test = alg_test_skcipher,
.suite = {
diff --git a/crypto/testmgr.h b/crypto/testmgr.h
index 27adc92842ba..37b4d8f45447 100644
--- a/crypto/testmgr.h
+++ b/crypto/testmgr.h
@@ -2391,10 +2391,12 @@ static struct cipher_testvec des3_ede_cbc_dec_tv_template[] = {
/*
* Blowfish test vectors.
*/
-#define BF_ENC_TEST_VECTORS 6
-#define BF_DEC_TEST_VECTORS 6
-#define BF_CBC_ENC_TEST_VECTORS 1
-#define BF_CBC_DEC_TEST_VECTORS 1
+#define BF_ENC_TEST_VECTORS 7
+#define BF_DEC_TEST_VECTORS 7
+#define BF_CBC_ENC_TEST_VECTORS 2
+#define BF_CBC_DEC_TEST_VECTORS 2
+#define BF_CTR_ENC_TEST_VECTORS 2
+#define BF_CTR_DEC_TEST_VECTORS 2
static struct cipher_testvec bf_enc_tv_template[] = {
{ /* DES test vectors from OpenSSL */
@@ -2448,6 +2450,24 @@ static struct cipher_testvec bf_enc_tv_template[] = {
.ilen = 8,
.result = "\xc0\x45\x04\x01\x2e\x4e\x1f\x53",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .ilen = 40,
+ .result = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F"
+ "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D"
+ "\xD7\x87\xA1\xF2\xDF\x51\x71\x26"
+ "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40"
+ "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B",
+ .rlen = 40,
},
};
@@ -2503,6 +2523,24 @@ static struct cipher_testvec bf_dec_tv_template[] = {
.ilen = 8,
.result = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
.rlen = 8,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .input = "\x96\x87\x3D\x0C\x7B\xFB\xBD\x1F"
+ "\xE3\xC1\x99\x6D\x39\xD4\xC2\x7D"
+ "\xD7\x87\xA1\xF2\xDF\x51\x71\x26"
+ "\xC2\xF4\x6D\xFF\xF6\xCD\x6B\x40"
+ "\xE1\xB3\xBF\xD4\x38\x2B\xC8\x3B",
+ .ilen = 40,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .rlen = 40,
},
};
@@ -2522,6 +2560,25 @@ static struct cipher_testvec bf_cbc_enc_tv_template[] = {
"\x58\xde\xb9\xe7\x15\x46\x16\xd9"
"\x59\xf1\x65\x2b\xd5\xff\x92\xcc",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .ilen = 40,
+ .result = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06"
+ "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62"
+ "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E"
+ "\x1B\xD9\x02\xB6\x48\xB0\x87\x25"
+ "\x01\x9C\x93\x63\x51\x60\x82\xD2",
+ .rlen = 40,
},
};
@@ -2541,16 +2598,125 @@ static struct cipher_testvec bf_cbc_dec_tv_template[] = {
"\x68\x65\x20\x74\x69\x6d\x65\x20"
"\x66\x6f\x72\x20\x00\x00\x00\x00",
.rlen = 32,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xB4\xFE\xA5\xBB\x3D\x2C\x27\x06"
+ "\x06\x2B\x3A\x92\xB2\xF5\x5E\x62"
+ "\x84\xCD\xF7\x66\x7E\x41\x6C\x8E"
+ "\x1B\xD9\x02\xB6\x48\xB0\x87\x25"
+ "\x01\x9C\x93\x63\x51\x60\x82\xD2",
+ .ilen = 40,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .rlen = 40,
+ },
+};
+
+static struct cipher_testvec bf_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .ilen = 40,
+ .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC",
+ .rlen = 40,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B",
+ .ilen = 43,
+ .result = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9",
+ .rlen = 43,
+ },
+};
+
+static struct cipher_testvec bf_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC",
+ .ilen = 40,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9",
+ .rlen = 40,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F",
+ .input = "\xC7\xA3\xDF\xB9\x05\xF4\x9E\x8D"
+ "\x9E\xDF\x38\x18\x83\x07\xEF\xC1"
+ "\x93\x3C\xAA\xAA\xFE\x06\x42\xCC"
+ "\x0D\x70\x86\x5A\x44\xAD\x85\x17"
+ "\xE4\x1F\x5E\xA5\x89\xAC\x32\xBC"
+ "\x3D\xA7\xE9",
+ .ilen = 43,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B",
+ .rlen = 43,
},
};
/*
* Twofish test vectors.
*/
-#define TF_ENC_TEST_VECTORS 3
-#define TF_DEC_TEST_VECTORS 3
-#define TF_CBC_ENC_TEST_VECTORS 4
-#define TF_CBC_DEC_TEST_VECTORS 4
+#define TF_ENC_TEST_VECTORS 4
+#define TF_DEC_TEST_VECTORS 4
+#define TF_CBC_ENC_TEST_VECTORS 5
+#define TF_CBC_DEC_TEST_VECTORS 5
+#define TF_CTR_ENC_TEST_VECTORS 2
+#define TF_CTR_DEC_TEST_VECTORS 2
static struct cipher_testvec tf_enc_tv_template[] = {
{
@@ -2582,6 +2748,30 @@ static struct cipher_testvec tf_enc_tv_template[] = {
.result = "\x37\x52\x7b\xe0\x05\x23\x34\xb8"
"\x9f\x0c\xfc\xca\xe8\x7c\xfa\x20",
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .ilen = 64,
+ .result = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF"
+ "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC"
+ "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D"
+ "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14"
+ "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5"
+ "\xDF\xFA\xC7\xE8\x09\x50\x76\x08"
+ "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05"
+ "\x89\xF6\x82\xF0\xD3\xDB\x06\x02",
+ .rlen = 64,
},
};
@@ -2615,6 +2805,30 @@ static struct cipher_testvec tf_dec_tv_template[] = {
.ilen = 16,
.result = zeroed_string,
.rlen = 16,
+ }, { /* Generated with Crypto++ */
+ .key = "\x3F\x85\x62\x3F\x1C\xF9\xD6\x1C"
+ "\xF9\xD6\xB3\x90\x6D\x4A\x90\x6D"
+ "\x4A\x27\x04\xE1\x27\x04\xE1\xBE"
+ "\x9B\x78\xBE\x9B\x78\x55\x32\x0F",
+ .klen = 32,
+ .input = "\x88\xCB\x1E\xC2\xAF\x8A\x97\xFF"
+ "\xF6\x90\x46\x9C\x4A\x0F\x08\xDC"
+ "\xDE\xAB\xAD\xFA\xFC\xA8\xC2\x3D"
+ "\xE0\xE4\x8B\x3F\xD5\xA3\xF7\x14"
+ "\x34\x9E\xB6\x08\xB2\xDD\xA8\xF5"
+ "\xDF\xFA\xC7\xE8\x09\x50\x76\x08"
+ "\xA2\xB6\x6A\x59\xC0\x2B\x6D\x05"
+ "\x89\xF6\x82\xF0\xD3\xDB\x06\x02",
+ .ilen = 64,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .rlen = 64,
},
};
@@ -2661,6 +2875,32 @@ static struct cipher_testvec tf_cbc_enc_tv_template[] = {
"\x05\xef\x8c\x61\xa8\x11\x58\x26"
"\x34\xba\x5c\xb7\x10\x6a\xa6\x41",
.rlen = 48,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .ilen = 64,
+ .result = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1"
+ "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5"
+ "\x26\x1B\x05\x0C\x05\x12\x3F\xC0"
+ "\xF9\x1C\x02\x28\x40\x96\x6F\xD0"
+ "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE"
+ "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC"
+ "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3"
+ "\xB1\xD3\x44\x65\xDF\xE7\x63\x38",
+ .rlen = 64,
},
};
@@ -2707,6 +2947,148 @@ static struct cipher_testvec tf_cbc_dec_tv_template[] = {
.ilen = 48,
.result = zeroed_string,
.rlen = 48,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xC8\xFF\xF2\x53\xA6\x27\x09\xD1"
+ "\x33\x38\xC2\xC0\x0C\x14\x7E\xB5"
+ "\x26\x1B\x05\x0C\x05\x12\x3F\xC0"
+ "\xF9\x1C\x02\x28\x40\x96\x6F\xD0"
+ "\x3D\x32\xDF\xDA\x56\x00\x6E\xEE"
+ "\x5B\x2A\x72\x9D\xC2\x4D\x19\xBC"
+ "\x8C\x53\xFA\x87\x6F\xDD\x81\xA3"
+ "\xB1\xD3\x44\x65\xDF\xE7\x63\x38",
+ .ilen = 64,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .rlen = 64,
+ },
+};
+
+static struct cipher_testvec tf_ctr_enc_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .ilen = 64,
+ .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F",
+ .rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE",
+ .ilen = 67,
+ .result = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20",
+ .rlen = 67,
+ },
+};
+
+static struct cipher_testvec tf_ctr_dec_tv_template[] = {
+ { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F",
+ .ilen = 64,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C",
+ .rlen = 64,
+ }, { /* Generated with Crypto++ */
+ .key = "\x85\x62\x3F\x1C\xF9\xD6\x1C\xF9"
+ "\xD6\xB3\x90\x6D\x4A\x90\x6D\x4A"
+ "\x27\x04\xE1\x27\x04\xE1\xBE\x9B"
+ "\x78\xBE\x9B\x78\x55\x32\x0F\x55",
+ .klen = 32,
+ .iv = "\xE2\x24\x89\xEE\x53\xB8\x1D\x5F"
+ "\xC4\x29\x8E\xF3\x35\x9A\xFF\x64",
+ .input = "\xDF\xDD\x69\xFA\xB0\x2E\xFD\xFE"
+ "\x70\x9E\xC5\x4B\xC9\xD4\xA1\x30"
+ "\x26\x9B\x89\xA1\xEE\x43\xE0\x52"
+ "\x55\x17\x4E\xC7\x0E\x33\x1F\xF1"
+ "\x9F\x8D\x40\x9F\x24\xFD\x92\xA0"
+ "\xBC\x8F\x35\xDD\x67\x38\xD8\xAA"
+ "\xCF\xF8\x48\xCA\xFB\xE4\x5C\x60"
+ "\x01\x41\x21\x12\x38\xAB\x52\x4F"
+ "\xA8\x57\x20",
+ .ilen = 67,
+ .result = "\x56\xED\x84\x1B\x8F\x26\xBD\x31"
+ "\xC8\x5F\xF6\x6A\x01\x98\x0C\xA3"
+ "\x3A\xD1\x45\xDC\x73\x0A\x7E\x15"
+ "\xAC\x20\xB7\x4E\xE5\x59\xF0\x87"
+ "\x1E\x92\x29\xC0\x34\xCB\x62\xF9"
+ "\x6D\x04\x9B\x0F\xA6\x3D\xD4\x48"
+ "\xDF\x76\x0D\x81\x18\xAF\x23\xBA"
+ "\x51\xE8\x5C\xF3\x8A\x21\x95\x2C"
+ "\xC3\x37\xCE",
+ .rlen = 67,
},
};
diff --git a/crypto/vmac.c b/crypto/vmac.c
index f35ff8a3926e..4243905ba135 100644
--- a/crypto/vmac.c
+++ b/crypto/vmac.c
@@ -27,6 +27,7 @@
#include <linux/init.h>
#include <linux/types.h>
#include <linux/crypto.h>
+#include <linux/module.h>
#include <linux/scatterlist.h>
#include <asm/byteorder.h>
#include <crypto/scatterwalk.h>
diff --git a/crypto/wp512.c b/crypto/wp512.c
index 723427273687..71719a2be25a 100644
--- a/crypto/wp512.c
+++ b/crypto/wp512.c
@@ -762,11 +762,17 @@ static const u64 C7[256] = {
0x86228644a411c286ULL,
};
-static const u64 rc[WHIRLPOOL_ROUNDS + 1] = {
- 0x0000000000000000ULL, 0x1823c6e887b8014fULL, 0x36a6d2f5796f9152ULL,
- 0x60bc9b8ea30c7b35ULL, 0x1de0d7c22e4bfe57ULL, 0x157737e59ff04adaULL,
- 0x58c9290ab1a06b85ULL, 0xbd5d10f4cb3e0567ULL, 0xe427418ba77d95d8ULL,
- 0xfbee7c66dd17479eULL, 0xca2dbf07ad5a8333ULL,
+static const u64 rc[WHIRLPOOL_ROUNDS] = {
+ 0x1823c6e887b8014fULL,
+ 0x36a6d2f5796f9152ULL,
+ 0x60bc9b8ea30c7b35ULL,
+ 0x1de0d7c22e4bfe57ULL,
+ 0x157737e59ff04adaULL,
+ 0x58c9290ab1a06b85ULL,
+ 0xbd5d10f4cb3e0567ULL,
+ 0xe427418ba77d95d8ULL,
+ 0xfbee7c66dd17479eULL,
+ 0xca2dbf07ad5a8333ULL,
};
/**
@@ -793,7 +799,7 @@ static void wp512_process_buffer(struct wp512_ctx *wctx) {
state[6] = block[6] ^ (K[6] = wctx->hash[6]);
state[7] = block[7] ^ (K[7] = wctx->hash[7]);
- for (r = 1; r <= WHIRLPOOL_ROUNDS; r++) {
+ for (r = 0; r < WHIRLPOOL_ROUNDS; r++) {
L[0] = C0[(int)(K[0] >> 56) ] ^
C1[(int)(K[7] >> 48) & 0xff] ^
diff --git a/crypto/xcbc.c b/crypto/xcbc.c
index bb7b67fba349..a5fbdf3738cf 100644
--- a/crypto/xcbc.c
+++ b/crypto/xcbc.c
@@ -22,6 +22,7 @@
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
+#include <linux/module.h>
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
0x02020202, 0x02020202, 0x02020202, 0x02020202,