author     Eric Biggers <ebiggers@google.com>  2019-11-18 08:22:16 +0100
committer  Herbert Xu <herbert@gondor.apana.org.au>  2019-11-22 11:48:39 +0100
commit     413808b71e6204b0cc1eeaa77960f7c3cd381d33 (patch)
tree       066de837a558fe66c353eda30ffd2ee15c1f0b64 /lib
parent     crypto: x86/chacha - only unregister algorithms if registered (diff)
crypto: lib/chacha20poly1305 - use chacha20_crypt()
Use chacha20_crypt() instead of chacha_crypt(), since it's not really
appropriate for users of the ChaCha library API to be passing the number
of rounds as an argument.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Acked-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
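For reference, the call sites below pass the same arguments to chacha20_crypt()
as they previously passed to chacha_crypt(), minus the trailing round count.
A minimal sketch of such a wrapper, assuming the u32 chacha_state array seen
in the diff (the real helper is provided by the kernel's ChaCha library header,
not by this patch):

/*
 * Sketch only, not part of this patch: chacha20_crypt() as a thin wrapper
 * that hard-codes 20 rounds, so library users no longer pass the round
 * count themselves. The state type mirrors the u32 chacha_state[] used by
 * the call sites in the diff below.
 */
static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
				  unsigned int bytes)
{
	chacha_crypt(state, dst, src, bytes, 20);
}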
Diffstat (limited to 'lib')
-rw-r--r--  lib/crypto/chacha20poly1305.c  16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/lib/crypto/chacha20poly1305.c b/lib/crypto/chacha20poly1305.c
index 821e5cc9b14e..6d83cafebc69 100644
--- a/lib/crypto/chacha20poly1305.c
+++ b/lib/crypto/chacha20poly1305.c
@@ -66,14 +66,14 @@ __chacha20poly1305_encrypt(u8 *dst, const u8 *src, const size_t src_len,
__le64 lens[2];
} b;
- chacha_crypt(chacha_state, b.block0, pad0, sizeof(b.block0), 20);
+ chacha20_crypt(chacha_state, b.block0, pad0, sizeof(b.block0));
poly1305_init(&poly1305_state, b.block0);
poly1305_update(&poly1305_state, ad, ad_len);
if (ad_len & 0xf)
poly1305_update(&poly1305_state, pad0, 0x10 - (ad_len & 0xf));
- chacha_crypt(chacha_state, dst, src, src_len, 20);
+ chacha20_crypt(chacha_state, dst, src, src_len);
poly1305_update(&poly1305_state, dst, src_len);
if (src_len & 0xf)
@@ -140,7 +140,7 @@ __chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
if (unlikely(src_len < POLY1305_DIGEST_SIZE))
return false;
- chacha_crypt(chacha_state, b.block0, pad0, sizeof(b.block0), 20);
+ chacha20_crypt(chacha_state, b.block0, pad0, sizeof(b.block0));
poly1305_init(&poly1305_state, b.block0);
poly1305_update(&poly1305_state, ad, ad_len);
@@ -160,7 +160,7 @@ __chacha20poly1305_decrypt(u8 *dst, const u8 *src, const size_t src_len,
ret = crypto_memneq(b.mac, src + dst_len, POLY1305_DIGEST_SIZE);
if (likely(!ret))
- chacha_crypt(chacha_state, dst, src, dst_len, 20);
+ chacha20_crypt(chacha_state, dst, src, dst_len);
memzero_explicit(&b, sizeof(b));
@@ -241,7 +241,7 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
b.iv[1] = cpu_to_le64(nonce);
chacha_init(chacha_state, b.k, (u8 *)b.iv);
- chacha_crypt(chacha_state, b.block0, pad0, sizeof(b.block0), 20);
+ chacha20_crypt(chacha_state, b.block0, pad0, sizeof(b.block0));
poly1305_init(&poly1305_state, b.block0);
if (unlikely(ad_len)) {
@@ -278,14 +278,14 @@ bool chacha20poly1305_crypt_sg_inplace(struct scatterlist *src,
if (unlikely(length < sl))
l &= ~(CHACHA_BLOCK_SIZE - 1);
- chacha_crypt(chacha_state, addr, addr, l, 20);
+ chacha20_crypt(chacha_state, addr, addr, l);
addr += l;
length -= l;
}
if (unlikely(length > 0)) {
- chacha_crypt(chacha_state, b.chacha_stream, pad0,
- CHACHA_BLOCK_SIZE, 20);
+ chacha20_crypt(chacha_state, b.chacha_stream, pad0,
+ CHACHA_BLOCK_SIZE);
crypto_xor(addr, b.chacha_stream, length);
partial = length;
}