| author | Linus Torvalds <torvalds@linux-foundation.org> | 2023-11-03 03:15:30 +0100 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2023-11-03 03:15:30 +0100 |
| commit | bc3012f4e3a9765de81f454cb8f9bb16aafc6ff5 (patch) | |
| tree | 2c127c669218b8c74c843331e455372f88a6a848 | /arch/sparc |
| parent | Merge tag 'for-linus' of git://git.kernel.org/pub/scm/virt/kvm/kvm (diff) | |
| parent | crypto: adiantum - flush destination page before unmapping (diff) | |
Merge tag 'v6.7-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
"API:
- Add virtual-address based lskcipher interface
- Optimise ahash/shash performance in light of costly indirect calls
- Remove ahash alignmask attribute
Algorithms:
- Improve AES/XTS performance of 6-way unrolling for ppc
- Remove some uses of obsolete algorithms (md4, md5, sha1)
- Add FIPS 202 SHA-3 support in pkcs1pad
- Add fast path for single-page messages in adiantum
- Remove zlib-deflate
Drivers:
- Add support for S4 in meson RNG driver
- Add STM32MP13x support in stm32
- Add hwrng interface support in qcom-rng
- Add support for deflate algorithm in hisilicon/zip"
* tag 'v6.7-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (283 commits)
crypto: adiantum - flush destination page before unmapping
crypto: testmgr - move pkcs1pad(rsa,sha3-*) to correct place
Documentation/module-signing.txt: bring up to date
module: enable automatic module signing with FIPS 202 SHA-3
crypto: asymmetric_keys - allow FIPS 202 SHA-3 signatures
crypto: rsa-pkcs1pad - Add FIPS 202 SHA-3 support
crypto: FIPS 202 SHA-3 register in hash info for IMA
x509: Add OIDs for FIPS 202 SHA-3 hash and signatures
crypto: ahash - optimize performance when wrapping shash
crypto: ahash - check for shash type instead of not ahash type
crypto: hash - move "ahash wrapping shash" functions to ahash.c
crypto: talitos - stop using crypto_ahash::init
crypto: chelsio - stop using crypto_ahash::init
crypto: ahash - improve file comment
crypto: ahash - remove struct ahash_request_priv
crypto: ahash - remove crypto_ahash_alignmask
crypto: gcm - stop using alignmask of ahash
crypto: chacha20poly1305 - stop using alignmask of ahash
crypto: ccm - stop using alignmask of ahash
net: ipv6: stop checking crypto_ahash_alignmask
...
Diffstat (limited to 'arch/sparc')
-rw-r--r-- | arch/sparc/crypto/crc32c_glue.c | 45 |
1 files changed, 24 insertions, 21 deletions
diff --git a/arch/sparc/crypto/crc32c_glue.c b/arch/sparc/crypto/crc32c_glue.c
index 82efb7f81c28..688db0dcb97d 100644
--- a/arch/sparc/crypto/crc32c_glue.c
+++ b/arch/sparc/crypto/crc32c_glue.c
@@ -20,6 +20,7 @@
 
 #include <asm/pstate.h>
 #include <asm/elf.h>
+#include <asm/unaligned.h>
 
 #include "opcodes.h"
 
@@ -35,7 +36,7 @@ static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
 
 	if (keylen != sizeof(u32))
 		return -EINVAL;
-	*mctx = le32_to_cpup((__le32 *)key);
+	*mctx = get_unaligned_le32(key);
 	return 0;
 }
 
@@ -51,18 +52,26 @@ static int crc32c_sparc64_init(struct shash_desc *desc)
 
 extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);
 
-static void crc32c_compute(u32 *crcp, const u64 *data, unsigned int len)
+static u32 crc32c_compute(u32 crc, const u8 *data, unsigned int len)
 {
-	unsigned int asm_len;
-
-	asm_len = len & ~7U;
-	if (asm_len) {
-		crc32c_sparc64(crcp, data, asm_len);
-		data += asm_len / 8;
-		len -= asm_len;
+	unsigned int n = -(uintptr_t)data & 7;
+
+	if (n) {
+		/* Data isn't 8-byte aligned.  Align it. */
+		n = min(n, len);
+		crc = __crc32c_le(crc, data, n);
+		data += n;
+		len -= n;
+	}
+	n = len & ~7U;
+	if (n) {
+		crc32c_sparc64(&crc, (const u64 *)data, n);
+		data += n;
+		len -= n;
 	}
 	if (len)
-		*crcp = __crc32c_le(*crcp, (const unsigned char *) data, len);
+		crc = __crc32c_le(crc, data, len);
+	return crc;
 }
 
 static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
@@ -70,19 +79,14 @@ static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
 {
 	u32 *crcp = shash_desc_ctx(desc);
 
-	crc32c_compute(crcp, (const u64 *) data, len);
-
+	*crcp = crc32c_compute(*crcp, data, len);
 	return 0;
 }
 
-static int __crc32c_sparc64_finup(u32 *crcp, const u8 *data, unsigned int len,
-				  u8 *out)
+static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,
+				  unsigned int len, u8 *out)
 {
-	u32 tmp = *crcp;
-
-	crc32c_compute(&tmp, (const u64 *) data, len);
-
-	*(__le32 *) out = ~cpu_to_le32(tmp);
+	put_unaligned_le32(~crc32c_compute(*crcp, data, len), out);
 	return 0;
 }
 
@@ -96,7 +100,7 @@ static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
 {
 	u32 *crcp = shash_desc_ctx(desc);
 
-	*(__le32 *) out = ~cpu_to_le32p(crcp);
+	put_unaligned_le32(~*crcp, out);
 	return 0;
 }
 
@@ -135,7 +139,6 @@ static struct shash_alg alg = {
 		.cra_flags	=	CRYPTO_ALG_OPTIONAL_KEY,
 		.cra_blocksize	=	CHKSUM_BLOCK_SIZE,
 		.cra_ctxsize	=	sizeof(u32),
-		.cra_alignmask	=	7,
 		.cra_module	=	THIS_MODULE,
 		.cra_init	=	crc32c_sparc64_cra_init,
 	}
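
The core of the sparc change is how the rewritten crc32c_compute() copes with buffers that are no longer guaranteed to be 8-byte aligned now that .cra_alignmask is gone: fold in any misaligned prefix byte by byte, run the 8-byte-at-a-time assembly routine over the aligned bulk, then finish the tail generically. Below is a minimal, self-contained userspace sketch of that same splitting pattern, not the kernel code itself: crc32c_bytewise() and crc32c_fast8() are hypothetical stand-ins for the kernel's __crc32c_le() and the crc32c_sparc64() assembly, and main() should print e3069283, the standard CRC-32C check value for "123456789".

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Plain byte-at-a-time CRC-32C (Castagnoli, reflected polynomial 0x82F63B78). */
static uint32_t crc32c_bytewise(uint32_t crc, const uint8_t *data, size_t len)
{
	while (len--) {
		crc ^= *data++;
		for (int i = 0; i < 8; i++)
			crc = (crc >> 1) ^ (0x82F63B78u & -(crc & 1));
	}
	return crc;
}

/* Stand-in for the 8-byte-at-a-time fast path (the kernel's crc32c_sparc64()).
 * By construction it is only called with 8-byte-aligned data and a length
 * that is a multiple of 8. */
static uint32_t crc32c_fast8(uint32_t crc, const uint8_t *data, size_t len)
{
	return crc32c_bytewise(crc, data, len);
}

/* Same shape as the patched crc32c_compute(): prefix, aligned bulk, tail. */
static uint32_t crc32c_compute(uint32_t crc, const uint8_t *data, size_t len)
{
	size_t n = -(uintptr_t)data & 7;	/* bytes until 8-byte alignment */

	if (n) {
		if (n > len)			/* don't run past the buffer */
			n = len;
		crc = crc32c_bytewise(crc, data, n);
		data += n;
		len -= n;
	}
	n = len & ~(size_t)7;			/* aligned bulk */
	if (n) {
		crc = crc32c_fast8(crc, data, n);
		data += n;
		len -= n;
	}
	if (len)				/* trailing tail */
		crc = crc32c_bytewise(crc, data, len);
	return crc;
}

int main(void)
{
	const char msg[] = "123456789";

	/* Init with ~0 and invert at the end, mirroring what the shash
	 * init/final callbacks do around crc32c_compute(). */
	printf("%08x\n", (unsigned int)~crc32c_compute(0xFFFFFFFFu,
						       (const uint8_t *)msg,
						       strlen(msg)));
	return 0;
}

The prefix step is what lets the driver drop .cra_alignmask = 7: instead of asking the crypto API to copy data into an aligned buffer, the driver itself absorbs up to seven leading bytes with the generic routine before handing 8-byte words to the fast path.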