author	Behan Webster <behanw@converseincode.com>	2014-04-04 23:18:00 +0200
committer	Behan Webster <behanw@converseincode.com>	2014-10-14 10:51:23 +0200
commit	37e5265437a02e66e8c345f563241e79b4b7f087 (patch)
tree	91676171d241de3ff3295c762899877da136272d /drivers/crypto
parent	crypto: LLVMLinux: Remove VLAIS from crypto/omap_sham.c (diff)
crypto: LLVMLinux: Remove VLAIS from crypto/.../qat_algs.c
Replaced the use of a Variable Length Array In Struct (VLAIS) with a C99-compliant equivalent. This patch allocates the appropriate amount of memory in a char array on the stack via the SHASH_DESC_ON_STACK macro. The new code can be compiled with both gcc and clang.

Signed-off-by: Behan Webster <behanw@converseincode.com>
Reviewed-by: Mark Charlebois <charlebm@gmail.com>
Reviewed-by: Jan-Simon Möller <dl9pf@gmx.de>
Acked-by: Herbert Xu <herbert@gondor.apana.org.au>
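For context, the conversion can be summarized with a small before/after sketch. This is illustrative only: the "before" block is the construct removed by this patch, while the expansion shown for SHASH_DESC_ON_STACK() is a paraphrase of its definition in include/crypto/hash.h at the time and may differ in detail (CRYPTO_MINALIGN_ATTR is the alignment attribute used there); ctx->hash_tfm is the shash transform from the surrounding function.

/* Needs the kernel shash API: */
#include <crypto/hash.h>

/* Before: VLAIS -- a variably sized array embedded in a struct on the
 * stack. gcc accepts this as an extension; clang rejects it. */
struct {
	struct shash_desc shash;
	char ctx[crypto_shash_descsize(ctx->hash_tfm)];
} desc;
desc.shash.tfm = ctx->hash_tfm;

/* After: SHASH_DESC_ON_STACK(shash, ctx->hash_tfm) expands to roughly
 * (paraphrased):
 *
 *	char __shash_desc[sizeof(struct shash_desc) +
 *			  crypto_shash_descsize(ctx->hash_tfm)] CRYPTO_MINALIGN_ATTR;
 *	struct shash_desc *shash = (struct shash_desc *)__shash_desc;
 *
 * i.e. a plain C99 variable length array of char plus a typed pointer
 * into it, which both gcc and clang accept. */
SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
shash->tfm = ctx->hash_tfm;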
Diffstat (limited to 'drivers/crypto')
-rw-r--r--	drivers/crypto/qat/qat_common/qat_algs.c	31
1 file changed, 14 insertions(+), 17 deletions(-)
diff --git a/drivers/crypto/qat/qat_common/qat_algs.c b/drivers/crypto/qat/qat_common/qat_algs.c
index 3e26fa2b293f..f2e2f158cfbe 100644
--- a/drivers/crypto/qat/qat_common/qat_algs.c
+++ b/drivers/crypto/qat/qat_common/qat_algs.c
@@ -149,10 +149,7 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 				  unsigned int auth_keylen)
 {
 	struct qat_auth_state auth_state;
-	struct {
-		struct shash_desc shash;
-		char ctx[crypto_shash_descsize(ctx->hash_tfm)];
-	} desc;
+	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
 	struct sha1_state sha1;
 	struct sha256_state sha256;
 	struct sha512_state sha512;
@@ -165,12 +162,12 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	int i, offset;
 
 	memset(auth_state.data, '\0', MAX_AUTH_STATE_SIZE + 64);
-	desc.shash.tfm = ctx->hash_tfm;
-	desc.shash.flags = 0x0;
+	shash->tfm = ctx->hash_tfm;
+	shash->flags = 0x0;
 
 	if (auth_keylen > block_size) {
 		char buff[SHA512_BLOCK_SIZE];
-		int ret = crypto_shash_digest(&desc.shash, auth_key,
+		int ret = crypto_shash_digest(shash, auth_key,
 					      auth_keylen, buff);
 		if (ret)
 			return ret;
@@ -193,10 +190,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		*opad_ptr ^= 0x5C;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, ipad, block_size))
+	if (crypto_shash_update(shash, ipad, block_size))
 		return -EFAULT;
 
 	hash_state_out = (__be32 *)hash->sha.state1;
@@ -204,19 +201,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
@@ -225,10 +222,10 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 		return -EFAULT;
 	}
 
-	if (crypto_shash_init(&desc.shash))
+	if (crypto_shash_init(shash))
 		return -EFAULT;
 
-	if (crypto_shash_update(&desc.shash, opad, block_size))
+	if (crypto_shash_update(shash, opad, block_size))
 		return -EFAULT;
 
 	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
@@ -237,19 +234,19 @@ static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
 	switch (ctx->qat_hash_alg) {
 	case ICP_QAT_HW_AUTH_ALGO_SHA1:
-		if (crypto_shash_export(&desc.shash, &sha1))
+		if (crypto_shash_export(shash, &sha1))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha1.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA256:
-		if (crypto_shash_export(&desc.shash, &sha256))
+		if (crypto_shash_export(shash, &sha256))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
 			*hash_state_out = cpu_to_be32(*(sha256.state + i));
 		break;
 	case ICP_QAT_HW_AUTH_ALGO_SHA512:
-		if (crypto_shash_export(&desc.shash, &sha512))
+		if (crypto_shash_export(shash, &sha512))
 			return -EFAULT;
 		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
 			*hash512_state_out = cpu_to_be64(*(sha512.state + i));
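Taken together, the post-patch call pattern in qat_alg_do_precomputes() reduces to the sketch below. This is a simplified, SHA1-only illustration of the flow visible in the hunks above, not the driver's actual code: the helper name precompute_sha1_sketch() and its parameter list are invented for the example, while the crypto_shash_*() calls, the descriptor setup and the byte-swapping loop mirror the patched function.

#include <linux/types.h>	/* u8, __be32 */
#include <asm/byteorder.h>	/* cpu_to_be32 */
#include <crypto/hash.h>	/* shash API, SHASH_DESC_ON_STACK */
#include <crypto/sha.h>		/* struct sha1_state */

/* Hypothetical, simplified SHA1-only version of the precompute flow after
 * the patch; hash_tfm stands in for ctx->hash_tfm in the driver. */
static int precompute_sha1_sketch(struct crypto_shash *hash_tfm,
				  const u8 *ipad, unsigned int block_size,
				  __be32 *hash_state_out,
				  unsigned int digest_size)
{
	SHASH_DESC_ON_STACK(shash, hash_tfm);	/* replaces the old VLAIS */
	struct sha1_state sha1;
	int i;

	shash->tfm = hash_tfm;
	shash->flags = 0x0;

	if (crypto_shash_init(shash))
		return -EFAULT;
	if (crypto_shash_update(shash, ipad, block_size))
		return -EFAULT;
	if (crypto_shash_export(shash, &sha1))
		return -EFAULT;

	/* Copy the exported partial hash state into the hardware's
	 * big-endian state layout, 32 bits at a time. */
	for (i = 0; i < digest_size >> 2; i++)
		hash_state_out[i] = cpu_to_be32(sha1.state[i]);

	return 0;
}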