author     Bernd Edlinger <bernd.edlinger@hotmail.de>   2018-04-24 21:10:13 +0200
committer  Bernd Edlinger <bernd.edlinger@hotmail.de>   2020-05-27 20:11:20 +0200
commit     77286fe3ec6b9777934e67e35f3b7007143b0734 (patch)
tree       47f39fa07cd9979b57ad14f58bc39123133da115 /crypto/modes
parent     Rename EVP_PKEY_cmp() to EVP_PKEY_eq() and EVP_PKEY_cmp_parameters() to EVP_P... (diff)
download   openssl-77286fe3ec6b9777934e67e35f3b7007143b0734.tar.xz
           openssl-77286fe3ec6b9777934e67e35f3b7007143b0734.zip
Avoid undefined behavior with unaligned accesses
Fixes: #4983
[extended tests]
Reviewed-by: Nicola Tuveri <nic.tuv@gmail.com>
(Merged from https://github.com/openssl/openssl/pull/6074)
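
The change replaces plain size_t / u64 casts on possibly misaligned buffers with typedefs that carry GCC's aligned(1) attribute, so the word-at-a-time code stays defined behavior even when in, out, or iv are not naturally aligned. Below is a stand-alone sketch of the same pattern; the xor_block16 helper and the test driver are illustrative only, not OpenSSL code:

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    /*
     * Same idea as the patch: on GCC/Clang a typedef with reduced alignment
     * makes loads and stores through it well defined even when the pointer
     * is misaligned; elsewhere it degrades to a plain size_t and the caller
     * must keep the pointers aligned (OpenSSL's STRICT_ALIGNMENT paths).
     */
    #if defined(__GNUC__)
    typedef size_t size_t_aX __attribute((__aligned__(1)));
    #else
    typedef size_t size_t_aX;
    #endif

    /* XOR one 16-byte block word-at-a-time (illustrative helper). */
    static void xor_block16(unsigned char *out, const unsigned char *in,
                            const unsigned char *iv)
    {
        size_t n;

        for (n = 0; n < 16; n += sizeof(size_t))
            *(size_t_aX *)(out + n) =
                *(size_t_aX *)(in + n) ^ *(size_t_aX *)(iv + n);
    }

    int main(void)
    {
        /* The +1 offsets force misaligned in/out pointers; this is exactly
         * the case the aligned(1) typedef makes safe on GCC/Clang. */
        unsigned char in[1 + 16], out[1 + 16], iv[16];

        memset(in + 1, 0xAA, 16);
        memset(iv, 0x55, 16);
        xor_block16(out + 1, in + 1, iv);
        printf("%02x\n", out[1]);   /* 0xAA ^ 0x55 = 0xFF -> prints "ff" */
        return 0;
    }

On compilers without the attribute the typedef falls back to a plain size_t, which is why the patch keeps the existing STRICT_ALIGNMENT byte-wise code paths.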
Diffstat (limited to 'crypto/modes')
-rw-r--r--   crypto/modes/cbc128.c   19
-rw-r--r--   crypto/modes/ccm128.c   22
-rw-r--r--   crypto/modes/cfb128.c   18
-rw-r--r--   crypto/modes/ctr128.c   11
-rw-r--r--   crypto/modes/gcm128.c   22
-rw-r--r--   crypto/modes/ofb128.c   11
-rw-r--r--   crypto/modes/xts128.c   24
7 files changed, 91 insertions, 36 deletions
diff --git a/crypto/modes/cbc128.c b/crypto/modes/cbc128.c
index eec44bd91a..ba765626ee 100644
--- a/crypto/modes/cbc128.c
+++ b/crypto/modes/cbc128.c
@@ -15,6 +15,12 @@
 # define STRICT_ALIGNMENT 0
 #endif
 
+#if defined(__GNUC__) && !STRICT_ALIGNMENT
+typedef size_t size_t_aX __attribute((__aligned__(1)));
+#else
+typedef size_t size_t_aX;
+#endif
+
 void CRYPTO_cbc128_encrypt(const unsigned char *in, unsigned char *out,
                            size_t len, const void *key,
                            unsigned char ivec[16], block128_f block)
@@ -40,8 +46,8 @@ void CRYPTO_cbc128_encrypt(const unsigned char *in, unsigned char *out,
     } else {
         while (len >= 16) {
             for (n = 0; n < 16; n += sizeof(size_t))
-                *(size_t *)(out + n) =
-                    *(size_t *)(in + n) ^ *(size_t *)(iv + n);
+                *(size_t_aX *)(out + n) =
+                    *(size_t_aX *)(in + n) ^ *(size_t_aX *)(iv + n);
             (*block) (out, out, key);
             iv = out;
             len -= 16;
@@ -96,7 +102,8 @@ void CRYPTO_cbc128_decrypt(const unsigned char *in, unsigned char *out,
             }
         } else if (16 % sizeof(size_t) == 0) { /* always true */
             while (len >= 16) {
-                size_t *out_t = (size_t *)out, *iv_t = (size_t *)iv;
+                size_t_aX *out_t = (size_t_aX *)out;
+                size_t_aX *iv_t = (size_t_aX *)iv;
 
                 (*block) (in, out, key);
                 for (n = 0; n < 16 / sizeof(size_t); n++)
@@ -125,8 +132,10 @@ void CRYPTO_cbc128_decrypt(const unsigned char *in, unsigned char *out,
             }
         } else if (16 % sizeof(size_t) == 0) { /* always true */
             while (len >= 16) {
-                size_t c, *out_t = (size_t *)out, *ivec_t = (size_t *)ivec;
-                const size_t *in_t = (const size_t *)in;
+                size_t c;
+                size_t_aX *out_t = (size_t_aX *)out;
+                size_t_aX *ivec_t = (size_t_aX *)ivec;
+                const size_t_aX *in_t = (const size_t_aX *)in;
 
                 (*block) (in, tmp.c, key);
                 for (n = 0; n < 16 / sizeof(size_t); n++) {
diff --git a/crypto/modes/ccm128.c b/crypto/modes/ccm128.c
index 1ffd6df46f..e1b2501273 100644
--- a/crypto/modes/ccm128.c
+++ b/crypto/modes/ccm128.c
@@ -11,6 +11,14 @@
 #include <openssl/crypto.h>
 #include "crypto/modes.h"
 
+#ifndef STRICT_ALIGNMENT
+# ifdef __GNUC__
+typedef u64 u64_a1 __attribute((__aligned__(1)));
+# else
+typedef u64 u64_a1;
+# endif
+#endif
+
 /*
  * First you setup M and L parameters and pass the key schedule. This is
  * called once per session setup...
@@ -170,8 +178,8 @@ int CRYPTO_ccm128_encrypt(CCM128_CONTEXT *ctx,
         ctx->cmac.u[0] ^= temp.u[0];
         ctx->cmac.u[1] ^= temp.u[1];
 #else
-        ctx->cmac.u[0] ^= ((u64 *)inp)[0];
-        ctx->cmac.u[1] ^= ((u64 *)inp)[1];
+        ctx->cmac.u[0] ^= ((u64_a1 *)inp)[0];
+        ctx->cmac.u[1] ^= ((u64_a1 *)inp)[1];
 #endif
         (*block) (ctx->cmac.c, ctx->cmac.c, key);
         (*block) (ctx->nonce.c, scratch.c, key);
@@ -181,8 +189,8 @@ int CRYPTO_ccm128_encrypt(CCM128_CONTEXT *ctx,
         temp.u[1] ^= scratch.u[1];
         memcpy(out, temp.c, 16);
 #else
-        ((u64 *)out)[0] = scratch.u[0] ^ ((u64 *)inp)[0];
-        ((u64 *)out)[1] = scratch.u[1] ^ ((u64 *)inp)[1];
+        ((u64_a1 *)out)[0] = scratch.u[0] ^ ((u64_a1 *)inp)[0];
+        ((u64_a1 *)out)[1] = scratch.u[1] ^ ((u64_a1 *)inp)[1];
 #endif
         inp += 16;
         out += 16;
@@ -254,8 +262,10 @@ int CRYPTO_ccm128_decrypt(CCM128_CONTEXT *ctx,
         ctx->cmac.u[1] ^= (scratch.u[1] ^= temp.u[1]);
         memcpy(out, scratch.c, 16);
 #else
-        ctx->cmac.u[0] ^= (((u64 *)out)[0] = scratch.u[0] ^ ((u64 *)inp)[0]);
-        ctx->cmac.u[1] ^= (((u64 *)out)[1] = scratch.u[1] ^ ((u64 *)inp)[1]);
+        ctx->cmac.u[0] ^= (((u64_a1 *)out)[0]
+                           = scratch.u[0] ^ ((u64_a1 *)inp)[0]);
+        ctx->cmac.u[1] ^= (((u64_a1 *)out)[1]
+                           = scratch.u[1] ^ ((u64_a1 *)inp)[1]);
 #endif
         (*block) (ctx->cmac.c, ctx->cmac.c, key);
 
diff --git a/crypto/modes/cfb128.c b/crypto/modes/cfb128.c
index e9ce4df3a5..e60b90b8c6 100644
--- a/crypto/modes/cfb128.c
+++ b/crypto/modes/cfb128.c
@@ -11,6 +11,12 @@
 #include <openssl/crypto.h>
 #include "crypto/modes.h"
 
+#if defined(__GNUC__) && !defined(STRICT_ALIGNMENT)
+typedef size_t size_t_aX __attribute((__aligned__(1)));
+#else
+typedef size_t size_t_aX;
+#endif
+
 /*
  * The input and output encrypted as though 128bit cfb mode is being used.
  * The extra state information to record how much of the 128bit block we have
@@ -43,8 +49,9 @@ void CRYPTO_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                 while (len >= 16) {
                     (*block) (ivec, ivec, key);
                     for (; n < 16; n += sizeof(size_t)) {
-                        *(size_t *)(out + n) =
-                            *(size_t *)(ivec + n) ^= *(size_t *)(in + n);
+                        *(size_t_aX *)(out + n) =
+                            *(size_t_aX *)(ivec + n)
+                                ^= *(size_t_aX *)(in + n);
                     }
                     len -= 16;
                     out += 16;
@@ -92,9 +99,10 @@ void CRYPTO_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                 while (len >= 16) {
                     (*block) (ivec, ivec, key);
                     for (; n < 16; n += sizeof(size_t)) {
-                        size_t t = *(size_t *)(in + n);
-                        *(size_t *)(out + n) = *(size_t *)(ivec + n) ^ t;
-                        *(size_t *)(ivec + n) = t;
+                        size_t t = *(size_t_aX *)(in + n);
+                        *(size_t_aX *)(out + n)
+                            = *(size_t_aX *)(ivec + n) ^ t;
+                        *(size_t_aX *)(ivec + n) = t;
                     }
                     len -= 16;
                     out += 16;
diff --git a/crypto/modes/ctr128.c b/crypto/modes/ctr128.c
index ff7499b34a..fc1db42d7f 100644
--- a/crypto/modes/ctr128.c
+++ b/crypto/modes/ctr128.c
@@ -11,6 +11,12 @@
 #include <openssl/crypto.h>
 #include "crypto/modes.h"
 
+#if defined(__GNUC__) && !defined(STRICT_ALIGNMENT)
+typedef size_t size_t_aX __attribute((__aligned__(1)));
+#else
+typedef size_t size_t_aX;
+#endif
+
 /*
  * NOTE: the IV/counter CTR mode is big-endian. The code itself is
  * endian-neutral.
@@ -97,8 +103,9 @@ void CRYPTO_ctr128_encrypt(const unsigned char *in, unsigned char *out,
                 (*block) (ivec, ecount_buf, key);
                 ctr128_inc_aligned(ivec);
                 for (n = 0; n < 16; n += sizeof(size_t))
-                    *(size_t *)(out + n) =
-                        *(size_t *)(in + n) ^ *(size_t *)(ecount_buf + n);
+                    *(size_t_aX *)(out + n) =
+                        *(size_t_aX *)(in + n)
+                            ^ *(size_t_aX *)(ecount_buf + n);
                 len -= 16;
                 out += 16;
                 in += 16;
diff --git a/crypto/modes/gcm128.c b/crypto/modes/gcm128.c
index d2f2da61b3..dc6d90dd0c 100644
--- a/crypto/modes/gcm128.c
+++ b/crypto/modes/gcm128.c
@@ -12,6 +12,12 @@
 #include "internal/cryptlib.h"
 #include "crypto/modes.h"
 
+#if defined(__GNUC__) && !defined(STRICT_ALIGNMENT)
+typedef size_t size_t_aX __attribute((__aligned__(1)));
+#else
+typedef size_t size_t_aX;
+#endif
+
 #if defined(BSWAP4) && defined(STRICT_ALIGNMENT)
 /* redefine, because alignment is ensured */
 # undef GETU32
@@ -1080,8 +1086,8 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
                 size_t j = GHASH_CHUNK;
 
                 while (j) {
-                    size_t *out_t = (size_t *)out;
-                    const size_t *in_t = (const size_t *)in;
+                    size_t_aX *out_t = (size_t_aX *)out;
+                    const size_t_aX *in_t = (const size_t_aX *)in;
 
                     (*block) (ctx->Yi.c, ctx->EKi.c, key);
                     ++ctr;
@@ -1107,8 +1113,8 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx,
                 size_t j = i;
 
                 while (len >= 16) {
-                    size_t *out_t = (size_t *)out;
-                    const size_t *in_t = (const size_t *)in;
+                    size_t_aX *out_t = (size_t_aX *)out;
+                    const size_t_aX *in_t = (const size_t_aX *)in;
 
                     (*block) (ctx->Yi.c, ctx->EKi.c, key);
                     ++ctr;
@@ -1318,8 +1324,8 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
 
                 GHASH(ctx, in, GHASH_CHUNK);
                 while (j) {
-                    size_t *out_t = (size_t *)out;
-                    const size_t *in_t = (const size_t *)in;
+                    size_t_aX *out_t = (size_t_aX *)out;
+                    const size_t_aX *in_t = (const size_t_aX *)in;
 
                     (*block) (ctx->Yi.c, ctx->EKi.c, key);
                     ++ctr;
@@ -1343,8 +1349,8 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx,
             if ((i = (len & (size_t)-16))) {
                 GHASH(ctx, in, i);
                 while (len >= 16) {
-                    size_t *out_t = (size_t *)out;
-                    const size_t *in_t = (const size_t *)in;
+                    size_t_aX *out_t = (size_t_aX *)out;
+                    const size_t_aX *in_t = (const size_t_aX *)in;
 
                     (*block) (ctx->Yi.c, ctx->EKi.c, key);
                     ++ctr;
diff --git a/crypto/modes/ofb128.c b/crypto/modes/ofb128.c
index 2eca09bc1b..e9b24f863e 100644
--- a/crypto/modes/ofb128.c
+++ b/crypto/modes/ofb128.c
@@ -11,6 +11,12 @@
 #include <openssl/crypto.h>
 #include "crypto/modes.h"
 
+#if defined(__GNUC__) && !defined(STRICT_ALIGNMENT)
+typedef size_t size_t_aX __attribute((__aligned__(1)));
+#else
+typedef size_t size_t_aX;
+#endif
+
 /*
  * The input and output encrypted as though 128bit ofb mode is being used.
  * The extra state information to record how much of the 128bit block we have
@@ -41,8 +47,9 @@ void CRYPTO_ofb128_encrypt(const unsigned char *in, unsigned char *out,
             while (len >= 16) {
                 (*block) (ivec, ivec, key);
                 for (; n < 16; n += sizeof(size_t))
-                    *(size_t *)(out + n) =
-                        *(size_t *)(in + n) ^ *(size_t *)(ivec + n);
+                    *(size_t_aX *)(out + n) =
+                        *(size_t_aX *)(in + n)
+                            ^ *(size_t_aX *)(ivec + n);
                 len -= 16;
                 out += 16;
                 in += 16;
diff --git a/crypto/modes/xts128.c b/crypto/modes/xts128.c
index 9d9b65caa5..9dbcb5bc9a 100644
--- a/crypto/modes/xts128.c
+++ b/crypto/modes/xts128.c
@@ -11,6 +11,14 @@
 #include <openssl/crypto.h>
 #include "crypto/modes.h"
 
+#ifndef STRICT_ALIGNMENT
+# ifdef __GNUC__
+typedef u64 u64_a1 __attribute((__aligned__(1)));
+# else
+typedef u64 u64_a1;
+# endif
+#endif
+
 int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx,
                           const unsigned char iv[16],
                           const unsigned char *inp, unsigned char *out,
@@ -45,8 +53,8 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx,
         scratch.u[0] ^= tweak.u[0];
         scratch.u[1] ^= tweak.u[1];
 #else
-        scratch.u[0] = ((u64 *)inp)[0] ^ tweak.u[0];
-        scratch.u[1] = ((u64 *)inp)[1] ^ tweak.u[1];
+        scratch.u[0] = ((u64_a1 *)inp)[0] ^ tweak.u[0];
+        scratch.u[1] = ((u64_a1 *)inp)[1] ^ tweak.u[1];
 #endif
         (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
 #if defined(STRICT_ALIGNMENT)
@@ -54,8 +62,8 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx,
         scratch.u[1] ^= tweak.u[1];
         memcpy(out, scratch.c, 16);
 #else
-        ((u64 *)out)[0] = scratch.u[0] ^= tweak.u[0];
-        ((u64 *)out)[1] = scratch.u[1] ^= tweak.u[1];
+        ((u64_a1 *)out)[0] = scratch.u[0] ^= tweak.u[0];
+        ((u64_a1 *)out)[1] = scratch.u[1] ^= tweak.u[1];
 #endif
         inp += 16;
         out += 16;
@@ -128,8 +136,8 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx,
         scratch.u[0] ^= tweak1.u[0];
         scratch.u[1] ^= tweak1.u[1];
 #else
-        scratch.u[0] = ((u64 *)inp)[0] ^ tweak1.u[0];
-        scratch.u[1] = ((u64 *)inp)[1] ^ tweak1.u[1];
+        scratch.u[0] = ((u64_a1 *)inp)[0] ^ tweak1.u[0];
+        scratch.u[1] = ((u64_a1 *)inp)[1] ^ tweak1.u[1];
 #endif
         (*ctx->block1) (scratch.c, scratch.c, ctx->key1);
         scratch.u[0] ^= tweak1.u[0];
@@ -148,8 +156,8 @@ int CRYPTO_xts128_encrypt(const XTS128_CONTEXT *ctx,
         scratch.u[1] ^= tweak.u[1];
         memcpy(out, scratch.c, 16);
 #else
-        ((u64 *)out)[0] = scratch.u[0] ^ tweak.u[0];
-        ((u64 *)out)[1] = scratch.u[1] ^ tweak.u[1];
+        ((u64_a1 *)out)[0] = scratch.u[0] ^ tweak.u[0];
+        ((u64_a1 *)out)[1] = scratch.u[1] ^ tweak.u[1];
 #endif
     }
 
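
For comparison only (this is not the approach the patch takes): the fully portable way to express the same unaligned word access without compiler extensions is to go through memcpy, which compilers typically fold into a single, possibly unaligned, load or store where the target allows it. A minimal sketch with a hypothetical xor_block16_memcpy helper:

    #include <stddef.h>
    #include <string.h>

    /*
     * Alternative sketch: fixed-size memcpy is always well defined
     * regardless of alignment, and compilers usually lower these calls
     * to plain load/store instructions.
     */
    static void xor_block16_memcpy(unsigned char *out, const unsigned char *in,
                                   const unsigned char *iv)
    {
        size_t n, a, b;

        for (n = 0; n < 16; n += sizeof(size_t)) {
            memcpy(&a, in + n, sizeof(a));    /* unaligned-safe load */
            memcpy(&b, iv + n, sizeof(b));
            a ^= b;
            memcpy(out + n, &a, sizeof(a));   /* unaligned-safe store */
        }
    }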