author     Linus Torvalds <torvalds@linux-foundation.org>  2018-08-13 22:35:26 +0200
committer  Linus Torvalds <torvalds@linux-foundation.org>  2018-08-13 22:35:26 +0200
commit     f24d6f2654d39355cdf8285e21409ed8d56d4284 (patch)
tree       e6d2c683e61f30147bf73eba8d9fbf2c05865f03 /arch/x86/crypto
parent     Merge branch 'x86-boot-for-linus' of git://git.kernel.org/pub/scm/linux/kerne... (diff)
parent     x86/entry/64: Add two more instruction suffixes (diff)
Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 asm updates from Thomas Gleixner:
"The lowlevel and ASM code updates for x86:
- Make stack trace unwinding more reliable
- ASM instruction updates for better code generation
- Various cleanups"
* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
x86/entry/64: Add two more instruction suffixes
x86/asm/64: Use 32-bit XOR to zero registers
x86/build/vdso: Simplify 'cmd_vdso2c'
x86/build/vdso: Remove unused vdso-syms.lds
x86/stacktrace: Enable HAVE_RELIABLE_STACKTRACE for the ORC unwinder
x86/unwind/orc: Detect the end of the stack
x86/stacktrace: Do not fail for ORC with regs on stack
x86/stacktrace: Clarify the reliable success paths
x86/stacktrace: Remove STACKTRACE_DUMP_ONCE
x86/stacktrace: Do not unwind after user regs
x86/asm: Use CC_SET/CC_OUT in percpu_cmpxchg8b_double() to micro-optimize code generation
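The 'x86/entry/64: Add two more instruction suffixes' change lands in the 64-bit entry code, which is outside the arch/x86/crypto diffstat shown below, so its hunks do not appear here. The idiom it enforces is general, though: an AT&T-syntax assembler cannot infer an operand size when no register operand is present, so the b/w/l/q mnemonic suffix has to be spelled out. A minimal standalone GAS sketch of the idea (illustrative only, not taken from the patch; the label name is made up):

	.text
	.globl	suffix_demo
suffix_demo:
	# A memory-only operand gives the assembler no register to infer
	# the operand size from, so the suffix selects it explicitly:
	incb	(%rdi)		# 8-bit increment of the byte at (%rdi)
	incl	(%rdi)		# 32-bit increment
	incq	(%rdi)		# 64-bit increment
	ret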
Diffstat (limited to 'arch/x86/crypto')
 arch/x86/crypto/aegis128-aesni-asm.S     | 2 +-
 arch/x86/crypto/aegis128l-aesni-asm.S    | 2 +-
 arch/x86/crypto/aegis256-aesni-asm.S     | 2 +-
 arch/x86/crypto/aesni-intel_asm.S        | 8 ++++----
 arch/x86/crypto/aesni-intel_avx-x86_64.S | 4 ++--
 arch/x86/crypto/morus1280-avx2-asm.S     | 2 +-
 arch/x86/crypto/morus1280-sse2-asm.S     | 2 +-
 arch/x86/crypto/morus640-sse2-asm.S      | 2 +-
 arch/x86/crypto/sha1_ssse3_asm.S         | 2 +-
 9 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/arch/x86/crypto/aegis128-aesni-asm.S b/arch/x86/crypto/aegis128-aesni-asm.S
index 717bf0776421..5f7e43d4f64a 100644
--- a/arch/x86/crypto/aegis128-aesni-asm.S
+++ b/arch/x86/crypto/aegis128-aesni-asm.S
@@ -75,7 +75,7 @@
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	pxor MSG, MSG
 
 	mov LEN, %r8
diff --git a/arch/x86/crypto/aegis128l-aesni-asm.S b/arch/x86/crypto/aegis128l-aesni-asm.S
index 4eda2b8db9e1..491dd61c845c 100644
--- a/arch/x86/crypto/aegis128l-aesni-asm.S
+++ b/arch/x86/crypto/aegis128l-aesni-asm.S
@@ -66,7 +66,7 @@
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	pxor MSG0, MSG0
 	pxor MSG1, MSG1
 
diff --git a/arch/x86/crypto/aegis256-aesni-asm.S b/arch/x86/crypto/aegis256-aesni-asm.S
index 32aae8397268..8870c7c5d9a4 100644
--- a/arch/x86/crypto/aegis256-aesni-asm.S
+++ b/arch/x86/crypto/aegis256-aesni-asm.S
@@ -59,7 +59,7 @@
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	pxor MSG, MSG
 
 	mov LEN, %r8
diff --git a/arch/x86/crypto/aesni-intel_asm.S b/arch/x86/crypto/aesni-intel_asm.S
index e762ef417562..9bd139569b41 100644
--- a/arch/x86/crypto/aesni-intel_asm.S
+++ b/arch/x86/crypto/aesni-intel_asm.S
@@ -258,7 +258,7 @@ ALL_F:      .octa 0xffffffffffffffffffffffffffffffff
 .macro GCM_INIT Iv SUBKEY AAD AADLEN
 	mov \AADLEN, %r11
 	mov %r11, AadLen(%arg2) # ctx_data.aad_length = aad_length
-	xor %r11, %r11
+	xor %r11d, %r11d
 	mov %r11, InLen(%arg2) # ctx_data.in_length = 0
 	mov %r11, PBlockLen(%arg2) # ctx_data.partial_block_length = 0
 	mov %r11, PBlockEncKey(%arg2) # ctx_data.partial_block_enc_key = 0
@@ -286,7 +286,7 @@ ALL_F:      .octa 0xffffffffffffffffffffffffffffffff
 	movdqu HashKey(%arg2), %xmm13
 
 	add %arg5, InLen(%arg2)
-	xor %r11, %r11 # initialise the data pointer offset as zero
+	xor %r11d, %r11d # initialise the data pointer offset as zero
 	PARTIAL_BLOCK %arg3 %arg4 %arg5 %r11 %xmm8 \operation
 
 	sub %r11, %arg5		# sub partial block data used
@@ -702,7 +702,7 @@ _no_extra_mask_1_\@:
 
 	# GHASH computation for the last <16 Byte block
 	GHASH_MUL \AAD_HASH, %xmm13, %xmm0, %xmm10, %xmm11, %xmm5, %xmm6
-	xor %rax,%rax
+	xor %eax, %eax
 
 	mov %rax, PBlockLen(%arg2)
 	jmp _dec_done_\@
@@ -737,7 +737,7 @@ _no_extra_mask_2_\@:
 
 	# GHASH computation for the last <16 Byte block
 	GHASH_MUL \AAD_HASH, %xmm13, %xmm0, %xmm10, %xmm11, %xmm5, %xmm6
-	xor %rax,%rax
+	xor %eax, %eax
 
 	mov %rax, PBlockLen(%arg2)
 	jmp _encode_done_\@
diff --git a/arch/x86/crypto/aesni-intel_avx-x86_64.S b/arch/x86/crypto/aesni-intel_avx-x86_64.S
index faecb1518bf8..1985ea0b551b 100644
--- a/arch/x86/crypto/aesni-intel_avx-x86_64.S
+++ b/arch/x86/crypto/aesni-intel_avx-x86_64.S
@@ -463,7 +463,7 @@ _get_AAD_rest_final\@:
 _get_AAD_done\@:
 
 	# initialize the data pointer offset as zero
-	xor %r11, %r11
+	xor %r11d, %r11d
 
 	# start AES for num_initial_blocks blocks
 	mov arg5, %rax                         # rax = *Y0
@@ -1770,7 +1770,7 @@ _get_AAD_rest_final\@:
 _get_AAD_done\@:
 
 	# initialize the data pointer offset as zero
-	xor %r11, %r11
+	xor %r11d, %r11d
 
 	# start AES for num_initial_blocks blocks
 	mov arg5, %rax                         # rax = *Y0
diff --git a/arch/x86/crypto/morus1280-avx2-asm.S b/arch/x86/crypto/morus1280-avx2-asm.S
index 07653d4582a6..de182c460f82 100644
--- a/arch/x86/crypto/morus1280-avx2-asm.S
+++ b/arch/x86/crypto/morus1280-avx2-asm.S
@@ -113,7 +113,7 @@ ENDPROC(__morus1280_update_zero)
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	vpxor MSG, MSG, MSG
 
 	mov %rcx, %r8
diff --git a/arch/x86/crypto/morus1280-sse2-asm.S b/arch/x86/crypto/morus1280-sse2-asm.S
index bd1aa1b60869..da5d2905db60 100644
--- a/arch/x86/crypto/morus1280-sse2-asm.S
+++ b/arch/x86/crypto/morus1280-sse2-asm.S
@@ -235,7 +235,7 @@ ENDPROC(__morus1280_update_zero)
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	pxor MSG_LO, MSG_LO
 	pxor MSG_HI, MSG_HI
 
diff --git a/arch/x86/crypto/morus640-sse2-asm.S b/arch/x86/crypto/morus640-sse2-asm.S
index efa02816d921..414db480250e 100644
--- a/arch/x86/crypto/morus640-sse2-asm.S
+++ b/arch/x86/crypto/morus640-sse2-asm.S
@@ -113,7 +113,7 @@ ENDPROC(__morus640_update_zero)
  *   %r9
  */
 __load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
 	pxor MSG, MSG
 
 	mov %rcx, %r8
diff --git a/arch/x86/crypto/sha1_ssse3_asm.S b/arch/x86/crypto/sha1_ssse3_asm.S
index 6204bd53528c..613d0bfc3d84 100644
--- a/arch/x86/crypto/sha1_ssse3_asm.S
+++ b/arch/x86/crypto/sha1_ssse3_asm.S
@@ -96,7 +96,7 @@
 	# cleanup workspace
 	mov $8, %ecx
 	mov %rsp, %rdi
-	xor %rax, %rax
+	xor %eax, %eax
 	rep stosq
 
 	mov %rbp, %rsp			# deallocate workspace
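Every hunk in this diff applies the same substitution: in 64-bit mode, writing a 32-bit register implicitly zero-extends the result into the upper 32 bits of the full register, so a 32-bit XOR clears the whole 64-bit register without needing a REX.W prefix. A minimal standalone GAS sketch (illustrative only; the label name is made up, and the byte counts in the comments are the standard x86-64 encodings, not output from this patch):

	.text
	.globl	zero_demo
zero_demo:
	xor	%eax, %eax	# 31 c0    (2 bytes), zeroes all of %rax
	xor	%r9d, %r9d	# 45 31 c9 (3 bytes), zeroes all of %r9
	# The 64-bit forms are never shorter and cost an extra byte for
	# the legacy registers:
	#	xor %rax, %rax	-> 48 31 c0 (3 bytes)
	#	xor %r9, %r9	-> 4d 31 c9 (3 bytes)
	ret

Modern x86-64 CPUs recognize both the 32-bit and the 64-bit register-with-itself XOR as a dependency-breaking zeroing idiom, so the change is a pure code-size and consistency win.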