author		Peter Zijlstra <peterz@infradead.org>	2021-11-10 11:01:05 +0100
committer	Peter Zijlstra <peterz@infradead.org>	2021-12-11 09:09:45 +0100
commit		acba44d2436d463f60a54bf934d378dcf384a965 (patch)
tree		481cfb27e94c0f9daf7d21ab399b9687984fc594 /arch
parent		x86/mmx_32: Remove X86_USE_3DNOW (diff)
x86/copy_user_64: Remove .fixup usage
Place the anonymous .fixup code at the tail of the regular functions.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.068505810@infradead.org
Diffstat (limited to 'arch')
-rw-r--r--	arch/x86/lib/copy_user_64.S | 32
1 file changed, 11 insertions(+), 21 deletions(-)
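For orientation, the core of the change condensed from the hunks below (fragments of ALIGN_DESTINATION and .Lcopy_user_handle_tail only, not standalone assembly): recovery code that used to be emitted into the anonymous .fixup section becomes ordinary code at the tail of the exception handler, and the extable entries point at a named local label instead of a numeric one.

	/* before: fixup emitted into the separate .fixup section */
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail
	.previous
	_ASM_EXTABLE_CPY(100b, 103b)	/* fault at 100: continues at 103: */

	/* after: fixup is regular code at the tail of the handler */
	_ASM_EXTABLE_CPY(100b, .Lcopy_user_handle_align)
	...
.Lcopy_user_handle_align:
	addl %ecx,%edx			/* ecx is zerorest also */
	jmp .Lcopy_user_handle_tail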
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 8fb562f1dfaf..e6ac38587b40 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -32,14 +32,10 @@
 	decl %ecx
 	jnz 100b
 102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp .Lcopy_user_handle_tail
-	.previous
 
-	_ASM_EXTABLE_CPY(100b, 103b)
-	_ASM_EXTABLE_CPY(101b, 103b)
-	.endm
+	_ASM_EXTABLE_CPY(100b, .Lcopy_user_handle_align)
+	_ASM_EXTABLE_CPY(101b, .Lcopy_user_handle_align)
+.endm
 
 /*
  * copy_user_generic_unrolled - memory copy with exception handling.
@@ -107,7 +103,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 30:	shll $6,%ecx
 	addl %ecx,%edx
 	jmp 60f
@@ -115,7 +110,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
 	jmp 60f
 50:	movl %ecx,%edx
 60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 30b)
 	_ASM_EXTABLE_CPY(2b, 30b)
@@ -166,20 +160,16 @@ SYM_FUNC_START(copy_user_generic_string)
 	movl %edx,%ecx
 	shrl $3,%ecx
 	andl $7,%edx
-1:	rep
-	movsq
+1:	rep movsq
 2:	movl %edx,%ecx
-3:	rep
-	movsb
+3:	rep movsb
 	xorl %eax,%eax
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 11:	leal (%rdx,%rcx,8),%ecx
 12:	movl %ecx,%edx		/* ecx is zerorest also */
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 11b)
 	_ASM_EXTABLE_CPY(3b, 12b)
@@ -203,16 +193,13 @@ SYM_FUNC_START(copy_user_enhanced_fast_string)
 	cmpl $64,%edx
 	jb .L_copy_short_string	/* less then 64 bytes, avoid the costly 'rep' */
 	movl %edx,%ecx
-1:	rep
-	movsb
+1:	rep movsb
 	xorl %eax,%eax
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 12:	movl %ecx,%edx		/* ecx is zerorest also */
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 12b)
 SYM_FUNC_END(copy_user_enhanced_fast_string)
@@ -240,6 +227,11 @@ SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
 	RET
 
 	_ASM_EXTABLE_CPY(1b, 2b)
+
+.Lcopy_user_handle_align:
+	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp .Lcopy_user_handle_tail
+
 SYM_CODE_END(.Lcopy_user_handle_tail)
 
 /*
@@ -350,7 +342,6 @@ SYM_FUNC_START(__copy_user_nocache)
 	sfence
 	RET
 
-	.section .fixup,"ax"
 .L_fixup_4x8b_copy:
 	shll $6,%ecx
 	addl %ecx,%edx
@@ -366,7 +357,6 @@ SYM_FUNC_START(__copy_user_nocache)
 .L_fixup_handle_tail:
 	sfence
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
 	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)