author		Peter Zijlstra <peterz@infradead.org>	2021-11-10 11:01:23 +0100
committer	Peter Zijlstra <peterz@infradead.org>	2021-12-11 09:09:50 +0100
commit		b7760780257354bb14de62abed868405b844fa13 (patch)
tree		9bf71c95c2f206c86a4cc75f2f9070aa5bf3af5b
parent		x86/usercopy: Remove .fixup usage (diff)
download	linux-b7760780257354bb14de62abed868405b844fa13.tar.xz
		linux-b7760780257354bb14de62abed868405b844fa13.zip
x86/word-at-a-time: Remove .fixup usage
Rewrite load_unaligned_zeropad() to not require .fixup text.
This is most easily done with asm-goto-output, which lets us put a C
label directly in the exception table entry. The fallback version
(for compilers without asm-goto-output) isn't nearly as nice, but it
should work.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lore.kernel.org/r/20211110101326.141775772@infradead.org
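For readers who have not seen the construct: "asm goto" with output operands (GCC 11+ or a sufficiently recent Clang; this is what CONFIG_CC_HAS_ASM_GOTO_OUTPUT gates) lets a single asm statement both produce a value in an output register and transfer control to a C label. In the patch, the jump to that label is taken by the exception-fixup machinery through the _ASM_EXTABLE entry. The sketch below is plain userspace GNU C, not the kernel code: it fakes the "fault" with an explicit conditional branch, and load_or_fallback along with its pretend_fault parameter are invented for illustration.

	/*
	 * Minimal userspace sketch of the asm-goto-with-outputs pattern, NOT
	 * the kernel code: the asm both writes an output register and may
	 * branch to a C label.  In the kernel the branch is taken by the
	 * exception machinery via the extable entry; here an explicit
	 * conditional jump stands in for the fault.  Build as GNU C with
	 * GCC 11+ or a recent Clang on x86-64.
	 */
	#include <stdio.h>

	static unsigned long load_or_fallback(const unsigned long *p, int pretend_fault)
	{
		unsigned long ret;

		asm goto("test %[fault], %[fault]\n\t"	/* fake the fault check */
			 "jnz %l[do_exception]\n\t"	/* "fault": jump to the C label */
			 "mov %[mem], %[ret]"		/* normal path: plain load */
			 : [ret] "=r" (ret)
			 : [mem] "m" (*p), [fault] "r" (pretend_fault)
			 :
			 : do_exception);

		return ret;

	do_exception:
		/* stand-in for the fixup path; in the patch this recomputes ret */
		return 0;
	}

	int main(void)
	{
		unsigned long v = 0x1122334455667788UL;

		printf("%lx\n", load_or_fallback(&v, 0));	/* 1122334455667788 */
		printf("%lx\n", load_or_fallback(&v, 1));	/* 0 */
		return 0;
	}

Because the output operand is only guaranteed to be valid on the fall-through path, the label path computes its own return value, which is exactly the shape of the patch's do_exception block.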
-rw-r--r--	arch/x86/include/asm/word-at-a-time.h	66
1 file changed, 47 insertions(+), 19 deletions(-)
diff --git a/arch/x86/include/asm/word-at-a-time.h b/arch/x86/include/asm/word-at-a-time.h
index 06006b0351f3..8338b0432b50 100644
--- a/arch/x86/include/asm/word-at-a-time.h
+++ b/arch/x86/include/asm/word-at-a-time.h
@@ -77,30 +77,58 @@ static inline unsigned long find_zero(unsigned long mask)
  * and the next page not being mapped, take the exception and
  * return zeroes in the non-existing part.
  */
+#ifdef CONFIG_CC_HAS_ASM_GOTO_OUTPUT
+
 static inline unsigned long load_unaligned_zeropad(const void *addr)
 {
-	unsigned long ret, dummy;
+	unsigned long offset, data;
+	unsigned long ret;
+
+	asm_volatile_goto(
+		"1:	mov %[mem], %[ret]\n"
+
+		_ASM_EXTABLE(1b, %l[do_exception])
+
+		: [ret] "=r" (ret)
+		: [mem] "m" (*(unsigned long *)addr)
+		: : do_exception);
+
+	return ret;
+
+do_exception:
+	offset = (unsigned long)addr & (sizeof(long) - 1);
+	addr = (void *)((unsigned long)addr & ~(sizeof(long) - 1));
+	data = *(unsigned long *)addr;
+	ret = data >> offset * 8;
+
+	return ret;
+}
 
-	asm(
-		"1:\tmov %2,%0\n"
+#else /* !CONFIG_CC_HAS_ASM_GOTO_OUTPUT */
+
+static inline unsigned long load_unaligned_zeropad(const void *addr)
+{
+	unsigned long offset, data;
+	unsigned long ret, err = 0;
+
+	asm(	"1:	mov %[mem], %[ret]\n"
 		"2:\n"
-		".section .fixup,\"ax\"\n"
-		"3:\t"
-		"lea %2,%1\n\t"
-		"and %3,%1\n\t"
-		"mov (%1),%0\n\t"
-		"leal %2,%%ecx\n\t"
-		"andl %4,%%ecx\n\t"
-		"shll $3,%%ecx\n\t"
-		"shr %%cl,%0\n\t"
-		"jmp 2b\n"
-		".previous\n"
-		_ASM_EXTABLE(1b, 3b)
-		:"=&r" (ret),"=&c" (dummy)
-		:"m" (*(unsigned long *)addr),
-		 "i" (-sizeof(unsigned long)),
-		 "i" (sizeof(unsigned long)-1));
+
+		_ASM_EXTABLE_FAULT(1b, 2b)
+
+		: [ret] "=&r" (ret), "+a" (err)
+		: [mem] "m" (*(unsigned long *)addr));
+
+	if (unlikely(err)) {
+		offset = (unsigned long)addr & (sizeof(long) - 1);
+		addr = (void *)((unsigned long)addr & ~(sizeof(long) - 1));
+		data = *(unsigned long *)addr;
+		ret = data >> offset * 8;
+	}
+
 	return ret;
 }
 
+#endif /* CONFIG_CC_HAS_ASM_GOTO_OUTPUT */
+
 #endif /* _ASM_WORD_AT_A_TIME_H */
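Both the do_exception path and the err path of the fallback perform the same recovery: load from the aligned word that contains addr and shift right so the bytes below addr are dropped and the bytes that would have come from the unmapped page read as zero. Below is a stand-alone sketch of that arithmetic in plain C, with no faulting involved; it assumes a little-endian machine, and zeropad_from_aligned is an invented name, not a kernel helper.

	/*
	 * Stand-alone illustration of the recovery arithmetic used by both
	 * fixup paths in the patch: read the aligned word containing addr and
	 * shift so that bytes past addr's word boundary read as zero.
	 * Assumes little-endian; purely illustrative.
	 */
	#include <stdio.h>

	static unsigned long zeropad_from_aligned(const void *addr)
	{
		unsigned long offset = (unsigned long)addr & (sizeof(long) - 1);
		unsigned long aligned = (unsigned long)addr & ~(sizeof(long) - 1);
		unsigned long data = *(unsigned long *)aligned;

		/* the right shift drops the offset bytes below addr and
		 * zero-fills the top, where the inaccessible bytes would be */
		return data >> offset * 8;
	}

	int main(void)
	{
		unsigned long word = 0x8877665544332211UL;
		const unsigned char *p = (const unsigned char *)&word + 5;

		/* the bytes at p are 66 77 88; everything above reads as zero */
		printf("%lx\n", zeropad_from_aligned(p));	/* prints 887766 */
		return 0;
	}

On a little-endian machine the low bytes of the aligned word are the bytes starting at the aligned address, so shifting right by offset * 8 discards the bytes before addr and leaves zeroes in the high end, which is exactly the zero-padding contract of load_unaligned_zeropad().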