diff options
author | Max Filippov <jcmvbkbc@gmail.com> | 2019-10-11 05:55:35 +0200 |
---|---|---|
committer | Max Filippov <jcmvbkbc@gmail.com> | 2019-10-14 23:14:21 +0200 |
commit | c9c63f3c7a9081e4768291514991d3208ae8a697 (patch) | |
tree | bfd6f2c748194fe121d0d638a35954ded1e56b63 /arch | |
parent | xtensa: clean up assembly arguments in uaccess macros (diff) | |
download | linux-c9c63f3c7a9081e4768291514991d3208ae8a697.tar.xz linux-c9c63f3c7a9081e4768291514991d3208ae8a697.zip |
xtensa: fix type conversion in __get_user_[no]check
__get_user_[no]check uses temporary buffer of type long to store result
of __get_user_size and do sign extension on it when necessary. This
doesn't work correctly for 64-bit data. Fix it by moving temporary
buffer/sign extension logic to __get_user_asm.
Don't assign the result of __get_user_bad to (x), as (x) may not always
be integer-compatible now, which would issue a warning even when the call
is going to be optimized away. Instead do (x) = 0; and call __get_user_bad separately.
Zero initialize __x in __get_user_asm and use '+' constraint for its
assembly argument, so that its value is preserved in error cases. This
may add at most 1 cycle to the fast path, but saves an instruction and
two padding bytes in the fixup section for each use of this macro and
works for both misaligned store and store exception.
Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
Diffstat (limited to 'arch')
-rw-r--r-- | arch/xtensa/include/asm/uaccess.h | 55 |
1 file changed, 29 insertions, 26 deletions
diff --git a/arch/xtensa/include/asm/uaccess.h b/arch/xtensa/include/asm/uaccess.h index da4d35445063..3f80386f1883 100644 --- a/arch/xtensa/include/asm/uaccess.h +++ b/arch/xtensa/include/asm/uaccess.h @@ -172,19 +172,19 @@ __asm__ __volatile__( \ #define __get_user_nocheck(x, ptr, size) \ ({ \ - long __gu_err, __gu_val; \ - __get_user_size(__gu_val, (ptr), (size), __gu_err); \ - (x) = (__force __typeof__(*(ptr)))__gu_val; \ + long __gu_err; \ + __get_user_size((x), (ptr), (size), __gu_err); \ __gu_err; \ }) #define __get_user_check(x, ptr, size) \ ({ \ - long __gu_err = -EFAULT, __gu_val = 0; \ + long __gu_err = -EFAULT; \ const __typeof__(*(ptr)) *__gu_addr = (ptr); \ - if (access_ok(__gu_addr, size)) \ - __get_user_size(__gu_val, __gu_addr, (size), __gu_err); \ - (x) = (__force __typeof__(*(ptr)))__gu_val; \ + if (access_ok(__gu_addr, size)) \ + __get_user_size((x), __gu_addr, (size), __gu_err); \ + else \ + (x) = 0; \ __gu_err; \ }) @@ -208,7 +208,7 @@ do { \ } \ break; \ } \ - default: (x) = __get_user_bad(); \ + default: (x) = 0; __get_user_bad(); \ } \ } while (0) @@ -218,24 +218,27 @@ do { \ * __check_align_* macros still work. 
*/ #define __get_user_asm(x_, addr_, err_, align, insn, cb) \ -__asm__ __volatile__( \ - __check_align_##align \ - "1: "insn" %[x], %[addr], 0 \n" \ - "2: \n" \ - " .section .fixup,\"ax\" \n" \ - " .align 4 \n" \ - " .literal_position \n" \ - "5: \n" \ - " movi %[tmp], 2b \n" \ - " movi %[x], 0 \n" \ - " movi %[err], %[efault] \n" \ - " jx %[tmp] \n" \ - " .previous \n" \ - " .section __ex_table,\"a\" \n" \ - " .long 1b, 5b \n" \ - " .previous" \ - :[err] "+r"(err_), [tmp] "=r"(cb), [x] "=r"(x_)\ - :[addr] "r"(addr_), [efault] "i"(-EFAULT)) +do { \ + u32 __x = 0; \ + __asm__ __volatile__( \ + __check_align_##align \ + "1: "insn" %[x], %[addr], 0 \n" \ + "2: \n" \ + " .section .fixup,\"ax\" \n" \ + " .align 4 \n" \ + " .literal_position \n" \ + "5: \n" \ + " movi %[tmp], 2b \n" \ + " movi %[err], %[efault] \n" \ + " jx %[tmp] \n" \ + " .previous \n" \ + " .section __ex_table,\"a\" \n" \ + " .long 1b, 5b \n" \ + " .previous" \ + :[err] "+r"(err_), [tmp] "=r"(cb), [x] "+r"(__x) \ + :[addr] "r"(addr_), [efault] "i"(-EFAULT)); \ + (x_) = (__force __typeof__(*(addr_)))__x; \ +} while (0) /* |