| author | Linus Torvalds <torvalds@linux-foundation.org> | 2018-12-21 18:22:24 +0100 |
|---|---|---|
| committer | Linus Torvalds <torvalds@linux-foundation.org> | 2018-12-21 18:22:24 +0100 |
| commit | 70ad6368e878857db315788dab36817aa992c86a (patch) | |
| tree | 9de519c81dc24a2d416403ec38b944fbd6047163 /arch/x86/include/asm/refcount.h | |
| parent | Merge tag 'drm-fixes-2018-12-21' of git://anongit.freedesktop.org/drm/drm (diff) | |
| parent | Revert "kbuild/Makefile: Prepare for using macros in inline assembly code to ... (diff) | |
| download | linux-70ad6368e878857db315788dab36817aa992c86a.tar.xz linux-70ad6368e878857db315788dab36817aa992c86a.zip | |
Merge branch 'x86-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 fixes from Ingo Molnar:
"The biggest part is a series of reverts for the macro based GCC
inlining workarounds. It caused regressions in distro build and other
kernel tooling environments, and the GCC project was very receptive to
fixing the underlying inliner weaknesses - so as time ran out we
decided to do a reasonably straightforward revert of the patches. The
plan is to rely on the 'asm inline' GCC 9 feature, which might be
backported to GCC 8 and could thus become reasonably widely available
on modern distros.
Other than those reverts, there's misc fixes from all around the
place.
I wish our final x86 pull request for v4.20 was smaller..."
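As context for the 'asm inline' plan mentioned above, here is a minimal standalone sketch (a hypothetical example, not code from this pull; it needs GCC 9 or newer). GCC estimates a function's inlining cost partly from the textual size of its asm statements, which is what the reverted macro workarounds were trying to shrink; 'asm inline' instead tells the compiler to treat the asm body as having minimal size:

```c
#include <stdbool.h>

/* Hypothetical helper: returns whether bit 0 of x is set (x86-64). */
static inline bool bit0_set(unsigned long x)
{
	bool ret;

	/*
	 * A kernel-style asm body here might span many lines of
	 * .pushsection/.popsection glue. With plain asm, all of that
	 * text inflates GCC's size estimate for bit0_set() and can
	 * block inlining of its callers; 'asm inline' (GCC 9+) makes
	 * the inliner cost this statement as if it were one instruction.
	 */
	asm inline ("bt $0, %[val]\n\t"
		    "setc %[ret]"
		    : [ret] "=q" (ret)
		    : [val] "r" (x)
		    : "cc");
	return ret;
}
```

Older compilers reject the extra qualifier, so real users of the feature need a fallback to plain asm behind a compiler-version check.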
* 'x86-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
Revert "kbuild/Makefile: Prepare for using macros in inline assembly code to work around asm() related GCC inlining bugs"
Revert "x86/objtool: Use asm macros to work around GCC inlining bugs"
Revert "x86/refcount: Work around GCC inlining bug"
Revert "x86/alternatives: Macrofy lock prefixes to work around GCC inlining bugs"
Revert "x86/bug: Macrofy the BUG table section handling, to work around GCC inlining bugs"
Revert "x86/paravirt: Work around GCC inlining bugs when compiling paravirt ops"
Revert "x86/extable: Macrofy inline assembly code to work around GCC inlining bugs"
Revert "x86/cpufeature: Macrofy inline assembly code to work around GCC inlining bugs"
Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"
x86/mtrr: Don't copy uninitialized gentry fields back to userspace
x86/fsgsbase/64: Fix the base write helper functions
x86/mm/cpa: Fix cpa_flush_array() TLB invalidation
x86/vdso: Pass --eh-frame-hdr to the linker
x86/mm: Fix decoy address handling vs 32-bit builds
x86/intel_rdt: Ensure a CPU remains online for the region's pseudo-locking sequence
x86/dump_pagetables: Fix LDT remap address marker
x86/mm: Fix guard hole handling
Diffstat (limited to 'arch/x86/include/asm/refcount.h')
-rw-r--r-- | arch/x86/include/asm/refcount.h | 81 |
1 file changed, 33 insertions(+), 48 deletions(-)
diff --git a/arch/x86/include/asm/refcount.h b/arch/x86/include/asm/refcount.h
index a8b5e1e13319..dbaed55c1c24 100644
--- a/arch/x86/include/asm/refcount.h
+++ b/arch/x86/include/asm/refcount.h
@@ -4,41 +4,6 @@
  * x86-specific implementation of refcount_t. Based on PAX_REFCOUNT from
  * PaX/grsecurity.
  */
-
-#ifdef __ASSEMBLY__
-
-#include <asm/asm.h>
-#include <asm/bug.h>
-
-.macro REFCOUNT_EXCEPTION counter:req
-	.pushsection .text..refcount
-111:	lea \counter, %_ASM_CX
-112:	ud2
-	ASM_UNREACHABLE
-	.popsection
-113:	_ASM_EXTABLE_REFCOUNT(112b, 113b)
-.endm
-
-/* Trigger refcount exception if refcount result is negative. */
-.macro REFCOUNT_CHECK_LT_ZERO counter:req
-	js 111f
-	REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-/* Trigger refcount exception if refcount result is zero or negative. */
-.macro REFCOUNT_CHECK_LE_ZERO counter:req
-	jz 111f
-	REFCOUNT_CHECK_LT_ZERO counter="\counter"
-.endm
-
-/* Trigger refcount exception unconditionally. */
-.macro REFCOUNT_ERROR counter:req
-	jmp 111f
-	REFCOUNT_EXCEPTION counter="\counter"
-.endm
-
-#else /* __ASSEMBLY__ */
-
 #include <linux/refcount.h>
 #include <asm/bug.h>
 
@@ -50,12 +15,35 @@
  * central refcount exception. The fixup address for the exception points
  * back to the regular execution flow in .text.
  */
+#define _REFCOUNT_EXCEPTION \
+	".pushsection .text..refcount\n" \
+	"111:\tlea %[var], %%" _ASM_CX "\n" \
+	"112:\t" ASM_UD2 "\n" \
+	ASM_UNREACHABLE \
+	".popsection\n" \
+	"113:\n" \
+	_ASM_EXTABLE_REFCOUNT(112b, 113b)
+
+/* Trigger refcount exception if refcount result is negative. */
+#define REFCOUNT_CHECK_LT_ZERO \
+	"js 111f\n\t" \
+	_REFCOUNT_EXCEPTION
+
+/* Trigger refcount exception if refcount result is zero or negative. */
+#define REFCOUNT_CHECK_LE_ZERO \
+	"jz 111f\n\t" \
+	REFCOUNT_CHECK_LT_ZERO
+
+/* Trigger refcount exception unconditionally. */
+#define REFCOUNT_ERROR \
+	"jmp 111f\n\t" \
+	_REFCOUNT_EXCEPTION
 
 static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "addl %1,%0\n\t"
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LT_ZERO
+		: [var] "+m" (r->refs.counter)
 		: "ir" (i)
 		: "cc", "cx");
 }
@@ -63,32 +51,31 @@ static __always_inline void refcount_add(unsigned int i, refcount_t *r)
 static __always_inline void refcount_inc(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "incl %0\n\t"
-		"REFCOUNT_CHECK_LT_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LT_ZERO
+		: [var] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
 
 static __always_inline void refcount_dec(refcount_t *r)
 {
 	asm volatile(LOCK_PREFIX "decl %0\n\t"
-		"REFCOUNT_CHECK_LE_ZERO counter=\"%[counter]\""
-		: [counter] "+m" (r->refs.counter)
+		REFCOUNT_CHECK_LE_ZERO
+		: [var] "+m" (r->refs.counter)
 		: : "cc", "cx");
 }
 
 static __always_inline __must_check
 bool refcount_sub_and_test(unsigned int i, refcount_t *r)
 {
-	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl",
-					 "REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+	return GEN_BINARY_SUFFIXED_RMWcc(LOCK_PREFIX "subl", REFCOUNT_CHECK_LT_ZERO,
 					 r->refs.counter, e, "er", i, "cx");
 }
 
 static __always_inline __must_check bool refcount_dec_and_test(refcount_t *r)
 {
 	return GEN_UNARY_SUFFIXED_RMWcc(LOCK_PREFIX "decl",
-					"REFCOUNT_CHECK_LT_ZERO counter=\"%[var]\"",
+					REFCOUNT_CHECK_LT_ZERO,
 					r->refs.counter, e, "cx");
 }
 
 static __always_inline __must_check
@@ -106,8 +93,8 @@ bool refcount_add_not_zero(unsigned int i, refcount_t *r)
 		/* Did we try to increment from/to an undesirable state? */
 		if (unlikely(c < 0 || c == INT_MAX || result < c)) {
-			asm volatile("REFCOUNT_ERROR counter=\"%[counter]\""
-				     : : [counter] "m" (r->refs.counter)
+			asm volatile(REFCOUNT_ERROR
+				     : : [var] "m" (r->refs.counter)
 				     : "cc", "cx");
 			break;
 		}
 
@@ -122,6 +109,4 @@ static __always_inline __must_check bool refcount_inc_not_zero(refcount_t *r)
 	return refcount_add_not_zero(1, r);
 }
 
-#endif /* __ASSEMBLY__ */
-
 #endif
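To see what the restored C macros amount to, here is a standalone userspace sketch (x86-64, GCC or Clang; hypothetical names, not kernel code) of the code-layout trick _REFCOUNT_EXCEPTION implements: the `js 111f` branch targets a label that `.pushsection` places in a cold section, so the hot path stays straight-line. The kernel's real cold path ends in `ud2` plus an exception-table entry (_ASM_EXTABLE_REFCOUNT) rather than returning; this sketch substitutes a flag write and a `jmp` back so it can run as an ordinary program:

```c
#include <stdio.h>

static int saturated;

/* Hypothetical: decrement *counter, noting in 'saturated' if it went negative. */
static inline int dec_checked(int *counter)
{
	int newval;

	asm volatile("decl %[var]\n\t"
		     "movl %[var], %[new]\n\t"	/* mov does not clobber flags */
		     "js 111f\n\t"		/* sign set: take the cold path */
		     "112:\n\t"			/* resume point for the cold path */
		     ".pushsection .text.unlikely\n"	/* cold path assembled elsewhere */
		     "111:\tmovl $1, %[sat]\n\t"
		     "jmp 112b\n"		/* jump back to the hot path */
		     ".popsection"
		     : [var] "+m" (*counter), [new] "=r" (newval),
		       [sat] "+m" (saturated)
		     : : "cc");
	return newval;
}

int main(void)
{
	int c = 1;

	dec_checked(&c);	/* 1 -> 0: hot path only */
	dec_checked(&c);	/* 0 -> -1: cold path fires */
	printf("counter=%d saturated=%d\n", c, saturated);	/* counter=-1 saturated=1 */
	return 0;
}
```

The local numeric labels (111/112) can be reused safely across inlined copies, which is why both the old assembler macros and the restored string macros rely on them.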