author    Huang Pei <huangpei@loongson.cn>  2021-12-15 09:44:59 +0100
committer Thomas Bogendoerfer <tsbogend@alpha.franken.de>  2022-01-05 10:19:20 +0100
commit    10657660c16e689bfad204190e7031b9b1622a35 (patch)
tree      8f29f1c910f097ff7a540473d825f4fcdfa482cb /arch
parent    MIPS: fix local_{add,sub}_return on MIPS64 (diff)
MIPS: rework local_t operation on MIPS64
+. remove "asm/war.h" since R10000_LLSC_WAR became a config option +. clean up Suggested-by: Maciej W. Rozycki <macro@orcam.me.uk> Signed-off-by: Huang Pei <huangpei@loongson.cn> Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
Diffstat (limited to 'arch')
-rw-r--r--  arch/mips/include/asm/asm.h    | 18
-rw-r--r--  arch/mips/include/asm/local.h  | 59
2 files changed, 32 insertions(+), 45 deletions(-)
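
As background for the rework (an illustrative sketch, not part of the patch): local_t is the interrupt-safe, CPU-local counter type whose add/sub fast paths are rewritten below. Only local_t, DEFINE_PER_CPU, this_cpu_ptr and local_add_return are real kernel interfaces here; the counter and function names are made up.

#include <linux/percpu.h>
#include <asm/local.h>

/* Hypothetical per-CPU event counter, for illustration only. */
static DEFINE_PER_CPU(local_t, example_event_count);

static void note_event(void)
{
	/*
	 * local_add_return() is one of the ll/sc fast paths reworked by
	 * this patch; it is atomic with respect to interrupts on the
	 * local CPU (the caller keeps the task on that CPU, e.g. with
	 * preemption disabled), without the cost of a full SMP atomic.
	 */
	long total = local_add_return(1, this_cpu_ptr(&example_event_count));

	(void)total;	/* running total could feed statistics or tracing */
}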
diff --git a/arch/mips/include/asm/asm.h b/arch/mips/include/asm/asm.h
index 2f8ce94ebaaf..f3302b13d3e0 100644
--- a/arch/mips/include/asm/asm.h
+++ b/arch/mips/include/asm/asm.h
@@ -19,6 +19,7 @@
#include <asm/sgidefs.h>
#include <asm/asm-eva.h>
+#include <asm/isa-rev.h>
#ifndef __VDSO__
/*
@@ -211,6 +212,8 @@ symbol = value
#define LONG_SUB sub
#define LONG_SUBU subu
#define LONG_L lw
+#define LONG_LL ll
+#define LONG_SC sc
#define LONG_S sw
#define LONG_SP swp
#define LONG_SLL sll
@@ -236,6 +239,8 @@ symbol = value
#define LONG_SUB dsub
#define LONG_SUBU dsubu
#define LONG_L ld
+#define LONG_LL lld
+#define LONG_SC scd
#define LONG_S sd
#define LONG_SP sdp
#define LONG_SLL dsll
@@ -320,6 +325,19 @@ symbol = value
#define SSNOP sll zero, zero, 1
+/*
+ * Using a branch-likely instruction to check the result of an sc instruction
+ * works around a bug present in R10000 CPUs prior to revision 3.0 that could
+ * cause ll-sc sequences to execute non-atomically.
+ */
+#ifdef CONFIG_WAR_R10000_LLSC
+# define SC_BEQZ beqzl
+#elif MIPS_ISA_REV >= 6
+# define SC_BEQZ beqzc
+#else
+# define SC_BEQZ beqz
+#endif
+
#ifdef CONFIG_SGI_IP28
/* Inhibit speculative stores to volatile (e.g.DMA) or invalid addresses. */
#include <asm/cacheops.h>
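
As a sketch of what the new macros provide (the helper name below is invented, and ".set mips64r2" merely stands in for MIPS_ISA_ARCH_LEVEL; only the instructions come from the definitions above): on a 64-bit kernel with neither CONFIG_WAR_R10000_LLSC nor MIPS R6, LONG_LL/LONG_SC/SC_BEQZ resolve to lld/scd/beqz, so an ll/sc retry loop written with them expands roughly to:

/*
 * Hand-expanded sketch of a 64-bit atomic add using the instructions
 * LONG_LL/LONG_SC/SC_BEQZ select in the non-R10000, pre-R6 case.
 * With CONFIG_WAR_R10000_LLSC the branch would be beqzl (branch-likely);
 * on MIPS R6 it would be the compact beqzc.
 */
static inline long sketch_atomic_add64(long i, volatile long *p)
{
	long result, temp;

	__asm__ __volatile__(
	"	.set	push					\n"
	"	.set	mips64r2				\n"
	"1:	lld	%1, %2		# temp = *p		\n"
	"	daddu	%0, %1, %3	# result = temp + i	\n"
	"	scd	%0, %2		# try to store result	\n"
	"	beqz	%0, 1b		# retry if sc failed	\n"
	"	daddu	%0, %1, %3	# recompute result (delay slot)\n"
	"	.set	pop					\n"
	: "=&r" (result), "=&r" (temp), "=m" (*p)
	: "Ir" (i), "m" (*p)
	: "memory");

	return result;
}

With the R10000 workaround enabled, the only difference is the branch-likely encoding of the conditional branch; the loop structure stays the same, which is what lets the three cases share one asm body in local.h below.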
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index 3fa634090388..d4d47c846bb2 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -8,7 +8,7 @@
+#include <asm/asm.h>
#include <asm/cmpxchg.h>
#include <asm/compiler.h>
-#include <asm/war.h>
typedef struct
{
@@ -32,34 +32,18 @@ static __inline__ long local_add_return(long i, local_t * l)
{
unsigned long result;
- if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
- unsigned long temp;
-
- __asm__ __volatile__(
- " .set push \n"
- " .set arch=r4000 \n"
- __SYNC(full, loongson3_war) " \n"
- "1:" __LL "%1, %2 # local_add_return \n"
- __stringify(LONG_ADDU) " %0, %1, %3 \n"
- __SC "%0, %2 \n"
- " beqzl %0, 1b \n"
- " addu %0, %1, %3 \n"
- " .set pop \n"
- : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
- : "Ir" (i), "m" (l->a.counter)
- : "memory");
- } else if (kernel_uses_llsc) {
+ if (kernel_uses_llsc) {
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
- __SYNC(full, loongson3_war) " \n"
- "1:" __LL "%1, %2 # local_add_return \n"
+ __SYNC(full, loongson3_war) " \n"
+ "1:" __stringify(LONG_LL) " %1, %2 \n"
+ __stringify(LONG_ADDU) " %0, %1, %3 \n"
+ __stringify(LONG_SC) " %0, %2 \n"
+ __stringify(SC_BEQZ) " %0, 1b \n"
__stringify(LONG_ADDU) " %0, %1, %3 \n"
- __SC "%0, %2 \n"
- " beqz %0, 1b \n"
- " addu %0, %1, %3 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)
@@ -81,34 +65,19 @@ static __inline__ long local_sub_return(long i, local_t * l)
{
unsigned long result;
- if (kernel_uses_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
- unsigned long temp;
-
- __asm__ __volatile__(
- " .set push \n"
- " .set arch=r4000 \n"
- __SYNC(full, loongson3_war) " \n"
- "1:" __LL "%1, %2 # local_sub_return \n"
- __stringify(LONG_SUBU) " %0, %1, %3 \n"
- __SC "%0, %2 \n"
- " beqzl %0, 1b \n"
- " subu %0, %1, %3 \n"
- " .set pop \n"
- : "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
- : "Ir" (i), "m" (l->a.counter)
- : "memory");
- } else if (kernel_uses_llsc) {
+ if (kernel_uses_llsc) {
unsigned long temp;
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
- __SYNC(full, loongson3_war) " \n"
- "1:" __LL "%1, %2 # local_sub_return \n"
+ __SYNC(full, loongson3_war) " \n"
+ "1:" __stringify(LONG_LL) " %1, %2 \n"
+ __stringify(LONG_SUBU) " %0, %1, %3 \n"
+ __stringify(LONG_SUBU) " %0, %1, %3 \n"
+ __stringify(LONG_SC) " %0, %2 \n"
+ __stringify(SC_BEQZ) " %0, 1b \n"
__stringify(LONG_SUBU) " %0, %1, %3 \n"
- __SC "%0, %2 \n"
- " beqz %0, 1b \n"
- " subu %0, %1, %3 \n"
" .set pop \n"
: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
: "Ir" (i), "m" (l->a.counter)