author	David Daney <ddaney@caviumnetworks.com>	2009-07-13 20:15:19 +0200
committer	Ralf Baechle <ralf@linux-mips.org>	2009-09-17 20:07:50 +0200
commit	b791d1193af9772040e592d5aa161790f800b762 (patch)
tree	6adad3d9cdf278a3a1a3418ae75a2864d0cc7f39	/arch/mips/include/asm/bitops.h
parent	MIPS: Get rid of CONFIG_CPU_HAS_LLSC (diff)
MIPS: Allow kernel use of LL/SC to be separate from the presence of LL/SC.
On some CPUs, it is more efficient to disable and enable interrupts in the kernel rather than use ll/sc for atomic operations. But if we were to set cpu_has_llsc to false, we would break the userspace futex interface (in asm/futex.h).

We separate the two concepts with a new predicate, kernel_uses_llsc, that lets us disable the kernel's use of ll/sc while still allowing the futex code to use it.

Also, there were a couple of cases in bitops.h where we were using ll/sc unconditionally even if cpu_has_llsc were false.

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
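For context, the new predicate lives alongside cpu_has_llsc in asm/cpu-features.h. A minimal sketch of the idea, assuming the usual overridable-macro pattern used by that header (treat this as illustrative rather than the verbatim change):

/*
 * cpu_has_llsc keeps reporting whether the hardware implements LL/SC,
 * so asm/futex.h can still rely on it for userspace futexes.
 * kernel_uses_llsc defaults to the same value, but a platform's
 * cpu-feature-overrides.h may define it to 0 so the kernel falls
 * back to interrupt-disable atomics instead.
 */
#ifndef kernel_uses_llsc
#define kernel_uses_llsc	cpu_has_llsc
#endif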
Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	34
1 file changed, 17 insertions(+), 17 deletions(-)
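Every hunk below converts a kernel-side cpu_has_llsc test to kernel_uses_llsc; when the predicate is false, the bit operations fall through to the pre-existing non-LL/SC branch, which the excerpt does not show. A sketch of what that fallback does for set_bit(), reconstructed here as a standalone helper for illustration (the function name is hypothetical, not from the patched file):

/* Hypothetical standalone rendering of the !kernel_uses_llsc path. */
static inline void set_bit_irq_fallback(unsigned long nr,
					volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;

	/*
	 * Without LL/SC, the read-modify-write is made atomic by
	 * masking interrupts; this only suffices on the uniprocessor
	 * systems that take this path.
	 */
	raw_local_irq_save(flags);
	*a |= mask;
	raw_local_irq_restore(flags);
}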
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index b1e9e97a9c78..84a383806b2c 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -61,7 +61,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
unsigned short bit = nr & SZLONG_MASK;
unsigned long temp;
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
" .set mips3 \n"
"1: " __LL "%0, %1 # set_bit \n"
@@ -72,7 +72,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
- } else if (__builtin_constant_p(bit)) {
+ } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
__asm__ __volatile__(
"1: " __LL "%0, %1 # set_bit \n"
" " __INS "%0, %4, %2, 1 \n"
@@ -84,7 +84,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "=m" (*m)
: "ir" (bit), "m" (*m), "r" (~0));
#endif /* CONFIG_CPU_MIPSR2 */
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
__asm__ __volatile__(
" .set mips3 \n"
"1: " __LL "%0, %1 # set_bit \n"
@@ -126,7 +126,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
unsigned short bit = nr & SZLONG_MASK;
unsigned long temp;
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
__asm__ __volatile__(
" .set mips3 \n"
"1: " __LL "%0, %1 # clear_bit \n"
@@ -137,7 +137,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "=m" (*m)
: "ir" (~(1UL << bit)), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
- } else if (__builtin_constant_p(bit)) {
+ } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
__asm__ __volatile__(
"1: " __LL "%0, %1 # clear_bit \n"
" " __INS "%0, $0, %2, 1 \n"
@@ -149,7 +149,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
: "=&r" (temp), "=m" (*m)
: "ir" (bit), "m" (*m));
#endif /* CONFIG_CPU_MIPSR2 */
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
__asm__ __volatile__(
" .set mips3 \n"
"1: " __LL "%0, %1 # clear_bit \n"
@@ -202,7 +202,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned short bit = nr & SZLONG_MASK;
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -215,7 +215,7 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
" .set mips0 \n"
: "=&r" (temp), "=m" (*m)
: "ir" (1UL << bit), "m" (*m));
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -260,7 +260,7 @@ static inline int test_and_set_bit(unsigned long nr,
smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -275,7 +275,7 @@ static inline int test_and_set_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -328,7 +328,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
unsigned short bit = nr & SZLONG_MASK;
unsigned long res;
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -343,7 +343,7 @@ static inline int test_and_set_bit_lock(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -397,7 +397,7 @@ static inline int test_and_clear_bit(unsigned long nr,
smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -414,7 +414,7 @@ static inline int test_and_clear_bit(unsigned long nr,
: "r" (1UL << bit), "m" (*m)
: "memory");
#ifdef CONFIG_CPU_MIPSR2
- } else if (__builtin_constant_p(nr)) {
+ } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -431,7 +431,7 @@ static inline int test_and_clear_bit(unsigned long nr,
: "ir" (bit), "m" (*m)
: "memory");
#endif
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -487,7 +487,7 @@ static inline int test_and_change_bit(unsigned long nr,
smp_llsc_mb();
- if (cpu_has_llsc && R10000_LLSC_WAR) {
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;
@@ -502,7 +502,7 @@ static inline int test_and_change_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
- } else if (cpu_has_llsc) {
+ } else if (kernel_uses_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
unsigned long temp;