Diffstat (limited to 'arch/mips/include/asm/spinlock.h')
-rw-r--r-- | arch/mips/include/asm/spinlock.h | 216
1 file changed, 1 insertion(+), 215 deletions(-)
diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index a8df44d60607..3e7afff196cd 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -13,6 +13,7 @@
 #include <asm/barrier.h>
 #include <asm/processor.h>
+#include <asm/qrwlock.h>
 #include <asm/compiler.h>
 #include <asm/war.h>
@@ -220,221 +221,6 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
 	return tmp;
 }
 
-/*
- * Read-write spinlocks, allowing multiple readers but only one writer.
- *
- * NOTE! it is quite common to have readers in interrupts but no interrupt
- * writers. For those circumstances we can "mix" irq-safe locks - any writer
- * needs to get a irq-safe write-lock, but readers can get non-irqsafe
- * read-locks.
- */
-
-/*
- * read_can_lock - would read_trylock() succeed?
- * @lock: the rwlock in question.
- */
-#define arch_read_can_lock(rw)	((rw)->lock >= 0)
-
-/*
- * write_can_lock - would write_trylock() succeed?
- * @lock: the rwlock in question.
- */
-#define arch_write_can_lock(rw) (!(rw)->lock)
-
-static inline void arch_read_lock(arch_rwlock_t *rw)
-{
-	unsigned int tmp;
-
-	if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_read_lock	\n"
-		"1:	ll	%1, %2					\n"
-		"	bltz	%1, 1b					\n"
-		"	 addu	%1, 1					\n"
-		"	sc	%1, %0					\n"
-		"	beqzl	%1, 1b					\n"
-		"	 nop						\n"
-		"	.set	reorder					\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	} else {
-		do {
-			__asm__ __volatile__(
-			"1:	ll	%1, %2	# arch_read_lock	\n"
-			"	bltz	%1, 1b				\n"
-			"	addu	%1, 1				\n"
-			"2:	sc	%1, %0				\n"
-			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF_SMALL_ASM() (rw->lock)
-			: "memory");
-		} while (unlikely(!tmp));
-	}
-
-	smp_llsc_mb();
-}
-
-static inline void arch_read_unlock(arch_rwlock_t *rw)
-{
-	unsigned int tmp;
-
-	smp_mb__before_llsc();
-
-	if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"1:	ll	%1, %2	# arch_read_unlock	\n"
-		"	addiu	%1, -1					\n"
-		"	sc	%1, %0					\n"
-		"	beqzl	%1, 1b					\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	} else {
-		do {
-			__asm__ __volatile__(
-			"1:	ll	%1, %2	# arch_read_unlock	\n"
-			"	addiu	%1, -1				\n"
-			"	sc	%1, %0				\n"
-			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF_SMALL_ASM() (rw->lock)
-			: "memory");
-		} while (unlikely(!tmp));
-	}
-}
-
-static inline void arch_write_lock(arch_rwlock_t *rw)
-{
-	unsigned int tmp;
-
-	if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_write_lock	\n"
-		"1:	ll	%1, %2					\n"
-		"	bnez	%1, 1b					\n"
-		"	 lui	%1, 0x8000				\n"
-		"	sc	%1, %0					\n"
-		"	beqzl	%1, 1b					\n"
-		"	 nop						\n"
-		"	.set	reorder					\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	} else {
-		do {
-			__asm__ __volatile__(
-			"1:	ll	%1, %2	# arch_write_lock	\n"
-			"	bnez	%1, 1b				\n"
-			"	lui	%1, 0x8000			\n"
-			"2:	sc	%1, %0				\n"
-			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp)
-			: GCC_OFF_SMALL_ASM() (rw->lock)
-			: "memory");
-		} while (unlikely(!tmp));
-	}
-
-	smp_llsc_mb();
-}
-
-static inline void arch_write_unlock(arch_rwlock_t *rw)
-{
-	smp_mb__before_llsc();
-
-	__asm__ __volatile__(
-	"				# arch_write_unlock	\n"
-	"	sw	$0, %0					\n"
-	: "=m" (rw->lock)
-	: "m" (rw->lock)
-	: "memory");
-}
-
-static inline int arch_read_trylock(arch_rwlock_t *rw)
-{
-	unsigned int tmp;
-	int ret;
-
-	if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_read_trylock	\n"
-		"	li	%2, 0					\n"
-		"1:	ll	%1, %3					\n"
-		"	bltz	%1, 2f					\n"
-		"	 addu	%1, 1					\n"
-		"	sc	%1, %0					\n"
-		"	.set	reorder					\n"
-		"	beqzl	%1, 1b					\n"
-		"	 nop						\n"
-		__WEAK_LLSC_MB
-		"	li	%2, 1					\n"
-		"2:							\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	} else {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_read_trylock	\n"
-		"	li	%2, 0					\n"
-		"1:	ll	%1, %3					\n"
-		"	bltz	%1, 2f					\n"
-		"	 addu	%1, 1					\n"
-		"	sc	%1, %0					\n"
-		"	beqz	%1, 1b					\n"
-		"	 nop						\n"
-		"	.set	reorder					\n"
-		__WEAK_LLSC_MB
-		"	li	%2, 1					\n"
-		"2:	.insn						\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	}
-
-	return ret;
-}
-
-static inline int arch_write_trylock(arch_rwlock_t *rw)
-{
-	unsigned int tmp;
-	int ret;
-
-	if (R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	noreorder	# arch_write_trylock	\n"
-		"	li	%2, 0					\n"
-		"1:	ll	%1, %3					\n"
-		"	bnez	%1, 2f					\n"
-		"	 lui	%1, 0x8000				\n"
-		"	sc	%1, %0					\n"
-		"	beqzl	%1, 1b					\n"
-		"	 nop						\n"
-		__WEAK_LLSC_MB
-		"	li	%2, 1					\n"
-		"	.set	reorder					\n"
-		"2:							\n"
-		: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp), "=&r" (ret)
-		: GCC_OFF_SMALL_ASM() (rw->lock)
-		: "memory");
-	} else {
-		do {
-			__asm__ __volatile__(
-			"	ll	%1, %3	# arch_write_trylock	\n"
-			"	li	%2, 0				\n"
-			"	bnez	%1, 2f				\n"
-			"	lui	%1, 0x8000			\n"
-			"	sc	%1, %0				\n"
-			"	li	%2, 1				\n"
-			"2:	.insn					\n"
-			: "=" GCC_OFF_SMALL_ASM() (rw->lock), "=&r" (tmp),
-			  "=&r" (ret)
-			: GCC_OFF_SMALL_ASM() (rw->lock)
-			: "memory");
-		} while (unlikely(!tmp));
-
-		smp_llsc_mb();
-	}
-
-	return ret;
-}
-
 #define arch_read_lock_flags(lock, flags)	arch_read_lock(lock)
 #define arch_write_lock_flags(lock, flags)	arch_write_lock(lock)
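
For reference, the deleted LL/SC assembly implements a signed 32-bit lock word: non-negative while held by readers (each reader increments it), and with the sign bit set while a writer holds it (the "lui %1, 0x8000" above loads 0x80000000). The following is a minimal user-space sketch of those semantics using C11 atomics in place of ll/sc; every rw_demo_* name is a hypothetical illustration, not kernel API. The commit's actual replacement is the generic queued rwlock pulled in via <asm/qrwlock.h>.

/*
 * Illustrative sketch only (not kernel code): the semantics of the
 * removed MIPS rwlock, expressed with C11 atomics. All rw_demo_*
 * names are hypothetical.
 */
#include <stdatomic.h>
#include <stdint.h>

typedef struct {
	_Atomic int32_t lock;	/* >= 0: reader count; sign bit set: writer */
} rw_demo_t;

static void rw_demo_read_lock(rw_demo_t *rw)
{
	int32_t old;

	for (;;) {
		old = atomic_load_explicit(&rw->lock, memory_order_relaxed);
		if (old < 0)
			continue;	/* writer present: spin, like "bltz %1, 1b" */
		/* Atomically bump the reader count, like the ll/addu/sc loop. */
		if (atomic_compare_exchange_weak_explicit(&rw->lock, &old,
				old + 1, memory_order_acquire,
				memory_order_relaxed))
			return;
	}
}

static void rw_demo_read_unlock(rw_demo_t *rw)
{
	/* Drop one reader, like the ll/addiu -1/sc loop. */
	atomic_fetch_sub_explicit(&rw->lock, 1, memory_order_release);
}

static void rw_demo_write_lock(rw_demo_t *rw)
{
	int32_t expected;

	/*
	 * Claim the sign bit only when the word is 0 (no readers, no
	 * writer), like "bnez %1, 1b; lui %1, 0x8000; sc". INT32_MIN is
	 * the signed value of the 0x80000000 bit pattern.
	 */
	do {
		expected = 0;
	} while (!atomic_compare_exchange_weak_explicit(&rw->lock, &expected,
			INT32_MIN, memory_order_acquire,
			memory_order_relaxed));
}

static void rw_demo_write_unlock(rw_demo_t *rw)
{
	/* Plain store of 0, like "sw $0, %0" after the barrier. */
	atomic_store_explicit(&rw->lock, 0, memory_order_release);
}

Because the generic qrwlock is built entirely from the architecture's atomics and spinlock, none of this per-arch assembly is needed any longer, which is why the diffstat above is almost pure deletion.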