author     Richard Henderson <rth@twiddle.net>   2013-07-11 16:42:14 +0200
committer  Matt Turner <mattst88@gmail.com>      2013-07-19 22:54:24 +0200
commit     6da7539734d4e0f5caeea1e0efbb74d7b83db1ca (patch)
tree       f29b23ea3236acda9db434aa89052537f2a8092a /arch/alpha/include
parent     alpha: Modernize lib/mpi/longlong.h (diff)
alpha: Improve atomic_add_unless
Use ll/sc loops instead of C loops around cmpxchg.

Update the atomic64_add_unless block comment to match the code.

Reviewed-and-Tested-by: Matt Turner <mattst88@gmail.com>
Signed-off-by: Matt Turner <mattst88@gmail.com>
Signed-off-by: Richard Henderson <rth@twiddle.net>
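For reference, the generic pattern being replaced reads the counter and retries a cmpxchg until it either observes @u or wins the race; the ll/sc version instead folds the "unless" test and the add into a single load-locked/store-conditional reservation. A minimal user-space sketch of the add-unless contract, written with C11 atomics rather than the kernel API (the function name here is illustrative only):

    #include <stdatomic.h>

    /* Add 'a' to '*v' unless '*v' equals 'u'; return the value observed
     * before any add (the contract of the kernel's __atomic_add_unless). */
    static int add_unless_sketch(atomic_int *v, int a, int u)
    {
            int c = atomic_load(v);

            while (c != u) {
                    /* On failure, compare_exchange reloads 'c' with the
                     * current value, so the 'unless' test is re-run. */
                    if (atomic_compare_exchange_weak(v, &c, c + a))
                            break;
            }
            return c;
    }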
Diffstat (limited to 'arch/alpha/include')
-rw-r--r--  arch/alpha/include/asm/atomic.h | 60
1 file changed, 37 insertions(+), 23 deletions(-)
diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index c2cbe4fc391c..0dc18fc4d925 100644
--- a/arch/alpha/include/asm/atomic.h
+++ b/arch/alpha/include/asm/atomic.h
@@ -186,17 +186,24 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
  */
 static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-        int c, old;
-        c = atomic_read(v);
-        for (;;) {
-                if (unlikely(c == (u)))
-                        break;
-                old = atomic_cmpxchg((v), c, c + (a));
-                if (likely(old == c))
-                        break;
-                c = old;
-        }
-        return c;
+        int c, new, old;
+        smp_mb();
+        __asm__ __volatile__(
+        "1:     ldl_l   %[old],%[mem]\n"
+        "       cmpeq   %[old],%[u],%[c]\n"
+        "       addl    %[old],%[a],%[new]\n"
+        "       bne     %[c],2f\n"
+        "       stl_c   %[new],%[mem]\n"
+        "       beq     %[new],3f\n"
+        "2:\n"
+        ".subsection 2\n"
+        "3:     br      1b\n"
+        ".previous"
+        : [old] "=&r"(old), [new] "=&r"(new), [c] "=&r"(c)
+        : [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
+        : "memory");
+        smp_mb();
+        return old;
 }
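Note that this int variant still returns the value it observed rather than a boolean; in kernels of this vintage the caller-facing boolean form is derived by a generic wrapper in include/linux/atomic.h, roughly:

    /* Caller-facing form: true iff the add actually happened. */
    static inline int atomic_add_unless(atomic_t *v, int a, int u)
    {
            return __atomic_add_unless(v, a, u) != u;
    }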
@@ -207,21 +214,28 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns the old value of @v.
+ * Returns true iff @v was not @u.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
-        long c, old;
-        c = atomic64_read(v);
-        for (;;) {
-                if (unlikely(c == (u)))
-                        break;
-                old = atomic64_cmpxchg((v), c, c + (a));
-                if (likely(old == c))
-                        break;
-                c = old;
-        }
-        return c != (u);
+        long c, tmp;
+        smp_mb();
+        __asm__ __volatile__(
+        "1:     ldq_l   %[tmp],%[mem]\n"
+        "       cmpeq   %[tmp],%[u],%[c]\n"
+        "       addq    %[tmp],%[a],%[tmp]\n"
+        "       bne     %[c],2f\n"
+        "       stq_c   %[tmp],%[mem]\n"
+        "       beq     %[tmp],3f\n"
+        "2:\n"
+        ".subsection 2\n"
+        "3:     br      1b\n"
+        ".previous"
+        : [tmp] "=&r"(tmp), [c] "=&r"(c)
+        : [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
+        : "memory");
+        smp_mb();
+        return !c;
 }

 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
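The atomic64_inc_not_zero() form built on top of this is the usual lookup-side refcounting idiom: take a reference only if the object's count has not already dropped to zero. A hedged sketch of such a caller (the struct and function names are hypothetical, not part of this patch):

    /* Hypothetical lookup-side ref grab: succeeds only while the object
     * is still live; a zero refcount means it is being torn down. */
    struct obj {
            atomic64_t refcnt;
    };

    static inline int obj_tryget(struct obj *o)
    {
            return atomic64_inc_not_zero(&o->refcnt);
    }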