Diffstat (limited to 'arch/sh/include/asm/atomic.h')
-rw-r--r--  arch/sh/include/asm/atomic.h  84
1 file changed, 34 insertions(+), 50 deletions(-)
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index e8e78137c6f5..c7983124d99d 100644
--- a/arch/sh/include/asm/atomic.h
+++ b/arch/sh/include/asm/atomic.h
@@ -13,7 +13,7 @@
#define ATOMIC_INIT(i) ( (atomic_t) { (i) } )
-#define atomic_read(v) ((v)->counter)
+#define atomic_read(v) (*(volatile int *)&(v)->counter)
#define atomic_set(v,i) ((v)->counter = (i))
#if defined(CONFIG_GUSA_RB)
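
The atomic_read() change above forces a real load from memory on every use by casting through a volatile pointer, the same idea as the kernel's ACCESS_ONCE(): without it, an optimizing compiler may legally cache v->counter in a register and, for example, spin forever in a polling loop. A minimal userspace sketch of the two behaviours (illustrative code, not part of this commit):

#include <stdio.h>

typedef struct { int counter; } atomic_t;

/* Plain read: the optimizer may reuse a previously loaded value. */
#define atomic_read_plain(v)	((v)->counter)

/* Volatile read, as introduced in the hunk above: every expansion
 * performs a fresh load from memory. */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

int main(void)
{
	atomic_t v = { 42 };

	/* In "while (atomic_read_plain(&v) != 0) ;" the load may be
	 * hoisted out of the loop; the volatile form may not be. */
	printf("plain=%d volatile=%d\n",
	       atomic_read_plain(&v), atomic_read(&v));
	return 0;
}
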
@@ -25,64 +25,48 @@
#endif
#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
+#define atomic_dec_return(v) atomic_sub_return(1, (v))
+#define atomic_inc_return(v) atomic_add_return(1, (v))
+#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
+#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
+#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
+#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-#define atomic_dec_return(v) atomic_sub_return(1,(v))
-#define atomic_inc_return(v) atomic_add_return(1,(v))
+#define atomic_inc(v) atomic_add(1, (v))
+#define atomic_dec(v) atomic_sub(1, (v))
-/*
- * atomic_inc_and_test - increment and test
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
+
+/**
+ * atomic_add_unless - add unless the number is a given value
* @v: pointer of type atomic_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
*
- * Atomically increments @v by 1
- * and returns true if the result is zero, or false for all
- * other cases.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
*/
-#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
-
-#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
-#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
-
-#define atomic_inc(v) atomic_add(1,(v))
-#define atomic_dec(v) atomic_sub(1,(v))
-
-#if !defined(CONFIG_GUSA_RB) && !defined(CONFIG_CPU_SH4A)
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
-{
- int ret;
- unsigned long flags;
-
- local_irq_save(flags);
- ret = v->counter;
- if (likely(ret == old))
- v->counter = new;
- local_irq_restore(flags);
-
- return ret;
-}
-
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
- int ret;
- unsigned long flags;
-
- local_irq_save(flags);
- ret = v->counter;
- if (ret != u)
- v->counter += a;
- local_irq_restore(flags);
-
- return ret != u;
+ int c, old;
+ c = atomic_read(v);
+ for (;;) {
+ if (unlikely(c == (u)))
+ break;
+ old = atomic_cmpxchg((v), c, c + (a));
+ if (likely(old == c))
+ break;
+ c = old;
+ }
+
+ return c != (u);
}
-#endif /* !CONFIG_GUSA_RB && !CONFIG_CPU_SH4A */
-
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
-#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
-/* Atomic operations are already serializing on SH */
-#define smp_mb__before_atomic_dec() barrier()
-#define smp_mb__after_atomic_dec() barrier()
-#define smp_mb__before_atomic_inc() barrier()
-#define smp_mb__after_atomic_inc() barrier()
+#define smp_mb__before_atomic_dec() smp_mb()
+#define smp_mb__after_atomic_dec() smp_mb()
+#define smp_mb__before_atomic_inc() smp_mb()
+#define smp_mb__after_atomic_inc() smp_mb()
#include <asm-generic/atomic-long.h>
#include <asm-generic/atomic64.h>
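
Two things are worth noting about the second hunk. First, atomic_add_unless() moves from an irq-disabling critical section, which is only safe on UP, to a standard cmpxchg() retry loop that is also correct on SMP; atomic_inc_not_zero(v) is simply atomic_add_unless(v, 1, 0). Second, the smp_mb__{before,after}_atomic_{dec,inc}() helpers become real smp_mb() barriers rather than compiler-only barrier()s, presumably because the removed "atomic operations are already serializing" assumption no longer holds once SH-4A SMP is in the picture. Below is a userspace sketch of the new retry loop, using GCC's __sync_val_compare_and_swap() in place of the kernel's cmpxchg(); the builtin and the demo scaffolding are the only parts not taken from the diff:

#include <stdio.h>

typedef struct { volatile int counter; } atomic_t;

/* Same structure as the new kernel version above, with cmpxchg()
 * swapped for a GCC atomic builtin so it runs in userspace. */
static int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = v->counter;
	for (;;) {
		if (c == u)		/* hit the excluded value: give up */
			break;
		old = __sync_val_compare_and_swap(&v->counter, c, c + a);
		if (old == c)		/* no race: the add took effect */
			break;
		c = old;		/* raced with another update: retry */
	}
	return c != u;
}

int main(void)
{
	atomic_t v = { 0 };

	/* atomic_inc_not_zero() semantics: never revive a zero refcount. */
	printf("on 0: ret=%d counter=%d\n",
	       atomic_add_unless(&v, 1, 0), v.counter);	/* ret=0, counter=0 */

	v.counter = 3;
	printf("on 3: ret=%d counter=%d\n",
	       atomic_add_unless(&v, 1, 0), v.counter);	/* ret=1, counter=4 */
	return 0;
}

Note that the loop retries with the freshly observed value rather than re-reading the counter from scratch, so each failed compare-and-swap still makes forward progress toward either success or the excluded value.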