author    | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-04-17 00:20:36 +0200
committer | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-04-17 00:20:36 +0200
commit    | 1da177e4c3f41524e886b7f1b8a0c1fc7321cac2 (patch)
tree      | 0bba044c4ce775e45a88a51686b5d9f90697ea9d /include/asm-i386/atomic.h
download  | linux-1da177e4c3f41524e886b7f1b8a0c1fc7321cac2.tar.xz, linux-1da177e4c3f41524e886b7f1b8a0c1fc7321cac2.zip
tag       | v2.6.12-rc2 (Linux-2.6.12-rc2)
Initial git repository build. I'm not bothering with the full history,
even though we have it. We can create a separate "historical" git
archive of that later if we want to, and in the meantime it's about
3.2GB when imported into git - space that would just make the early
git days unnecessarily complicated, when we don't have a lot of good
infrastructure for it.
Let it rip!
Diffstat (limited to 'include/asm-i386/atomic.h')
-rw-r--r-- | include/asm-i386/atomic.h | 236
1 file changed, 236 insertions(+), 0 deletions(-)
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
new file mode 100644
index 000000000000..509720be772a
--- /dev/null
+++ b/include/asm-i386/atomic.h
@@ -0,0 +1,236 @@
+#ifndef __ARCH_I386_ATOMIC__
+#define __ARCH_I386_ATOMIC__
+
+#include <linux/config.h>
+#include <linux/compiler.h>
+#include <asm/processor.h>
+
+/*
+ * Atomic operations that C can't guarantee us. Useful for
+ * resource counting etc..
+ */
+
+#ifdef CONFIG_SMP
+#define LOCK "lock ; "
+#else
+#define LOCK ""
+#endif
+
+/*
+ * Make sure gcc doesn't try to be clever and move things around
+ * on us. We need to use _exactly_ the address the user gave us,
+ * not some alias that contains the same information.
+ */
+typedef struct { volatile int counter; } atomic_t;
+
+#define ATOMIC_INIT(i)	{ (i) }
+
+/**
+ * atomic_read - read atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically reads the value of @v.
+ */
+#define atomic_read(v)		((v)->counter)
+
+/**
+ * atomic_set - set atomic variable
+ * @v: pointer of type atomic_t
+ * @i: required value
+ *
+ * Atomically sets the value of @v to @i.
+ */
+#define atomic_set(v,i)		(((v)->counter) = (i))
+
+/**
+ * atomic_add - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic_t
+ *
+ * Atomically adds @i to @v.
+ */
+static __inline__ void atomic_add(int i, atomic_t *v)
+{
+	__asm__ __volatile__(
+		LOCK "addl %1,%0"
+		:"=m" (v->counter)
+		:"ir" (i), "m" (v->counter));
+}
+
+/**
+ * atomic_sub - subtract the atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v.
+ */
+static __inline__ void atomic_sub(int i, atomic_t *v)
+{
+	__asm__ __volatile__(
+		LOCK "subl %1,%0"
+		:"=m" (v->counter)
+		:"ir" (i), "m" (v->counter));
+}
+
+/**
+ * atomic_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		LOCK "subl %2,%0; sete %1"
+		:"=m" (v->counter), "=qm" (c)
+		:"ir" (i), "m" (v->counter) : "memory");
+	return c;
+}
+
+/**
+ * atomic_inc - increment atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1.
+ */
+static __inline__ void atomic_inc(atomic_t *v)
+{
+	__asm__ __volatile__(
+		LOCK "incl %0"
+		:"=m" (v->counter)
+		:"m" (v->counter));
+}
+
+/**
+ * atomic_dec - decrement atomic variable
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1.
+ */
+static __inline__ void atomic_dec(atomic_t *v)
+{
+	__asm__ __volatile__(
+		LOCK "decl %0"
+		:"=m" (v->counter)
+		:"m" (v->counter));
+}
+
+/**
+ * atomic_dec_and_test - decrement and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+static __inline__ int atomic_dec_and_test(atomic_t *v)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		LOCK "decl %0; sete %1"
+		:"=m" (v->counter), "=qm" (c)
+		:"m" (v->counter) : "memory");
+	return c != 0;
+}
+
+/**
+ * atomic_inc_and_test - increment and test
+ * @v: pointer of type atomic_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+static __inline__ int atomic_inc_and_test(atomic_t *v)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		LOCK "incl %0; sete %1"
+		:"=m" (v->counter), "=qm" (c)
+		:"m" (v->counter) : "memory");
+	return c != 0;
+}
+
+/**
+ * atomic_add_negative - add and test if negative
+ * @v: pointer of type atomic_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+static __inline__ int atomic_add_negative(int i, atomic_t *v)
+{
+	unsigned char c;
+
+	__asm__ __volatile__(
+		LOCK "addl %2,%0; sets %1"
+		:"=m" (v->counter), "=qm" (c)
+		:"ir" (i), "m" (v->counter) : "memory");
+	return c;
+}
+
+/**
+ * atomic_add_return - add and return
+ * @v: pointer of type atomic_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @v and returns @i + @v
+ */
+static __inline__ int atomic_add_return(int i, atomic_t *v)
+{
+	int __i;
+#ifdef CONFIG_M386
+	if(unlikely(boot_cpu_data.x86==3))
+		goto no_xadd;
+#endif
+	/* Modern 486+ processor */
+	__i = i;
+	__asm__ __volatile__(
+		LOCK "xaddl %0, %1;"
+		:"=r"(i)
+		:"m"(v->counter), "0"(i));
+	return i + __i;
+
+#ifdef CONFIG_M386
+no_xadd: /* Legacy 386 processor */
+	local_irq_disable();
+	__i = atomic_read(v);
+	atomic_set(v, i + __i);
+	local_irq_enable();
+	return i + __i;
+#endif
+}
+
+static __inline__ int atomic_sub_return(int i, atomic_t *v)
+{
+	return atomic_add_return(-i,v);
+}
+
+#define atomic_inc_return(v)  (atomic_add_return(1,v))
+#define atomic_dec_return(v)  (atomic_sub_return(1,v))
+
+/* These are x86-specific, used by some header files */
+#define atomic_clear_mask(mask, addr) \
+__asm__ __volatile__(LOCK "andl %0,%1" \
+: : "r" (~(mask)),"m" (*addr) : "memory")
+
+#define atomic_set_mask(mask, addr) \
+__asm__ __volatile__(LOCK "orl %0,%1" \
+: : "r" (mask),"m" (*(addr)) : "memory")
+
+/* Atomic operations are already serializing on x86 */
+#define smp_mb__before_atomic_dec()	barrier()
+#define smp_mb__after_atomic_dec()	barrier()
+#define smp_mb__before_atomic_inc()	barrier()
+#define smp_mb__after_atomic_inc()	barrier()
+
+#endif
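The canonical consumer of atomic_dec_and_test() is reference counting: every holder increments the count, every release decrements it, and exactly one releaser (the one that drops the count to zero) performs the teardown. Below is a minimal, hypothetical user-space sketch of that idiom; struct object, object_get() and object_put() are invented for illustration, and C11 <stdatomic.h> stands in for the LOCK-prefixed inline assembly above, which only builds inside the kernel.

/*
 * Hypothetical sketch of the refcount idiom enabled by
 * atomic_inc()/atomic_dec_and_test(), using C11 atomics as a
 * user-space stand-in for the kernel primitives.
 */
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct object {
	atomic_int refcount;
	char payload[32];
};

static struct object *object_get(struct object *obj)
{
	atomic_fetch_add(&obj->refcount, 1);	/* like atomic_inc() */
	return obj;
}

static void object_put(struct object *obj)
{
	/*
	 * atomic_fetch_sub() returns the old value, so old == 1 means
	 * the count just hit zero; this mirrors atomic_dec_and_test()
	 * returning true. Only the last holder frees the object.
	 */
	if (atomic_fetch_sub(&obj->refcount, 1) == 1) {
		printf("freeing %p\n", (void *)obj);
		free(obj);
	}
}

int main(void)
{
	struct object *obj = calloc(1, sizeof(*obj));

	atomic_init(&obj->refcount, 1);	/* like ATOMIC_INIT(1) */
	object_get(obj);		/* take a second reference */
	object_put(obj);		/* drops to 1, no free */
	object_put(obj);		/* drops to 0, frees */
	return 0;
}

The test-and-branch cannot be split into a plain decrement followed by a read: between those two steps another CPU could also decrement, and both could then observe zero and double-free. Fusing the decrement and the zero test into one atomic operation (subl/decl plus sete in the header above) is what makes the idiom safe.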
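A second point worth spelling out is atomic_add_return()'s contract. x86 xaddl exchanges while adding: the register operand receives the counter's old value. The function therefore returns i + __i, i.e. the new value, which is also what the CONFIG_M386 fallback computes under disabled interrupts (the 386 lacks xadd, so the read-modify-write is made atomic against local interrupts instead). A small hypothetical sketch, again with C11 atomics standing in for the inline assembly, and my_atomic_add_return() an invented name:

/*
 * Hypothetical sketch of atomic_add_return()'s return-value
 * semantics: the primitive hands back the OLD value (like xaddl),
 * and the wrapper adds i to produce the NEW value (like i + __i).
 */
#include <assert.h>
#include <stdatomic.h>

static int my_atomic_add_return(int i, atomic_int *v)
{
	int old = atomic_fetch_add(v, i);	/* old value, like xaddl */
	return old + i;				/* new value, like i + __i */
}

int main(void)
{
	atomic_int v = 40;

	assert(my_atomic_add_return(2, &v) == 42);	/* returns the new value */
	assert(atomic_load(&v) == 42);
	return 0;
}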