author     Ingo Molnar <mingo@elte.hu>          2006-01-10 00:59:17 +0100
committer  Ingo Molnar <mingo@hera.kernel.org>  2006-01-10 00:59:17 +0100
commit     ffbf670f5cd50501a34a5187981460da2216071e (patch)
tree       f2cd9eeeb59839d15feddf906310bb375474c573 /include
parent     Merge master.kernel.org:/pub/scm/linux/kernel/git/mchehab/v4l-dvb (diff)
[PATCH] mutex subsystem, add atomic_xchg() to all arches
add atomic_xchg() to all the architectures. Needed by the new mutex code.

Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Arjan van de Ven <arjan@infradead.org>
Diffstat (limited to 'include')
-rw-r--r--  include/asm-alpha/atomic.h      | 1
-rw-r--r--  include/asm-arm/atomic.h        | 2
-rw-r--r--  include/asm-arm26/atomic.h      | 2
-rw-r--r--  include/asm-cris/atomic.h       | 2
-rw-r--r--  include/asm-frv/atomic.h        | 1
-rw-r--r--  include/asm-h8300/atomic.h      | 2
-rw-r--r--  include/asm-i386/atomic.h       | 1
-rw-r--r--  include/asm-ia64/atomic.h       | 1
-rw-r--r--  include/asm-m32r/atomic.h       | 1
-rw-r--r--  include/asm-m68k/atomic.h       | 1
-rw-r--r--  include/asm-m68knommu/atomic.h  | 1
-rw-r--r--  include/asm-mips/atomic.h       | 1
-rw-r--r--  include/asm-parisc/atomic.h     | 1
-rw-r--r--  include/asm-powerpc/atomic.h    | 1
-rw-r--r--  include/asm-s390/atomic.h       | 2
-rw-r--r--  include/asm-sh/atomic.h         | 2
-rw-r--r--  include/asm-sh64/atomic.h       | 2
-rw-r--r--  include/asm-sparc/atomic.h      | 1
-rw-r--r--  include/asm-sparc64/atomic.h    | 1
-rw-r--r--  include/asm-v850/atomic.h       | 2
-rw-r--r--  include/asm-x86_64/atomic.h     | 1
-rw-r--r--  include/asm-xtensa/atomic.h     | 1
22 files changed, 30 insertions, 0 deletions
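
For context, the macro added to each header, atomic_xchg(v, new), atomically stores 'new' into v->counter and returns the previous value. Below is a minimal, hypothetical sketch of how an xchg-based mutex fastpath could be built on this primitive; the names sketch_mutex, sketch_mutex_lock and sketch_mutex_lock_slowpath are illustrative assumptions, not the mutex implementation this patch supports.

/*
 * Hypothetical sketch (not part of this patch): an xchg-based mutex
 * fastpath built on the atomic_xchg() primitive added here.
 */
#include <asm/atomic.h>

struct sketch_mutex {
	atomic_t count;		/* 1: unlocked, 0: locked, <0: waiters (assumed convention) */
};

/* Illustrative slowpath hook -- name and behaviour are assumptions. */
extern void sketch_mutex_lock_slowpath(struct sketch_mutex *lock);

static inline void sketch_mutex_lock(struct sketch_mutex *lock)
{
	/*
	 * Swap 0 (locked) into the counter.  If the old value was 1 the
	 * mutex was unlocked and we now own it; any other value means
	 * contention, so fall back to the slowpath.
	 */
	if (atomic_xchg(&lock->count, 0) != 1)
		sketch_mutex_lock_slowpath(lock);
}

An unlock path would, in the same spirit, exchange 1 back into the counter and check the old value for waiters; the per-architecture diffs that follow only add the atomic_xchg() definition itself.
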
diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index cb03bbe92cdf..fc77f7413083 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -176,6 +176,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-arm/atomic.h b/include/asm-arm/atomic.h
index f72b63309bc5..3d7283d84405 100644
--- a/include/asm-arm/atomic.h
+++ b/include/asm-arm/atomic.h
@@ -175,6 +175,8 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
#endif /* __LINUX_ARM_ARCH__ */
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int c, old;
diff --git a/include/asm-arm26/atomic.h b/include/asm-arm26/atomic.h
index 3074b0e76343..1552c8653990 100644
--- a/include/asm-arm26/atomic.h
+++ b/include/asm-arm26/atomic.h
@@ -76,6 +76,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-cris/atomic.h b/include/asm-cris/atomic.h
index 2df2c7aa19b7..0b51a87e5532 100644
--- a/include/asm-cris/atomic.h
+++ b/include/asm-cris/atomic.h
@@ -136,6 +136,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-frv/atomic.h b/include/asm-frv/atomic.h
index 9c9e9499cfd8..a59f684b4f33 100644
--- a/include/asm-frv/atomic.h
+++ b/include/asm-frv/atomic.h
@@ -328,6 +328,7 @@ extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
#endif
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-h8300/atomic.h b/include/asm-h8300/atomic.h
index d891541e89c3..21f54428c86b 100644
--- a/include/asm-h8300/atomic.h
+++ b/include/asm-h8300/atomic.h
@@ -95,6 +95,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index 7a5472d77091..de649d3aa2d4 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -216,6 +216,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
}
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index 15cf7984c48e..d3e0dfa99e1f 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -89,6 +89,7 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
}
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-m32r/atomic.h b/include/asm-m32r/atomic.h
index 70761278b6cb..3122fe106f05 100644
--- a/include/asm-m32r/atomic.h
+++ b/include/asm-m32r/atomic.h
@@ -243,6 +243,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-m68k/atomic.h b/include/asm-m68k/atomic.h
index b8a4e75d679d..a4a84d5c65d5 100644
--- a/include/asm-m68k/atomic.h
+++ b/include/asm-m68k/atomic.h
@@ -140,6 +140,7 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-m68knommu/atomic.h b/include/asm-m68knommu/atomic.h
index 1702dbe9318c..6c4e4b63e454 100644
--- a/include/asm-m68knommu/atomic.h
+++ b/include/asm-m68knommu/atomic.h
@@ -129,6 +129,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 92256e43a938..94a95872d727 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -289,6 +289,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 64ebd086c40d..2ca56d34aaad 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -165,6 +165,7 @@ static __inline__ int atomic_read(const atomic_t *v)
/* exported interface */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ae395a0632a6..248f9aec959c 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -165,6 +165,7 @@ static __inline__ int atomic_dec_return(atomic_t *v)
}
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h
index d82aedf616fe..be6fefe223d6 100644
--- a/include/asm-s390/atomic.h
+++ b/include/asm-s390/atomic.h
@@ -75,6 +75,8 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t * v)
__CS_LOOP(v, mask, "or");
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
__asm__ __volatile__(" cs %0,%3,0(%2)\n"
diff --git a/include/asm-sh/atomic.h b/include/asm-sh/atomic.h
index 618d8e0de348..fb627de217f2 100644
--- a/include/asm-sh/atomic.h
+++ b/include/asm-sh/atomic.h
@@ -101,6 +101,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-sh64/atomic.h b/include/asm-sh64/atomic.h
index f3ce5c0df13a..28f2ea9b567b 100644
--- a/include/asm-sh64/atomic.h
+++ b/include/asm-sh64/atomic.h
@@ -113,6 +113,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-sparc/atomic.h b/include/asm-sparc/atomic.h
index accb4967e9d2..e1033170bd3a 100644
--- a/include/asm-sparc/atomic.h
+++ b/include/asm-sparc/atomic.h
@@ -20,6 +20,7 @@ typedef struct { volatile int counter; } atomic_t;
extern int __atomic_add_return(int, atomic_t *);
extern int atomic_cmpxchg(atomic_t *, int, int);
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
extern int atomic_add_unless(atomic_t *, int, int);
extern void atomic_set(atomic_t *, int);
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 11f5aa5d108c..25256bdc8aae 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -72,6 +72,7 @@ extern int atomic64_sub_ret(int, atomic64_t *);
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_add_unless(v, a, u) \
({ \
diff --git a/include/asm-v850/atomic.h b/include/asm-v850/atomic.h
index f5b9ab6f4e70..166df00457ea 100644
--- a/include/asm-v850/atomic.h
+++ b/include/asm-v850/atomic.h
@@ -104,6 +104,8 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
return ret;
}
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
int ret;
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 72eb071488c7..6b540237a2f8 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -389,6 +389,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
#define atomic64_dec_return(v) (atomic64_sub_return(1,v))
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value
diff --git a/include/asm-xtensa/atomic.h b/include/asm-xtensa/atomic.h
index e2ce06b101ad..fe105a123924 100644
--- a/include/asm-xtensa/atomic.h
+++ b/include/asm-xtensa/atomic.h
@@ -224,6 +224,7 @@ static inline int atomic_sub_return(int i, atomic_t * v)
#define atomic_add_negative(i,v) (atomic_add_return((i),(v)) < 0)
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/**
* atomic_add_unless - add unless the number is a given value