author     Ralf Baechle <ralf@linux-mips.org>   2006-11-30 02:14:50 +0100
committer  Ralf Baechle <ralf@linux-mips.org>   2006-11-30 02:14:50 +0100
commit     4ffd8b3838f22c34b21a25b7612795ca45d14db6
tree       7a5660bd7fc07e54e4540ed864b9631005390994 /include/asm-mips/bitops.h
parent     [MIPS] Remove old junk left from old atomic_lock.
[MIPS] Remove userspace proofing from <asm/bitops.h>.
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'include/asm-mips/bitops.h')
-rw-r--r--  include/asm-mips/bitops.h  63
1 file changed, 21 insertions, 42 deletions
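The patch below removes the __bi_flags / __bi_local_irq_save() / __bi_local_irq_restore() indirection, which existed only so that the header would still compile when dragged into userspace, and calls the irqflags API directly; the header is now kernel-only. A minimal sketch of the before/after shape, distilled from the hunks below (old_style_set_bit and new_style_set_bit are hypothetical names, the bodies are abridged and assume the surrounding header context for SZLONG_LOG, SZLONG_MASK and local_irq_save()):

/* --- illustrative sketch, not part of the patch --- */

/* Before: the fallback indirected through macros so that a userspace
 * include of this kernel header still compiled; outside __KERNEL__
 * the macros expanded to nothing. */
#ifdef __KERNEL__
#define __bi_flags			unsigned long flags
#define __bi_local_irq_save(x)		local_irq_save(x)
#define __bi_local_irq_restore(x)	local_irq_restore(x)
#else
#define __bi_flags
#define __bi_local_irq_save(x)
#define __bi_local_irq_restore(x)
#endif /* __KERNEL__ */

static inline void old_style_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	__bi_flags;

	__bi_local_irq_save(flags);
	*a |= mask;
	__bi_local_irq_restore(flags);
}

/* After: kernel-only header, so the <linux/irqflags.h> API is used
 * directly in the fallback path. */
static inline void new_style_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
	unsigned long mask = 1UL << (nr & SZLONG_MASK);
	unsigned long flags;

	local_irq_save(flags);
	*a |= mask;
	local_irq_restore(flags);
}
/* --- end of sketch --- */

The same mechanical substitution is applied to clear_bit(), change_bit(), test_and_set_bit(), test_and_clear_bit() and test_and_change_bit() in the hunks that follow.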
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 1bb89c5a10ee..8cd61c197052 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -10,10 +10,13 @@
#define _ASM_BITOPS_H
#include <linux/compiler.h>
+#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/bug.h>
#include <asm/byteorder.h> /* sigh ... */
#include <asm/cpu-features.h>
+#include <asm/sgidefs.h>
+#include <asm/war.h>
#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
@@ -29,12 +32,6 @@
#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
#endif
-#ifdef __KERNEL__
-
-#include <linux/irqflags.h>
-#include <asm/sgidefs.h>
-#include <asm/war.h>
-
/*
* clear_bit() doesn't provide any barrier for the compiler.
*/
@@ -42,20 +39,6 @@
#define smp_mb__after_clear_bit() smp_mb()
/*
- * Only disable interrupt for kernel mode stuff to keep usermode stuff
- * that dares to use kernel include files alive.
- */
-
-#define __bi_flags unsigned long flags
-#define __bi_local_irq_save(x) local_irq_save(x)
-#define __bi_local_irq_restore(x) local_irq_restore(x)
-#else
-#define __bi_flags
-#define __bi_local_irq_save(x)
-#define __bi_local_irq_restore(x)
-#endif /* __KERNEL__ */
-
-/*
* set_bit - Atomically set a bit in memory
* @nr: the bit to set
* @addr: the address to start counting from
@@ -93,13 +76,13 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
} else {
volatile unsigned long *a = addr;
unsigned long mask;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
*a |= mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
}
}
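The fallback path above first locates the unsigned long that holds bit nr and then builds a mask for the bit inside that word, using the SZLONG_LOG / SZLONG_MASK values defined earlier in the header (5 and 31 for a 32-bit kernel, as shown in the first hunk). A small standalone worked example of that arithmetic; plain userspace C, for illustration only, with the 32-bit constants copied locally:

/* --- illustrative sketch, not part of the patch --- */
#include <stdio.h>

#define SZLONG_LOG	5
#define SZLONG_MASK	31UL

int main(void)
{
	unsigned long nr = 70;

	/* Which unsigned long in the array, and which bit inside it. */
	unsigned long word = nr >> SZLONG_LOG;          /* 70 / 32 == 2    */
	unsigned long mask = 1UL << (nr & SZLONG_MASK); /* 1 << 6 == 0x40  */

	printf("bit %lu -> word %lu, mask 0x%lx\n", nr, word, mask);
	return 0;
}
/* --- end of sketch --- */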
@@ -141,13 +124,13 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
} else {
volatile unsigned long *a = addr;
unsigned long mask;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
*a &= ~mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
}
}
@@ -191,13 +174,13 @@ static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
} else {
volatile unsigned long *a = addr;
unsigned long mask;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
*a ^= mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
}
}
@@ -258,14 +241,14 @@ static inline int test_and_set_bit(unsigned long nr,
volatile unsigned long *a = addr;
unsigned long mask;
int retval;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
retval = (mask & *a) != 0;
*a |= mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
return retval;
}
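test_and_set_bit() above returns the previous value of the bit, which is what makes the usual claim/release pattern work. A hedged sketch of such a caller; BUSY_BIT, my_flags, try_claim and release_claim are hypothetical names used only for this example, and the barrier hook is the smp_mb__after_clear_bit() definition kept near the top of this header:

/* --- illustrative sketch, not part of the patch --- */
static unsigned long my_flags;		/* hypothetical flag word */
#define BUSY_BIT	0

static int try_claim(void)
{
	/* A nonzero return from test_and_set_bit() means the bit was
	 * already set, i.e. someone else holds the claim. */
	return !test_and_set_bit(BUSY_BIT, &my_flags);
}

static void release_claim(void)
{
	clear_bit(BUSY_BIT, &my_flags);
	/* clear_bit() is not a barrier by itself; the full barrier in
	 * smp_mb__after_clear_bit() keeps later accesses from being
	 * reordered before the clear on SMP. */
	smp_mb__after_clear_bit();
}
/* --- end of sketch --- */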
@@ -330,14 +313,14 @@ static inline int test_and_clear_bit(unsigned long nr,
volatile unsigned long *a = addr;
unsigned long mask;
int retval;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
retval = (mask & *a) != 0;
*a &= ~mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
return retval;
}
@@ -399,23 +382,19 @@ static inline int test_and_change_bit(unsigned long nr,
} else {
volatile unsigned long *a = addr;
unsigned long mask, retval;
- __bi_flags;
+ unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << (nr & SZLONG_MASK);
- __bi_local_irq_save(flags);
+ local_irq_save(flags);
retval = (mask & *a) != 0;
*a ^= mask;
- __bi_local_irq_restore(flags);
+ local_irq_restore(flags);
return retval;
}
}
-#undef __bi_flags
-#undef __bi_local_irq_save
-#undef __bi_local_irq_restore
-
#include <asm-generic/bitops/non-atomic.h>
/*