Diffstat (limited to 'include/asm-arm/system.h')
-rw-r--r--	include/asm-arm/system.h	94
1 file changed, 52 insertions, 42 deletions
diff --git a/include/asm-arm/system.h b/include/asm-arm/system.h
index aa223fc546af..69134c7518c1 100644
--- a/include/asm-arm/system.h
+++ b/include/asm-arm/system.h
@@ -3,6 +3,7 @@
 
 #ifdef __KERNEL__
 
+#include <asm/memory.h>
 
 #define CPU_ARCH_UNKNOWN	0
 #define CPU_ARCH_ARMv3		1
@@ -140,6 +141,55 @@ static inline int cpu_is_xsc3(void)
 #define	cpu_is_xscale()	1
 #endif
 
+#define UDBG_UNDEFINED	(1 << 0)
+#define UDBG_SYSCALL	(1 << 1)
+#define UDBG_BADABORT	(1 << 2)
+#define UDBG_SEGV	(1 << 3)
+#define UDBG_BUS	(1 << 4)
+
+extern unsigned int user_debug;
+
+#if __LINUX_ARM_ARCH__ >= 4
+#define vectors_high()	(cr_alignment & CR_V)
+#else
+#define vectors_high()	(0)
+#endif
+
+#if defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ >= 6
+#define isb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
+				    : : "r" (0) : "memory")
+#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
+				    : : "r" (0) : "memory")
+#define dmb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
+				    : : "r" (0) : "memory")
+#else
+#define isb() __asm__ __volatile__ ("" : : : "memory")
+#define dsb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 4" \
+				    : : "r" (0) : "memory")
+#define dmb() __asm__ __volatile__ ("" : : : "memory")
+#endif
+
+#ifndef CONFIG_SMP
+#define mb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define rmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define wmb()	do { if (arch_is_coherent()) dmb(); else barrier(); } while (0)
+#define smp_mb()	barrier()
+#define smp_rmb()	barrier()
+#define smp_wmb()	barrier()
+#else
+#define mb()		dmb()
+#define rmb()		dmb()
+#define wmb()		dmb()
+#define smp_mb()	dmb()
+#define smp_rmb()	dmb()
+#define smp_wmb()	dmb()
+#endif
+#define read_barrier_depends()		do { } while(0)
+#define smp_read_barrier_depends()	do { } while(0)
+
+#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
+#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
+
 extern unsigned long cr_no_alignment;	/* defined in entry-armv.S */
 extern unsigned long cr_alignment;	/* defined in entry-armv.S */
 
@@ -154,6 +204,7 @@ static inline void set_cr(unsigned int val)
 {
 	asm volatile("mcr p15, 0, %0, c1, c0, 0	@ set CR"
 	  : : "r" (val) : "cc");
+	isb();
 }
 
 #ifndef CONFIG_SMP
@@ -176,34 +227,9 @@ static inline void set_copro_access(unsigned int val)
 {
 	asm volatile("mcr p15, 0, %0, c1, c0, 2	@ set copro access"
 	  : : "r" (val) : "cc");
+	isb();
 }
 
-#define UDBG_UNDEFINED	(1 << 0)
-#define UDBG_SYSCALL	(1 << 1)
-#define UDBG_BADABORT	(1 << 2)
-#define UDBG_SEGV	(1 << 3)
-#define UDBG_BUS	(1 << 4)
-
-extern unsigned int user_debug;
-
-#if __LINUX_ARM_ARCH__ >= 4
-#define vectors_high()	(cr_alignment & CR_V)
-#else
-#define vectors_high()	(0)
-#endif
-
-#if __LINUX_ARM_ARCH__ >= 6
-#define mb() __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5" \
-                                    : : "r" (0) : "memory")
-#else
-#define mb() __asm__ __volatile__ ("" : : : "memory")
-#endif
-#define rmb() mb()
-#define wmb() mb()
-#define read_barrier_depends() do { } while(0)
-#define set_mb(var, value) do { var = value; mb(); } while (0)
-#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");
-
 /*
  * switch_mm() may do a full cache flush over the context switch,
  * so enable interrupts over the context switch to avoid high
@@ -233,22 +259,6 @@ static inline void sched_cacheflush(void)
 {
 }
 
-#ifdef CONFIG_SMP
-
-#define smp_mb()		mb()
-#define smp_rmb()		rmb()
-#define smp_wmb()		wmb()
-#define smp_read_barrier_depends()	read_barrier_depends()
-
-#else
-
-#define smp_mb()		barrier()
-#define smp_rmb()		barrier()
-#define smp_wmb()		barrier()
-#define smp_read_barrier_depends()	do { } while(0)
-
-#endif /* CONFIG_SMP */
-
 #if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
 /*
  * On the StrongARM, "swp" is terminally broken since it bypasses the
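The barrier rework above splits mandatory barriers (mb/rmb/wmb, for ordering against devices and coherent DMA) from SMP barriers (smp_mb/smp_rmb/smp_wmb, for CPU-to-CPU ordering): on SMP kernels the smp_* variants now emit a real dmb, while on UP they collapse to a compiler-only barrier(). The classic publish/consume pattern shows what that ordering buys. This sketch is not part of the commit; "data" and "ready" are hypothetical variables, and it assumes the macros exactly as defined in this patch.

/*
 * Illustrative sketch (not from the patch): producer publishes a
 * payload, consumer reads it back, using the smp_* barriers above.
 */
static int data;
static int ready;

static void producer(void)
{
	data = 42;	/* 1: write the payload */
	smp_wmb();	/* make the payload visible before the flag */
	ready = 1;	/* 2: publish */
}

static int consumer(void)
{
	if (ready) {
		smp_rmb();	/* order the flag read before the payload read */
		return data;	/* guaranteed to observe 42, not a stale value */
	}
	return -1;	/* not published yet */
}

On an ARMv6 or XSC3 SMP system both barriers become "mcr p15, 0, %0, c7, c10, 5" (DMB); on a uniprocessor build they cost nothing at run time.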
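The other substantive change is the isb() appended to set_cr() and set_copro_access(): after an mcr write to a CP15 control register, the instruction synchronization barrier ensures the new setting is in effect for the very next instruction rather than at some indeterminate later point. A hedged sketch of a caller that depends on this follows; enable_alignment_check() is a hypothetical helper, while get_cr() and CR_A are existing definitions in this header.

/*
 * Hypothetical caller (not from the patch): enable alignment-fault
 * checking.  Without the isb() now inside set_cr(), instructions
 * issued immediately after the mcr could still execute under the old
 * control-register configuration.
 */
static inline void enable_alignment_check(void)
{
	set_cr(get_cr() | CR_A);	/* CR_A: alignment abort enable bit */
}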