author     Nick Piggin <npiggin@suse.de>        2008-11-11 18:50:48 +0100
committer  Paul Mackerras <paulus@samba.org>    2008-11-19 06:04:55 +0100
commit     46d075be585eae2b74265e4e64ca38dde16a09c6 (patch)
tree       d12904aeb09e4f07cf07e6f8b0a9831c953f5d78 /arch/powerpc
parent     powerpc: Update 64bit __copy_tofrom_user() using CPU_FTR_UNALIGNED_LD_STD (diff)
powerpc: Optimise smp_wmb
Change 2d1b2027626d5151fff8ef7c06ca8e7876a1a510 ("powerpc: Fixup lwsync at runtime") removed __SUBARCH_HAS_LWSYNC, causing smp_wmb to revert back to eieio for all CPUs. This restores the behaviour introduced in 74f0609526afddd88bef40b651da24f3167b10b2 ("powerpc: Optimise smp_wmb on 64-bit processors").

Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Paul Mackerras <paulus@samba.org>
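For illustration only, here is a standalone C sketch (not part of the patch) of what the change effectively selects: with __SUBARCH_HAS_LWSYNC defined, smp_wmb() emits the lighter-weight lwsync, otherwise eieio. The publish() function and the shared_data/data_ready variables are made-up names, and SMPWMB is written as a string literal here instead of being stringified from the kernel's LWSYNC macro as the real header does; build for powerpc.

/*
 * Sketch: selecting the smp_wmb() instruction.
 * lwsync orders load-load, load-store and store-store (everything except
 * store-then-load), which is all a write barrier needs; the earlier
 * optimisation commit found it cheaper than eieio on 64-bit CPUs.
 */
#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
#define __SUBARCH_HAS_LWSYNC
#endif

#ifdef __SUBARCH_HAS_LWSYNC
#define SMPWMB "lwsync"
#else
#define SMPWMB "eieio"
#endif

#define smp_wmb() __asm__ __volatile__ (SMPWMB : : : "memory")

static int shared_data;
static int data_ready;

/* Hypothetical producer: publish shared_data before raising the flag. */
void publish(int value)
{
	shared_data = value;
	smp_wmb();	/* order the data store before the flag store */
	data_ready = 1;
}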
Diffstat (limited to 'arch/powerpc')
-rw-r--r--  arch/powerpc/include/asm/synch.h   | 4 ++++
-rw-r--r--  arch/powerpc/include/asm/system.h  | 4 ++--
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/arch/powerpc/include/asm/synch.h b/arch/powerpc/include/asm/synch.h
index 45963e80f557..28f6ddbff4cf 100644
--- a/arch/powerpc/include/asm/synch.h
+++ b/arch/powerpc/include/asm/synch.h
@@ -5,6 +5,10 @@
 #include <linux/stringify.h>
 #include <asm/feature-fixups.h>
 
+#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
+#define __SUBARCH_HAS_LWSYNC
+#endif
+
 #ifndef __ASSEMBLY__
 extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
 extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
diff --git a/arch/powerpc/include/asm/system.h b/arch/powerpc/include/asm/system.h
index d6648c143322..917f515bc671 100644
--- a/arch/powerpc/include/asm/system.h
+++ b/arch/powerpc/include/asm/system.h
@@ -45,14 +45,14 @@
 #ifdef CONFIG_SMP
 
 #ifdef __SUBARCH_HAS_LWSYNC
-# define SMPWMB lwsync
+# define SMPWMB LWSYNC
 #else
 # define SMPWMB eieio
 #endif
 
 #define smp_mb() mb()
 #define smp_rmb() rmb()
-#define smp_wmb() __asm__ __volatile__ (__stringify(SMPWMB) : : :"memory")
+#define smp_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
 #define smp_read_barrier_depends() read_barrier_depends()
 #else
 #define smp_mb() barrier()