powerpc: Optimise smp_wmb
author		Nick Piggin <npiggin@suse.de>
		Tue, 11 Nov 2008 17:50:48 +0000 (17:50 +0000)
committer	Paul Mackerras <paulus@samba.org>
		Wed, 19 Nov 2008 05:04:55 +0000 (16:04 +1100)
Commit 2d1b2027626d5151fff8ef7c06ca8e7876a1a510 ("powerpc: Fixup
lwsync at runtime") removed __SUBARCH_HAS_LWSYNC, causing smp_wmb() to
revert to eieio for all CPUs.  This restores the behaviour
introduced in commit 74f0609526afddd88bef40b651da24f3167b10b2 ("powerpc:
Optimise smp_wmb on 64-bit processors").
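
For illustration only (not part of the patch): below is the standard
producer/consumer pairing that smp_wmb()/smp_rmb() exist for, along the
lines of Documentation/memory-barriers.txt.  The names producer, consumer,
payload and ready are made up; with this change, the smp_wmb() in it once
again emits the (runtime-patchable) LWSYNC on 64-bit and e500mc instead
of eieio.

/*
 * Sketch only -- assumes a 2008-era kernel context; every identifier
 * below except the barrier/helper calls is hypothetical.
 */
#include <linux/kernel.h>	/* BUG_ON() */
#include <asm/system.h>		/* smp_wmb(), smp_rmb() lived here at the time */
#include <asm/processor.h>	/* cpu_relax() */

static int payload;
static int ready;

void producer(void)
{
	payload = 42;
	smp_wmb();		/* order the payload store before the
				 * "ready" store: LWSYNC on 64-bit/e500mc,
				 * eieio elsewhere */
	ready = 1;
}

void consumer(void)
{
	while (!ready)
		cpu_relax();	/* spin; also acts as a compiler barrier */
	smp_rmb();		/* order the "ready" read before "payload" */
	BUG_ON(payload != 42);
}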

Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Paul Mackerras <paulus@samba.org>
arch/powerpc/include/asm/synch.h
arch/powerpc/include/asm/system.h

diff --git a/arch/powerpc/include/asm/synch.h b/arch/powerpc/include/asm/synch.h
index 45963e8..28f6ddb 100644
--- a/arch/powerpc/include/asm/synch.h
+++ b/arch/powerpc/include/asm/synch.h
@@ -5,6 +5,10 @@
 #include <linux/stringify.h>
 #include <asm/feature-fixups.h>
 
+#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
+#define __SUBARCH_HAS_LWSYNC
+#endif
+
 #ifndef __ASSEMBLY__
 extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
 extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
diff --git a/arch/powerpc/include/asm/system.h b/arch/powerpc/include/asm/system.h
index d6648c1..917f515 100644
--- a/arch/powerpc/include/asm/system.h
+++ b/arch/powerpc/include/asm/system.h
 #ifdef CONFIG_SMP
 
 #ifdef __SUBARCH_HAS_LWSYNC
-#    define SMPWMB      lwsync
+#    define SMPWMB      LWSYNC
 #else
 #    define SMPWMB      eieio
 #endif
 
 #define smp_mb()       mb()
 #define smp_rmb()      rmb()
-#define smp_wmb()      __asm__ __volatile__ (__stringify(SMPWMB) : : :"memory")
+#define smp_wmb()      __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
 #define smp_read_barrier_depends()     read_barrier_depends()
 #else
 #define smp_mb()       barrier()