author    Ingo Molnar <mingo@kernel.org>    2012-04-14 13:18:27 +0200
committer Ingo Molnar <mingo@kernel.org>    2012-04-14 13:19:04 +0200
commit    6ac1ef482d7ae0c690f1640bf6eb818ff9a2d91e (patch)
tree      021cc9f6b477146fcebe6f3be4752abfa2ba18a9 /arch/mips/include/asm/cmpxchg.h
parent    682968e0c425c60f0dde37977e5beb2b12ddc4cc (diff)
parent    a385ec4f11bdcf81af094c03e2444ee9b7fad2e5 (diff)
Merge branch 'perf/core' into perf/uprobes
Merge in the latest upstream (and the latest perf development tree) to prepare for tooling changes, and also to pick up the v3.4 MM changes that the uprobes code needs to take care of.

Signed-off-by: Ingo Molnar <mingo@kernel.org>
Diffstat (limited to 'arch/mips/include/asm/cmpxchg.h')
-rw-r--r--  arch/mips/include/asm/cmpxchg.h | 124
1 file changed, 124 insertions(+), 0 deletions(-)
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index d8d1c2805ac..285a41fa0b1 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -9,6 +9,130 @@
#define __ASM_CMPXCHG_H
#include <linux/irqflags.h>
+#include <asm/war.h>
+
+static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
+{
+ __u32 retval;
+
+ smp_mb__before_llsc();
+
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
+ unsigned long dummy;
+
+ __asm__ __volatile__(
+ " .set mips3 \n"
+ "1: ll %0, %3 # xchg_u32 \n"
+ " .set mips0 \n"
+ " move %2, %z4 \n"
+ " .set mips3 \n"
+ " sc %2, %1 \n"
+ " beqzl %2, 1b \n"
+ " .set mips0 \n"
+ : "=&r" (retval), "=m" (*m), "=&r" (dummy)
+ : "R" (*m), "Jr" (val)
+ : "memory");
+ } else if (kernel_uses_llsc) {
+ unsigned long dummy;
+
+ do {
+ __asm__ __volatile__(
+ " .set mips3 \n"
+ " ll %0, %3 # xchg_u32 \n"
+ " .set mips0 \n"
+ " move %2, %z4 \n"
+ " .set mips3 \n"
+ " sc %2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (retval), "=m" (*m), "=&r" (dummy)
+ : "R" (*m), "Jr" (val)
+ : "memory");
+ } while (unlikely(!dummy));
+ } else {
+ unsigned long flags;
+
+ raw_local_irq_save(flags);
+ retval = *m;
+ *m = val;
+ raw_local_irq_restore(flags); /* implies memory barrier */
+ }
+
+ smp_llsc_mb();
+
+ return retval;
+}
+
+#ifdef CONFIG_64BIT
+static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
+{
+ __u64 retval;
+
+ smp_mb__before_llsc();
+
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
+ unsigned long dummy;
+
+ __asm__ __volatile__(
+ " .set mips3 \n"
+ "1: lld %0, %3 # xchg_u64 \n"
+ " move %2, %z4 \n"
+ " scd %2, %1 \n"
+ " beqzl %2, 1b \n"
+ " .set mips0 \n"
+ : "=&r" (retval), "=m" (*m), "=&r" (dummy)
+ : "R" (*m), "Jr" (val)
+ : "memory");
+ } else if (kernel_uses_llsc) {
+ unsigned long dummy;
+
+ do {
+ __asm__ __volatile__(
+ " .set mips3 \n"
+ " lld %0, %3 # xchg_u64 \n"
+ " move %2, %z4 \n"
+ " scd %2, %1 \n"
+ " .set mips0 \n"
+ : "=&r" (retval), "=m" (*m), "=&r" (dummy)
+ : "R" (*m), "Jr" (val)
+ : "memory");
+ } while (unlikely(!dummy));
+ } else {
+ unsigned long flags;
+
+ raw_local_irq_save(flags);
+ retval = *m;
+ *m = val;
+ raw_local_irq_restore(flags); /* implies memory barrier */
+ }
+
+ smp_llsc_mb();
+
+ return retval;
+}
+#else
+extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
+#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
+#endif
+
+static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
+{
+ switch (size) {
+ case 4:
+ return __xchg_u32(ptr, x);
+ case 8:
+ return __xchg_u64(ptr, x);
+ }
+
+ return x;
+}
+
+#define xchg(ptr, x) \
+({ \
+ BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc); \
+ \
+ ((__typeof__(*(ptr))) \
+ __xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
+})
#define __HAVE_ARCH_CMPXCHG 1
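
Note on the ll/sc loops above: the store-conditional (sc/scd) fails whenever another CPU touched the line between the load-linked and the store, so the exchange must be retried. On kernels built with the R10000_LLSC_WAR workaround the retry is the branch-likely `beqzl %2, 1b` inside the asm; otherwise it is the C-level `do { ... } while (unlikely(!dummy))` loop. A minimal userspace analogue of the same pattern, using C11 atomics (illustrative only, not part of this patch; the function name xchg_u32_sketch is hypothetical): atomic_compare_exchange_weak(), like sc, is permitted to fail spuriously, which is exactly why it sits in a retry loop.

/* Userspace sketch of the ll/sc exchange loop — NOT kernel code.
 * C11's weak CAS may fail spuriously, just as MIPS sc can, so it is
 * retried until the store sticks.  seq_cst stands in for the
 * smp_mb__before_llsc()/smp_llsc_mb() barrier pair. */
#include <stdatomic.h>
#include <stdio.h>

static unsigned int xchg_u32_sketch(_Atomic unsigned int *m, unsigned int val)
{
        unsigned int old = atomic_load_explicit(m, memory_order_relaxed);

        /* Retry until the conditional store succeeds ("sc; beqz 1b"). */
        while (!atomic_compare_exchange_weak_explicit(m, &old, val,
                                                      memory_order_seq_cst,
                                                      memory_order_relaxed))
                ;       /* a failed CAS reloads the current value into old */

        return old;     /* the value that was replaced, like retval above */
}

int main(void)
{
        _Atomic unsigned int v = 0;

        printf("previous value: %u\n", xchg_u32_sketch(&v, 1));  /* 0 */
        printf("previous value: %u\n", xchg_u32_sketch(&v, 7));  /* 1 */
        return 0;
}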
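As for the xchg() macro itself: the `BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc)` rejects 1- and 2-byte operands at compile time, since only the 4-byte (__xchg_u32) and 8-byte (__xchg_u64) paths exist. The classic consumer of an unconditional atomic swap is a test-and-set lock; a hedged C11 sketch of that idiom follows (illustrative only, not part of the patch; the tas_lock type and function names are hypothetical).

/* Illustrative only — the canonical use of atomic exchange is a
 * test-and-set spinlock.  Userspace C11, not kernel code. */
#include <stdatomic.h>

typedef struct { _Atomic unsigned int locked; } tas_lock;

static void tas_lock_acquire(tas_lock *l)
{
        /* Keep swapping in 1 until the previous value was 0 (unlocked). */
        while (atomic_exchange_explicit(&l->locked, 1, memory_order_acquire))
                ;
}

static void tas_lock_release(tas_lock *l)
{
        atomic_store_explicit(&l->locked, 0, memory_order_release);
}

Because the exchange returns the old value, exactly one contender observes 0 and wins the lock; everyone else sees 1 and spins.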