author    Will Deacon <will.deacon@arm.com>  2013-06-27 12:01:51 +0100
committer Will Deacon <will.deacon@arm.com>  2013-09-30 16:42:56 +0100
commit    d779c07dd72098a7416d907494f958213b7726f3 (patch)
tree      ce55eca4b34d090604b8dfcc227e74de4eede72c /arch/arm/lib
parent    f38d999c4d16fc0fce4270374f15fbb2d8713c09 (diff)
ARM: bitops: prefetch the destination word for write prior to strex
The cost of changing a cacheline from shared to exclusive state can be significant, especially when this is triggered by an exclusive store, since it may result in having to retry the transaction.

This patch prefixes our atomic bitops implementation with prefetchw, to try and grab the line in exclusive state from the start. The testop macro is left alone, since the barrier semantics limit the usefulness of prefetching data.

Acked-by: Nicolas Pitre <nico@linaro.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
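For readers more comfortable with C than ARM assembly, the idea maps onto issuing a prefetch-for-write hint just before an atomic read-modify-write. The sketch below is illustrative only and is not the kernel code: it uses C11 atomics plus GCC/Clang's __builtin_prefetch(addr, 1) (second argument 1 means "prefetch for write", which ARMv7 CPUs with the MP extension can service with PLDW); the function name set_bit_sketch is made up for this example.

```c
#include <stdatomic.h>
#include <stdint.h>

/*
 * Illustrative sketch only, not kernel code: a C-level analogue of the
 * patched bitop macro for a set_bit-style operation on 32-bit words.
 */
static void set_bit_sketch(unsigned int nr, _Atomic uint32_t *p)
{
	_Atomic uint32_t *word = p + (nr >> 5);		/* word offset (add r1, r1, r0, lsl #2) */
	uint32_t mask = UINT32_C(1) << (nr & 31);	/* bit mask (mov r3, r2, lsl r3) */

	/*
	 * Prefetch for write: ask for the cacheline in exclusive state up
	 * front, so the exclusive store in the ldrex/strex (here:
	 * fetch-or) loop is less likely to fail and force a retry.
	 */
	__builtin_prefetch((const void *)word, 1);

	/*
	 * Relaxed ordering matches the plain (non-test) kernel bitops,
	 * which carry no barrier semantics.
	 */
	atomic_fetch_or_explicit(word, mask, memory_order_relaxed);
}
```

The kernel patch below achieves the same effect in assembly, using ALT_SMP/ALT_UP so the pldw is only executed on SMP kernels and degrades to a nop on uniprocessor builds.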
Diffstat (limited to 'arch/arm/lib')
-rw-r--r--  arch/arm/lib/bitops.h | 5 +++++
1 file changed, 5 insertions(+), 0 deletions(-)
diff --git a/arch/arm/lib/bitops.h b/arch/arm/lib/bitops.h
index d6408d1ee54..e0c68d5bb7d 100644
--- a/arch/arm/lib/bitops.h
+++ b/arch/arm/lib/bitops.h
@@ -10,6 +10,11 @@ UNWIND( .fnstart )
 	and	r3, r0, #31		@ Get bit offset
 	mov	r0, r0, lsr #5
 	add	r1, r1, r0, lsl #2	@ Get word offset
+#if __LINUX_ARM_ARCH__ >= 7
+	.arch_extension	mp
+	ALT_SMP(W(pldw)	[r1])
+	ALT_UP(W(nop))
+#endif
 	mov	r3, r2, lsl r3
 1:	ldrex	r2, [r1]
 	\instr	r2, r2, r3