author	Anton Blanchard <anton@samba.org>	2012-05-29 19:33:12 +0000
committer	Benjamin Herrenschmidt <benh@kernel.crashing.org>	2012-07-03 14:14:44 +1000
commit	fde69282b7ba2701560764b81ebb756deb98cf2b (patch)
tree	e350f7d55f90885e9ea9ee26dcb020f0fbcbaa4e /arch/powerpc/lib/copypage_64.S
parent	6f7839e542ee18770288be75114bd2e6771e1421 (diff)
powerpc: POWER7 optimised copy_page using VMX and enhanced prefetch
Implement a POWER7 optimised copy_page using VMX and enhanced prefetch
instructions. We use enhanced prefetch hints to prefetch both the load
and store side. We copy a cacheline at a time and fall back to regular
loads and stores if we are unable to use VMX (eg we are in an
interrupt).

The following microbenchmark was used to assess the impact of the patch:

http://ozlabs.org/~anton/junkcode/page_fault_file.c

We test MAP_PRIVATE page faults across a 1GB file, 100 times:

# time ./page_fault_file -p -l 1G -i 100

Before: 22.25s
After:  18.89s

17% faster

Signed-off-by: Anton Blanchard <anton@samba.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
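[Editor's note] For readers who cannot fetch the linked benchmark, the sketch below shows the general shape of a MAP_PRIVATE page-fault test like the one described: mmap a large file privately and write one byte per page so each page takes a copy-on-write fault, which is the path that should end up in copy_page. The file name, option handling and structure here are assumptions for illustration, not the contents of page_fault_file.c.

/*
 * Hedged sketch of a MAP_PRIVATE page-fault microbenchmark.
 * Assumes ./testfile already exists and is at least 1GB; the real
 * page_fault_file.c takes -p/-l/-i options and is not reproduced here.
 */
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <unistd.h>

#define FILE_SIZE	(1UL << 30)	/* 1GB, matching the -l 1G run above */
#define ITERATIONS	100		/* matching -i 100 */

int main(void)
{
	int fd = open("testfile", O_RDONLY);
	if (fd < 0) {
		perror("open");
		return 1;
	}

	long page_size = sysconf(_SC_PAGESIZE);

	for (int i = 0; i < ITERATIONS; i++) {
		/* MAP_PRIVATE: writes stay private, so each touched page
		 * takes a copy-on-write fault and gets copied. */
		char *p = mmap(NULL, FILE_SIZE, PROT_READ | PROT_WRITE,
			       MAP_PRIVATE, fd, 0);
		if (p == MAP_FAILED) {
			perror("mmap");
			return 1;
		}

		for (size_t off = 0; off < FILE_SIZE; off += page_size)
			p[off] = 1;	/* one write per page: one CoW fault each */

		munmap(p, FILE_SIZE);
	}

	close(fd);
	return 0;
}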
Diffstat (limited to 'arch/powerpc/lib/copypage_64.S')
-rw-r--r--	arch/powerpc/lib/copypage_64.S	4
1 files changed, 4 insertions, 0 deletions
diff --git a/arch/powerpc/lib/copypage_64.S b/arch/powerpc/lib/copypage_64.S
index 53dcb6b1b70..9f9434a8526 100644
--- a/arch/powerpc/lib/copypage_64.S
+++ b/arch/powerpc/lib/copypage_64.S
@@ -17,7 +17,11 @@ PPC64_CACHES:
 	.section	".text"
 
 _GLOBAL(copy_page)
+BEGIN_FTR_SECTION
 	lis	r5,PAGE_SIZE@h
+FTR_SECTION_ELSE
+	b	.copypage_power7
+ALT_FTR_SECTION_END_IFCLR(CPU_FTR_VMX_COPY)
 	ori	r5,r5,PAGE_SIZE@l
 BEGIN_FTR_SECTION
 	ld	r10,PPC64_CACHES@toc(r2)
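[Editor's note] The hunk is a CPU feature section: at boot the feature-fixup code keeps the plain lis/ori path when CPU_FTR_VMX_COPY is clear and otherwise patches in the branch to .copypage_power7. The C below is only an illustrative sketch of that dispatch; the real selection is done by patching instructions once at boot, not by a per-call test, and the names other than copypage_power7 and CPU_FTR_VMX_COPY are made up for the sketch.

/*
 * Illustrative sketch of the dispatch the feature section sets up.
 * Everything except copypage_power7 and CPU_FTR_VMX_COPY is a
 * stand-in name invented for this example.
 */
#define CPU_FTR_VMX_COPY	(1UL << 0)		/* stand-in feature bit */

extern unsigned long cur_cpu_features;			/* stand-in feature word */
extern void copypage_power7(void *to, void *from);	/* VMX + enhanced-prefetch copy */
extern void copy_page_generic(void *to, void *from);	/* hypothetical name for the existing loop */

static inline int cpu_has_feature(unsigned long feature)
{
	return (cur_cpu_features & feature) != 0;
}

void copy_page(void *to, void *from)
{
	if (cpu_has_feature(CPU_FTR_VMX_COPY))	/* POWER7 with usable VMX */
		copypage_power7(to, from);
	else
		copy_page_generic(to, from);	/* plain cacheline load/store copy */
}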