Diffstat (limited to 'arch/sparc64/kernel/ktlb.S')
-rw-r--r--  arch/sparc64/kernel/ktlb.S  |  29
1 file changed, 27 insertions, 2 deletions
diff --git a/arch/sparc64/kernel/ktlb.S b/arch/sparc64/kernel/ktlb.S
index 883180be3d5..ae1dac17bc8 100644
--- a/arch/sparc64/kernel/ktlb.S
+++ b/arch/sparc64/kernel/ktlb.S
@@ -133,8 +133,33 @@ kvmap_dtlb_4v:
brgez,pn %g4, kvmap_dtlb_nonlinear
nop
- sethi %hi(kern_linear_pte_xor), %g2
- ldx [%g2 + %lo(kern_linear_pte_xor)], %g2
+ sethi %hi(kpte_linear_bitmap), %g2
+ or %g2, %lo(kpte_linear_bitmap), %g2
+
+ /* Clear the PAGE_OFFSET top virtual bits, then shift
+ * down to get a 256MB physical address index.
+ */
+ sllx %g4, 21, %g5
+ mov 1, %g7
+ srlx %g5, 21 + 28, %g5
+
+ /* Don't try this at home kids... this depends upon srlx
+ * only taking the low 6 bits of the shift count in %g5.
+ */
+ sllx %g7, %g5, %g7
+
+ /* Divide by 64 to get the offset into the bitmask. */
+ srlx %g5, 6, %g5
+
+ /* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
+ ldx [%g2 + %g5], %g2
+ andcc %g2, %g7, %g0
+ sethi %hi(kern_linear_pte_xor), %g5
+ or %g5, %lo(kern_linear_pte_xor), %g5
+ bne,a,pt %xcc, 1f
+ add %g5, 8, %g5
+
+1: ldx [%g5], %g2
.globl kvmap_linear_patch
kvmap_linear_patch: