Diffstat (limited to 'arch/powerpc/mm')
-rw-r--r-- | arch/powerpc/mm/tlb_low_64e.S | 19 | +++++++++++--------
1 file changed, 11 insertions, 8 deletions
diff --git a/arch/powerpc/mm/tlb_low_64e.S b/arch/powerpc/mm/tlb_low_64e.S
index 356e8b41fb0..57c4d662be3 100644
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -296,9 +296,10 @@ itlb_miss_fault_bolted:
  * r14 = page table base
  * r13 = PACA
  * r11 = tlb_per_core ptr
- * r10 = cpu number
+ * r10 = crap (free to use)
  */
 tlb_miss_common_e6500:
+BEGIN_FTR_SECTION
 	/*
 	 * Search if we already have an indirect entry for that virtual
 	 * address, and if we do, bail out.
@@ -309,6 +310,7 @@ tlb_miss_common_e6500:
 	lhz	r10,PACAPACAINDEX(r13)
 	cmpdi	r15,0
 	cmpdi	cr1,r15,1	/* set cr1.eq = 0 for non-recursive */
+	addi	r10,r10,1
 	bne	2f
 	stbcx.	r10,0,r11
 	bne	1b
@@ -322,18 +324,17 @@ tlb_miss_common_e6500:
 	b	1b
 	.previous
 
-	mfspr	r15,SPRN_MAS2
+	mfspr	r15,SPRN_MAS1
+	mfspr	r10,SPRN_MAS2
 
 	tlbsx	0,r16
+	mtspr	SPRN_MAS2,r10
 	mfspr	r10,SPRN_MAS1
+	mtspr	SPRN_MAS1,r15
+
 	andis.	r10,r10,MAS1_VALID@h
 	bne	tlb_miss_done_e6500
-
-	/* Undo MAS-damage from the tlbsx */
-	mfspr	r10,SPRN_MAS1
-	oris	r10,r10,MAS1_VALID@h
-	mtspr	SPRN_MAS1,r10
-	mtspr	SPRN_MAS2,r15
+END_FTR_SECTION_IFSET(CPU_FTR_SMT)
 
 	/* Now, we need to walk the page tables. First check if we are in
 	 * range.
@@ -394,11 +395,13 @@ tlb_miss_common_e6500:
 
 tlb_miss_done_e6500:
 	.macro	tlb_unlock_e6500
+BEGIN_FTR_SECTION
 	beq	cr1,1f	/* no unlock if lock was recursively grabbed */
 	li	r15,0
 	isync
 	stb	r15,0(r11)
 1:
+END_FTR_SECTION_IFSET(CPU_FTR_SMT)
 	.endm
 
 	tlb_unlock_e6500
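The lbarx/stbcx. lock around the indirect-entry search, and the matching unlock in tlb_unlock_e6500, are now wrapped in a CPU feature section keyed on CPU_FTR_SMT. Below is a minimal sketch of that idiom, not part of this patch: the function name, register use and include list are illustrative only. Code between BEGIN_FTR_SECTION and END_FTR_SECTION_IFSET(CPU_FTR_SMT) is kept as written when the CPU advertises CPU_FTR_SMT and is patched to nops at boot otherwise, so a single-threaded core never pays for the lock.

/* Sketch only: shows the feature-fixup macros used in the diff above,
 * assuming the usual kernel asm includes; names here are made up.
 */
#include <asm/ppc_asm.h>
#include <asm/cputable.h>
#include <asm/feature-fixups.h>

_GLOBAL(ftr_section_example)
BEGIN_FTR_SECTION			/* kept only if CPU_FTR_SMT is set */
	li	r15,0
	isync
	stb	r15,0(r11)		/* e.g. release a per-core lock byte */
END_FTR_SECTION_IFSET(CPU_FTR_SMT)	/* replaced with nops at boot otherwise */
	blr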