Diffstat (limited to 'arch/arm/boot/compressed/head.S')
-rw-r--r--	arch/arm/boot/compressed/head.S	71
1 files changed, 21 insertions, 50 deletions
diff --git a/arch/arm/boot/compressed/head.S b/arch/arm/boot/compressed/head.S
index dc7e8ce8e6b..b8c64b80baf 100644
--- a/arch/arm/boot/compressed/head.S
+++ b/arch/arm/boot/compressed/head.S
@@ -567,6 +567,12 @@ __armv3_mpu_cache_on:
 		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
 		mov	pc, lr
 
+#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
+#define CB_BITS	0x08
+#else
+#define CB_BITS	0x0c
+#endif
+
 __setup_mmu:	sub	r3, r4, #16384		@ Page directory size
 		bic	r3, r3, #0xff		@ Align the pointer
 		bic	r3, r3, #0x3f00
@@ -578,17 +584,14 @@ __setup_mmu:	sub	r3, r4, #16384		@ Page directory size
 		mov	r9, r0, lsr #18
 		mov	r9, r9, lsl #18		@ start of RAM
 		add	r10, r9, #0x10000000	@ a reasonable RAM size
-		mov	r1, #0x12
-		orr	r1, r1, #3 << 10
+		mov	r1, #0x12		@ XN|U + section mapping
+		orr	r1, r1, #3 << 10	@ AP=11
 		add	r2, r3, #16384
 1:		cmp	r1, r9			@ if virt > start of RAM
-#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
-		orrhs	r1, r1, #0x08		@ set cacheable
-#else
-		orrhs	r1, r1, #0x0c		@ set cacheable, bufferable
-#endif
-		cmp	r1, r10			@ if virt > end of RAM
-		bichs	r1, r1, #0x0c		@ clear cacheable, bufferable
+		cmphs	r10, r1			@   && end of RAM > virt
+		bic	r1, r1, #0x1c		@ clear XN|U + C + B
+		orrlo	r1, r1, #0x10		@ Set XN|U for non-RAM
+		orrhs	r1, r1, r6		@ set RAM section settings
 		str	r1, [r0], #4		@ 1:1 mapping
 		add	r1, r1, #1048576
 		teq	r0, r2
@@ -599,7 +602,7 @@ __setup_mmu:	sub	r3, r4, #16384		@ Page directory size
  * so there is no map overlap problem for up to 1 MB compressed kernel.
  * If the execution is in RAM then we would only be duplicating the above.
  */
-		mov	r1, #0x1e
+		orr	r1, r6, #0x04		@ ensure B is set for this
 		orr	r1, r1, #3 << 10
 		mov	r2, pc
 		mov	r2, r2, lsr #20
@@ -620,6 +623,7 @@ __arm926ejs_mmu_cache_on:
 __armv4_mmu_cache_on:
 		mov	r12, lr
 #ifdef CONFIG_MMU
+		mov	r6, #CB_BITS | 0x12	@ U
 		bl	__setup_mmu
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
@@ -641,6 +645,7 @@ __armv7_mmu_cache_on:
 #ifdef CONFIG_MMU
 		mrc	p15, 0, r11, c0, c1, 4	@ read ID_MMFR0
 		tst	r11, #0xf		@ VMSA
+		movne	r6, #CB_BITS | 0x02	@ !XN
 		blne	__setup_mmu
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c10, 4	@ drain write buffer
@@ -655,7 +660,7 @@ __armv7_mmu_cache_on:
 		orr	r0, r0, #1 << 25	@ big-endian page tables
 #endif
 		orrne	r0, r0, #1		@ MMU enabled
-		movne	r1, #-1
+		movne	r1, #0xfffffffd		@ domain 0 = client
 		mcrne	p15, 0, r3, c2, c0, 0	@ load page table pointer
 		mcrne	p15, 0, r1, c3, c0, 0	@ load domain access control
 #endif
@@ -668,6 +673,7 @@ __armv7_mmu_cache_on:
 
 __fa526_cache_on:
 		mov	r12, lr
+		mov	r6, #CB_BITS | 0x12	@ U
 		bl	__setup_mmu
 		mov	r0, #0
 		mcr	p15, 0, r0, c7, c7, 0	@ Invalidate whole cache
@@ -680,18 +686,6 @@ __fa526_cache_on:
 		mcr	p15, 0, r0, c8, c7, 0	@ flush UTLB
 		mov	pc, r12
 
-__arm6_mmu_cache_on:
-		mov	r12, lr
-		bl	__setup_mmu
-		mov	r0, #0
-		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
-		mcr	p15, 0, r0, c5, c0, 0	@ invalidate whole TLB v3
-		mov	r0, #0x30
-		bl	__common_mmu_cache_on
-		mov	r0, #0
-		mcr	p15, 0, r0, c5, c0, 0	@ invalidate whole TLB v3
-		mov	pc, r12
-
 __common_mmu_cache_on:
 #ifndef CONFIG_THUMB2_KERNEL
 #ifndef DEBUG
@@ -756,16 +750,6 @@ call_cache_fn:	adr	r12, proc_types
 		.align	2
 		.type	proc_types,#object
 proc_types:
-		.word	0x41560600		@ ARM6/610
-		.word	0xffffffe0
-		W(b)	__arm6_mmu_cache_off	@ works, but slow
-		W(b)	__arm6_mmu_cache_off
-		mov	pc, lr
- THUMB(		nop				)
-@		b	__arm6_mmu_cache_on	@ untested
-@		b	__arm6_mmu_cache_off
-@		b	__armv3_mmu_cache_flush
-
 		.word	0x00000000		@ old ARM ID
 		.word	0x0000f000
 		mov	pc, lr
@@ -777,8 +761,10 @@ proc_types:
 
 		.word	0x41007000		@ ARM7/710
 		.word	0xfff8fe00
-		W(b)	__arm7_mmu_cache_off
-		W(b)	__arm7_mmu_cache_off
+		mov	pc, lr
+ THUMB(		nop				)
+		mov	pc, lr
+ THUMB(		nop				)
 		mov	pc, lr
  THUMB(		nop				)
 
@@ -977,21 +963,6 @@ __armv7_mmu_cache_off:
 		mcr	p15, 0, r0, c7, c5, 4	@ ISB
 		mov	pc, r12
 
-__arm6_mmu_cache_off:
-		mov	r0, #0x00000030		@ ARM6 control reg.
-		b	__armv3_mmu_cache_off
-
-__arm7_mmu_cache_off:
-		mov	r0, #0x00000070		@ ARM7 control reg.
-		b	__armv3_mmu_cache_off
-
-__armv3_mmu_cache_off:
-		mcr	p15, 0, r0, c1, c0, 0	@ turn MMU and cache off
-		mov	r0, #0
-		mcr	p15, 0, r0, c7, c0, 0	@ invalidate whole cache v3
-		mcr	p15, 0, r0, c5, c0, 0	@ invalidate whole TLB v3
-		mov	pc, lr
-
 /*
  * Clean and flush the cache to maintain consistency.
  *
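
Editor's note on the new mapping flags (a sketch to aid review, not part of the commit): the loop in __setup_mmu builds ARM short-descriptor first-level section entries, where bits[1:0]=10 mark a section, B is bit 2, C is bit 3, bit 4 is XN on ARMv7 (the must-be-one "U" bit on earlier cores), and AP sits in bits[11:10]. The C sketch below mirrors the patched loop: RAM sections get the cacheability bits that each cache_on routine loads into r6 (CB_BITS drops B for writethrough caches), while everything outside RAM gets XN so the CPU cannot speculatively execute from device memory. START_OF_RAM and RAM_SIZE are illustrative assumptions; in the assembly r9 is derived from the current pc, so the real values are platform-dependent.

/* Hypothetical C rendering of the patched __setup_mmu loop. */
#include <stdint.h>
#include <stdio.h>

#define SECTION		0x02u		/* bits[1:0] = 0b10: section entry */
#define BIT_B		0x04u		/* B: bufferable */
#define BIT_C		0x08u		/* C: cacheable */
#define BIT_XN_U	0x10u		/* bit 4: XN on ARMv7, 'U' on older cores */
#define AP_RW		(3u << 10)	/* AP = 0b11: full read/write access */

#define START_OF_RAM	0x80000000u	/* assumed value of r9 */
#define RAM_SIZE	0x10000000u	/* r10 - r9: "a reasonable RAM size" */

int main(void)
{
	/* r6 as loaded by __armv7_mmu_cache_on: CB_BITS | 0x02, XN clear */
	uint32_t ram_bits = BIT_C | BIT_B | SECTION;

	for (uint32_t i = 0; i < 4096; i++) {	/* 16 KB table: 4096 x 1 MB */
		uint32_t virt = i << 20;
		uint32_t desc = virt | AP_RW | SECTION;	/* mov r1 / orr r1 */
		if (virt >= START_OF_RAM && virt - START_OF_RAM < RAM_SIZE)
			desc |= ram_bits;	/* orrhs r1, r1, r6 */
		else
			desc |= BIT_XN_U;	/* orrlo r1, r1, #0x10 */
		if (virt == 0 || virt == START_OF_RAM)
			printf("0x%08x -> 0x%08x\n",
			       (unsigned)virt, (unsigned)desc);
		/* the asm then stores it: str r1, [r0], #4  @ 1:1 mapping */
	}
	return 0;
}

The change from "movne r1, #-1" to 0xfffffffd fits the same scheme: DACR holds two bits per domain, and XN and AP are only enforced for "client" domains (0b01). 0xfffffffd makes domain 0, which these section entries use, a client while leaving domains 1-15 as manager (0b11), so the newly set XN bit actually takes effect.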