Diffstat (limited to 'arch/parisc/kernel/entry.S')
-rw-r--r-- | arch/parisc/kernel/entry.S | 189
1 file changed, 86 insertions, 103 deletions
diff --git a/arch/parisc/kernel/entry.S b/arch/parisc/kernel/entry.S
index be0f07f2fa5..9af4b22a6d7 100644
--- a/arch/parisc/kernel/entry.S
+++ b/arch/parisc/kernel/entry.S
@@ -30,14 +30,14 @@
 * - save registers to kernel stack and handle in assembly or C
 */
+#include <asm/psw.h>
#include <asm/assembly.h>	/* for LDREG/STREG defines */
#include <asm/pgtable.h>
-#include <asm/psw.h>
#include <asm/signal.h>
#include <asm/unistd.h>
#include <asm/thread_info.h>
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
#define CMPIB	cmpib,*
#define CMPB	cmpb,*
#define COND(x)	*x
@@ -67,19 +67,22 @@
 /* Switch to virtual mapping, trashing only %r1 */
 .macro virt_map
-	rsm	PSW_SM_Q,%r0
-	tovirt_r1 %r29
-	mfsp	%sr7, %r1
-	or,=	%r0,%r1,%r0 /* Only save sr7 in sr3 if sr7 != 0 */
-	mtsp	%r1, %sr3
+	/* pcxt_ssm_bug */
+	rsm	PSW_SM_I, %r0	/* barrier for "Relied upon Translation */
	mtsp	%r0, %sr4
	mtsp	%r0, %sr5
+	mfsp	%sr7, %r1
+	or,=	%r0,%r1,%r0 /* Only save sr7 in sr3 if sr7 != 0 */
+	mtsp	%r1, %sr3
+	tovirt_r1 %r29
+	load32	KERNEL_PSW, %r1
+
+	rsm	PSW_SM_QUIET,%r0	/* second "heavy weight" ctl op */
	mtsp	%r0, %sr6
	mtsp	%r0, %sr7
-	load32	KERNEL_PSW, %r1
-	mtctl	%r1, %cr22
	mtctl	%r0, %cr17	/* Clear IIASQ tail */
	mtctl	%r0, %cr17	/* Clear IIASQ head */
+	mtctl	%r1, %ipsw
	load32	4f, %r1
	mtctl	%r1, %cr18	/* Set IIAOQ tail */
	ldo	4(%r1), %r1
@@ -214,7 +217,7 @@
va = r8		/* virtual address for which the trap occured */
spc = r24	/* space for which the trap occured */
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	/*
	 * itlb miss interruption handler (parisc 1.1 - 32 bit)
@@ -236,7 +239,7 @@
	.macro	itlb_20 code
	mfctl	%pcsq, spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	b	itlb_miss_20w
#else
	b	itlb_miss_20
@@ -246,7 +249,7 @@
	.align	32
	.endm
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	/*
	 * naitlb miss interruption handler (parisc 1.1 - 32 bit)
	 *
@@ -283,7 +286,7 @@
	.macro	naitlb_20 code
	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	b	itlb_miss_20w
#else
	b	itlb_miss_20
@@ -296,7 +299,7 @@
	.align	32
	.endm
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	/*
	 * dtlb miss interruption handler (parisc 1.1 - 32 bit)
	 */
@@ -318,7 +321,7 @@
	.macro	dtlb_20 code
	mfctl	%isr, spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	b	dtlb_miss_20w
#else
	b	dtlb_miss_20
@@ -328,7 +331,7 @@
	.align	32
	.endm
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	/* nadtlb miss interruption handler (parisc 1.1 - 32 bit) */
	.macro	nadtlb_11 code
@@ -346,7 +349,7 @@
	.macro	nadtlb_20 code
	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	b	nadtlb_miss_20w
#else
	b	nadtlb_miss_20
@@ -356,7 +359,7 @@
	.align	32
	.endm
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	/*
	 * dirty bit trap interruption handler (parisc 1.1 - 32 bit)
	 */
@@ -378,7 +381,7 @@
	.macro	dbit_20 code
	mfctl	%isr,spc
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	b	dbit_trap_20w
#else
	b	dbit_trap_20
@@ -391,7 +394,7 @@
	/* The following are simple 32 vs 64 bit instruction
	 * abstractions for the macros */
	.macro	EXTR	reg1,start,length,reg2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	extrd,u	\reg1,32+\start,\length,\reg2
#else
	extrw,u	\reg1,\start,\length,\reg2
@@ -399,7 +402,7 @@
	.endm
	.macro	DEP	reg1,start,length,reg2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	depd	\reg1,32+\start,\length,\reg2
#else
	depw	\reg1,\start,\length,\reg2
@@ -407,7 +410,7 @@
	.endm
	.macro	DEPI	val,start,length,reg
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	depdi	\val,32+\start,\length,\reg
#else
	depwi	\val,\start,\length,\reg
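The EXTR/DEP/DEPI macros above are the width abstraction the rest of the file builds on; the hunks only switch the test from the compiler-provided __LP64__ macro to the kernel's own CONFIG_64BIT option. A minimal sketch of the idiom (not part of the patch; the registers are arbitrary examples): PA-RISC numbers bit positions from the most-significant bit and the position operand names the rightmost bit of the field, so a field that ends at bit 15 of a 32-bit word ends at bit 32+15 of a 64-bit register, which is why the wide forms add 32 to the position.

	/* sketch only: extract an 8-bit field ending at bit 15 (32-bit numbering) */
	#ifdef CONFIG_64BIT
	extrd,u	%r26, 32+15, 8, %r28	/* wide form: position shifted into the low word */
	#else
	extrw,u	%r26, 15, 8, %r28	/* narrow form */
	#endif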
@@ -418,7 +421,7 @@
	 * fault.  We have to extract this and place it in the va,
	 * zeroing the corresponding bits in the space register */
	.macro	space_adjust	spc,va,tmp
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	extrd,u	\spc,63,SPACEID_SHIFT,\tmp
	depd	%r0,63,SPACEID_SHIFT,\spc
	depd	\tmp,31,SPACEID_SHIFT,\va
@@ -476,7 +479,7 @@
	bb,>=,n	\pmd,_PxD_PRESENT_BIT,\fault
	DEP	%r0,31,PxD_FLAG_SHIFT,\pmd	/* clear flags */
	copy	\pmd,%r9
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	shld	%r9,PxD_VALUE_SHIFT,\pmd
#else
	shlw	%r9,PxD_VALUE_SHIFT,\pmd
@@ -607,7 +610,7 @@
	.macro	do_alias	spc,tmp,tmp1,va,pte,prot,fault
	cmpib,COND(<>),n 0,\spc,\fault
	ldil	L%(TMPALIAS_MAP_START),\tmp
-#if defined(__LP64__) && (TMPALIAS_MAP_START >= 0x80000000)
+#if defined(CONFIG_64BIT) && (TMPALIAS_MAP_START >= 0x80000000)
	/* on LP64, ldi will sign extend into the upper 32 bits,
	 * which is behaviour we don't want */
	depdi	0,31,32,\tmp
@@ -621,7 +624,7 @@
	 * OK, it is in the temp alias region, check whether "from" or "to".
	 * Check "subtle" note in pacache.S re: r23/r26.
	 */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	extrd,u,*=	\va,41,1,%r0
#else
	extrw,u,=	\va,9,1,%r0
@@ -688,7 +691,7 @@ fault_vector_20:
	def	30
	def	31
-#ifndef __LP64__
+#ifndef CONFIG_64BIT
	.export fault_vector_11
@@ -761,7 +764,7 @@ __kernel_thread:
	copy	%r30, %r1
	ldo	PT_SZ_ALGN(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	/* Yo, function pointers in wide mode are little structs... -PB */
	ldd	24(%r26), %r2
	STREG	%r2, PT_GR27(%r1)	/* Store childs %dp */
@@ -777,7 +780,7 @@ __kernel_thread:
	or	%r26, %r24, %r26      /* will have kernel mappings.	 */
	ldi	1, %r25			/* stack_start, signals kernel thread */
	stw	%r0, -52(%r30)	     	/* user_tid */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
	BL	do_fork, %r2
@@ -806,7 +809,7 @@ ret_from_kernel_thread:
	LDREG	TI_TASK-THREAD_SZ_ALGN(%r30), %r1
	LDREG	TASK_PT_GR25(%r1), %r26
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	LDREG	TASK_PT_GR27(%r1), %r27
	LDREG	TASK_PT_GR22(%r1), %r22
#endif
@@ -814,11 +817,16 @@ ret_from_kernel_thread:
	ble	0(%sr7, %r1)
	copy	%r31, %r2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
	loadgp				/* Thread could have been in a module */
#endif
+#ifndef CONFIG_64BIT
	b	sys_exit
+#else
+	load32	sys_exit, %r1
+	bv	%r0(%r1)
+#endif
	ldi	0, %r26
	.import sys_execve, code
@@ -830,7 +838,7 @@ __execve:
	STREG	%r26, PT_GR26(%r16)
	STREG	%r25, PT_GR25(%r16)
	STREG	%r24, PT_GR24(%r16)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
	BL	sys_execve, %r2
@@ -855,6 +863,7 @@ __execve:
_switch_to:
	STREG	 %r2, -RP_OFFSET(%r30)
+	callee_save_float
	callee_save
	load32	_switch_to_ret, %r2
@@ -871,6 +880,7 @@ _switch_to:
_switch_to_ret:
	mtctl	%r0, %cr0		/* Needed for single stepping */
	callee_rest
+	callee_rest_float
	LDREG	-RP_OFFSET(%r30), %r2
	bv	%r0(%r2)
@@ -888,9 +898,6 @@ _switch_to_ret:
	 * this way, then we will need to copy %sr3 in to PT_SR[3..7], and
	 * adjust IASQ[0..1].
	 *
-	 * Note that the following code uses a "relied upon translation".
-	 * See the parisc ACD for details. The ssm is necessary due to a
-	 * PCXT bug.
	 */
	.align 4096
@@ -911,7 +918,7 @@ syscall_exit_rfi:
	STREG	%r19,PT_IAOQ1(%r16)
	LDREG	PT_PSW(%r16),%r19
	load32	USER_PSW_MASK,%r1
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	load32	USER_PSW_HI_MASK,%r20
	depd	%r20,31,32,%r1
#endif
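The __kernel_thread hunk above relies on the wide-mode calling convention: on the 64-bit runtime a function pointer is "a little struct", i.e. it points at a small descriptor rather than at code, and the new code pulls the child's %dp out of that descriptor at offset 24. A minimal sketch of reading such a descriptor (not from the patch; the offsets follow the conventional 64-bit PA-RISC layout, entry address at offset 16 and global pointer at offset 24, which is an assumption here since only offset 24 is visible in the hunk):

	#ifdef CONFIG_64BIT
	/* sketch only: %r26 holds a wide-mode function "pointer" */
	ldd	24(%r26), %r2	/* descriptor word 3: callee's %dp (assumed layout) */
	ldd	16(%r26), %r26	/* descriptor word 2: the real code address (assumed layout) */
	#endif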
@@ -955,7 +962,7 @@ intr_return:
	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) amount
	** irq_stat[] is defined using ____cacheline_aligned.
	*/
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	shld	%r1, 6, %r20
#else
	shlw	%r1, 5, %r20
@@ -963,9 +970,6 @@ intr_return:
	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
#endif /* CONFIG_SMP */
-	LDREG	IRQSTAT_SIRQ_PEND(%r19),%r20	/* hardirq.h: unsigned long */
-	cmpib,<>,n 0,%r20,intr_do_softirq /* forward */
-
intr_check_resched:
	/* check for reschedule */
@@ -985,24 +989,19 @@ intr_restore:
	rest_fp		%r1
	rest_general	%r29
-	/* Create a "relied upon translation" PA 2.0 Arch. F-5 */
-	ssm	0,%r0
-	nop
-	nop
-	nop
-	nop
-	nop
-	nop
-	nop
+	/* inverse of virt_map */
+	pcxt_ssm_bug
+	rsm	PSW_SM_QUIET,%r0	/* prepare for rfi */
	tophys_r1	%r29
-	rsm	(PSW_SM_Q|PSW_SM_P|PSW_SM_D|PSW_SM_I),%r0
	/* Restore space id's and special cr's from PT_REGS
-	 * structure pointed to by r29 */
+	 * structure pointed to by r29
+	 */
	rest_specials	%r29
-	/* Important: Note that rest_stack restores r29
-	 * last (we are using it)! It also restores r1 and r30. */
+	/* IMPORTANT: rest_stack restores r29 last (we are using it)!
+	 * It also restores r1 and r30.
+	 */
	rest_stack
	rfi
@@ -1015,17 +1014,6 @@ intr_restore:
	nop
	nop
-	.import do_softirq,code
-intr_do_softirq:
-	bl	do_softirq,%r2
-#ifdef __LP64__
-	ldo	-16(%r30),%r29		/* Reference param save area */
-#else
-	nop
-#endif
-	b	intr_check_resched
-	nop
-
	.import schedule,code
intr_do_resched:
	/* Only do reschedule if we are returning to user space */
@@ -1036,12 +1024,17 @@ intr_do_resched:
	CMPIB=	0,%r20,intr_restore /* backward */
	nop
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
	ldil	L%intr_check_sig, %r2
+#ifndef CONFIG_64BIT
	b	schedule
+#else
+	load32	schedule, %r20
+	bv	%r0(%r20)
+#endif
	ldo	R%intr_check_sig(%r2), %r2
@@ -1064,7 +1057,7 @@ intr_do_signal:
	copy	%r0, %r24			/* unsigned long in_syscall */
	copy	%r16, %r25			/* struct pt_regs *regs */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29			/* Reference param save area */
#endif
@@ -1088,7 +1081,7 @@ intr_extint:
	mfctl	%cr31,%r1
	copy	%r30,%r17
	/* FIXME! depi below has hardcoded idea of interrupt stack size (32k)*/
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	depdi	0,63,15,%r17
#else
	depi	0,31,15,%r17
@@ -1115,7 +1108,7 @@ intr_extint:
	ldil	L%intr_return, %r2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29	/* Reference param save area */
#endif
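The intr_do_resched hunk above (like the ret_from_kernel_thread hunk earlier) keeps the plain `b` to an external symbol only for the narrow kernel; the wide kernel materializes the full address and branches through a register instead, presumably because the target can lie outside the reach of a PC-relative branch there. A minimal sketch of the pattern (not from the patch; far_target is a placeholder label):

	#ifndef CONFIG_64BIT
	b	far_target		/* PC-relative branch, limited displacement */
	#else
	load32	far_target, %r20	/* materialize the address... */
	bv	%r0(%r20)		/* ...and branch through the register */
	#endif
	nop				/* delay slot, shared by both variants */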
@@ -1153,15 +1146,17 @@ intr_save:
	CMPIB=,n	6,%r26,skip_save_ior
-	/* save_specials left ipsw value in r8 for us to test */
	mfctl	%cr20, %r16 /* isr */
+	nop	/* serialize mfctl on PA 2.0 to avoid 4 cycle penalty */
	mfctl	%cr21, %r17 /* ior */
-#ifdef __LP64__
+
+#ifdef CONFIG_64BIT
	/*
	 * If the interrupted code was running with W bit off (32 bit),
	 * clear the b bits (bits 0 & 1) in the ior.
+	 * save_specials left ipsw value in r8 for us to test.
	 */
	extrd,u,*<>	%r8,PSW_W_BIT,1,%r0
	depdi	0,1,2,%r17
@@ -1192,7 +1187,7 @@ skip_save_ior:
	loadgp
	copy	%r29, %r25	/* arg1 is pt_regs */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29	/* Reference param save area */
#endif
@@ -1230,7 +1225,7 @@ skip_save_ior:
spc = r24	/* space for which the trap occured */
ptp = r25	/* page directory/page table pointer */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
dtlb_miss_20w:
	space_adjust	spc,va,t0
@@ -1487,10 +1482,10 @@ nadtlb_emulate:
	add,l	%r1,%r24,%r1	/* doesn't affect c/b bits */
nadtlb_nullify:
-	mfctl	%cr22,%r8	/* Get ipsw */
+	mfctl	%ipsw,%r8
	ldil	L%PSW_N,%r9
	or	%r8,%r9,%r8	/* Set PSW_N */
-	mtctl	%r8,%cr22
+	mtctl	%r8,%ipsw
	rfir
	nop
@@ -1521,7 +1516,7 @@ nadtlb_probe_check:
	nop
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
itlb_miss_20w:
	/*
@@ -1588,7 +1583,7 @@ itlb_miss_20:
#endif
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
dbit_trap_20w:
	space_adjust	spc,va,t0
@@ -1797,7 +1792,7 @@ sys_fork_wrapper:
	STREG	%r2,-RP_OFFSET(%r30)
	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
@@ -1847,10 +1842,11 @@ sys_clone_wrapper:
	STREG	%r2,-RP_OFFSET(%r30)
	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
+	/* WARNING - Clobbers r19 and r21, userspace must save these! */
	STREG	%r2,PT_GR19(%r1)	/* save for child */
	STREG	%r30,PT_GR21(%r1)
	BL	sys_clone,%r2
@@ -1869,7 +1865,7 @@ sys_vfork_wrapper:
	STREG	%r2,-RP_OFFSET(%r30)
	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
@@ -1897,10 +1893,10 @@ sys_vfork_wrapper:
	STREG	%r2,-RP_OFFSET(%r30)
	ldo	FRAME_SIZE(%r30),%r30
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
-	bl	\execve,%r2
+	BL	\execve,%r2
	copy	%r1,%arg0
	ldo	-FRAME_SIZE(%r30),%r30
@@ -1923,7 +1919,7 @@ error_\execve:
sys_execve_wrapper:
	execve_wrapper sys_execve
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	.export sys32_execve_wrapper
	.import sys32_execve
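The wrapper hunks above keep inserting `ldo -16(%r30),%r29` before calls into C on the wide kernel; as the comments describe it, %r29 is made to point at the "reference param save area" just below the stack pointer before every external call, which is the idiom this file uses throughout. A minimal sketch of the call sequence (not from the patch; some_c_function is a placeholder, and BL is the call helper the file already uses):

	ldo	FRAME_SIZE(%r30), %r30		/* open a call frame */
	#ifdef CONFIG_64BIT
	ldo	-16(%r30), %r29			/* %r29 -> reference/param save area */
	#endif
	BL	some_c_function, %r2		/* sketch only: placeholder callee */
	nop					/* delay slot */
	ldo	-FRAME_SIZE(%r30), %r30		/* pop the frame on return */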
@@ -1937,7 +1933,7 @@ sys_rt_sigreturn_wrapper:
	ldo	TASK_REGS(%r26),%r26	/* get pt regs */
	/* Don't save regs, we are going to restore them from sigcontext. */
	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	FRAME_SIZE(%r30), %r30
	BL	sys_rt_sigreturn,%r2
	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -1968,7 +1964,7 @@ sys_sigaltstack_wrapper:
	ldo	TASK_REGS(%r1),%r24	/* get pt regs */
	LDREG	TASK_PT_GR30(%r24),%r24
	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	FRAME_SIZE(%r30), %r30
	b,l	do_sigaltstack,%r2
	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -1982,7 +1978,7 @@ sys_sigaltstack_wrapper:
	bv	%r0(%r2)
	nop
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	.export sys32_sigaltstack_wrapper
sys32_sigaltstack_wrapper:
	/* Get the user stack pointer */
@@ -2006,7 +2002,7 @@ sys_rt_sigsuspend_wrapper:
	reg_save %r24
	STREG	%r2, -RP_OFFSET(%r30)
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	FRAME_SIZE(%r30), %r30
	b,l	sys_rt_sigsuspend,%r2
	ldo	-16(%r30),%r29		/* Reference param save area */
@@ -2079,7 +2075,7 @@ syscall_check_bh:
	ldw	TI_CPU-THREAD_SZ_ALGN-FRAME_SIZE(%r30),%r26 /* cpu # */
	/* shift left ____cacheline_aligned (aka L1_CACHE_BYTES) bits */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	shld	%r26, 6, %r20
#else
	shlw	%r26, 5, %r20
@@ -2087,9 +2083,6 @@ syscall_check_bh:
	add	%r19,%r20,%r19	/* now have &irq_stat[smp_processor_id()] */
#endif /* CONFIG_SMP */
-	LDREG	IRQSTAT_SIRQ_PEND(%r19),%r20	/* hardirq.h: unsigned long */
-	cmpib,<>,n 0,%r20,syscall_do_softirq /* forward */
-
syscall_check_resched:
	/* check for reschedule */
@@ -2144,7 +2137,7 @@ syscall_restore:
	depi	3,31,2,%r31	/* ensure return to user mode. */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	/* decide whether to reset the wide mode bit
	 *
	 * For a syscall, the W bit is stored in the lowest bit
@@ -2227,20 +2220,10 @@ pt_regs_ok:
	b	intr_restore
	nop
-	.import do_softirq,code
-syscall_do_softirq:
-	bl	do_softirq,%r2
-	nop
-	/* NOTE: We enable I-bit incase we schedule later,
-	 * and we might be going back to userspace if we were
-	 * traced. */
-	b	syscall_check_resched
-	ssm	PSW_SM_I, %r0	/* do_softirq returns with I bit off */
-
	.import schedule,code
syscall_do_resched:
	BL	schedule,%r2
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#else
	nop
@@ -2260,7 +2243,7 @@ syscall_do_signal:
	ldi	1, %r24			/* unsigned long in_syscall */
-#ifdef __LP64__
+#ifdef CONFIG_64BIT
	ldo	-16(%r30),%r29		/* Reference param save area */
#endif
	BL	do_signal,%r2
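The syscall_check_bh hunk above indexes the per-CPU irq_stat[] array with a bare shift, and the only change is which shift count the configuration selects. A minimal sketch of what those constants mean (not from the patch; it assumes L1_CACHE_BYTES is 64 on 64-bit and 32 on 32-bit parisc kernels, which is what the shift counts imply):

	/* sketch only: turn a CPU number in %r26 into the byte offset of that
	 * CPU's ____cacheline_aligned irq_stat[] slot.
	 */
	#ifdef CONFIG_64BIT
	shld	%r26, 6, %r20		/* offset = cpu * 64 */
	#else
	shlw	%r26, 5, %r20		/* offset = cpu * 32 */
	#endif
	add	%r19, %r20, %r19	/* %r19 = &irq_stat[cpu] */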