Diffstat (limited to 'arch/sh/kernel/cpu/sh3/entry.S')
 -rw-r--r--  arch/sh/kernel/cpu/sh3/entry.S | 485
 1 file changed, 212 insertions(+), 273 deletions(-)
diff --git a/arch/sh/kernel/cpu/sh3/entry.S b/arch/sh/kernel/cpu/sh3/entry.S
index 4004073f98c..262db6ec067 100644
--- a/arch/sh/kernel/cpu/sh3/entry.S
+++ b/arch/sh/kernel/cpu/sh3/entry.S
@@ -2,7 +2,7 @@
  * arch/sh/kernel/cpu/sh3/entry.S
  *
  * Copyright (C) 1999, 2000, 2002  Niibe Yutaka
- * Copyright (C) 2003 - 2006  Paul Mundt
+ * Copyright (C) 2003 - 2012  Paul Mundt
  *
  * This file is subject to the terms and conditions of the GNU General Public
  * License.  See the file "COPYING" in the main directory of this archive
@@ -14,8 +14,10 @@
 #include <asm/asm-offsets.h>
 #include <asm/thread_info.h>
 #include <asm/unistd.h>
-#include <asm/cpu/mmu_context.h>
+#include <cpu/mmu_context.h>
 #include <asm/page.h>
+#include <asm/cache.h>
+#include <asm/thread_info.h>
 
 ! NOTE:
 ! GNU as (as of 2.9.1) changes bf/s into bt/s and bra, when the address
@@ -52,10 +54,6 @@
  *	syscall #
  *
  */
-#if defined(CONFIG_KGDB_NMI)
-NMI_VEC = 0x1c0			! Must catch early for debounce
-#endif
-
 /* Offsets to the stack */
 OFF_R0	= 0		/* Return value. New ABI also arg4 */
 OFF_R1	= 4		/* New ABI: arg5 */
@@ -70,7 +68,6 @@ OFF_PC	= (16*4)
 OFF_SR	= (16*4+8)
 OFF_TRA	= (16*4+6*4)
 
-
 #define k0	r0
 #define k1	r1
 #define k2	r2
@@ -112,34 +109,33 @@ OFF_TRA	= (16*4+6*4)
 #if defined(CONFIG_MMU)
 	.align	2
 ENTRY(tlb_miss_load)
-	bra	call_dpf
+	bra	call_handle_tlbmiss
 	 mov	#0, r5
 
 	.align	2
 ENTRY(tlb_miss_store)
-	bra	call_dpf
-	 mov	#1, r5
+	bra	call_handle_tlbmiss
+	 mov	#FAULT_CODE_WRITE, r5
 
 	.align	2
 ENTRY(initial_page_write)
-	bra	call_dpf
-	 mov	#1, r5
+	bra	call_handle_tlbmiss
+	 mov	#FAULT_CODE_INITIAL, r5
 
 	.align	2
 ENTRY(tlb_protection_violation_load)
-	bra	call_dpf
-	 mov	#0, r5
+	bra	call_do_page_fault
+	 mov	#FAULT_CODE_PROT, r5
 
 	.align	2
 ENTRY(tlb_protection_violation_store)
-	bra	call_dpf
-	 mov	#1, r5
+	bra	call_do_page_fault
+	 mov	#(FAULT_CODE_PROT | FAULT_CODE_WRITE), r5
 
-call_dpf:
+call_handle_tlbmiss:
 	mov.l	1f, r0
 	mov	r5, r8
 	mov.l	@r0, r6
-	mov	r6, r9
 	mov.l	2f, r0
 	sts	pr, r10
 	jsr	@r0
@@ -150,16 +146,23 @@ call_dpf:
 	 lds	r10, pr
 	rts
 	 nop
-0:	mov.l	3f, r0
-	mov	r9, r6
+0:	mov	r8, r5
+call_do_page_fault:
+	mov.l	1f, r0
+	mov.l	@r0, r6
+
+	mov.l	3f, r0
+	mov.l	4f, r1
+	mov	r15, r4
 	jmp	@r0
-	 mov	r15, r4
+	 lds	r1, pr
 
 	.align 2
 1:	.long	MMU_TEA
-2:	.long	__do_page_fault
+2:	.long	handle_tlbmiss
 3:	.long	do_page_fault
+4:	.long	ret_from_exception
 
 	.align	2
 ENTRY(address_error_load)
@@ -187,44 +190,35 @@ call_dae:
 #if defined(CONFIG_SH_STANDARD_BIOS)
 /* Unwind the stack and jmp to the debug entry */
 ENTRY(sh_bios_handler)
-	mov.l	@r15+, r0
-	mov.l	@r15+, r1
-	mov.l	@r15+, r2
-	mov.l	@r15+, r3
-	mov.l	@r15+, r4
-	mov.l	@r15+, r5
-	mov.l	@r15+, r6
-	mov.l	@r15+, r7
-	stc	sr, r8
-	mov.l	1f, r9			! BL =1, RB=1, IMASK=0x0F
-	or	r9, r8
-	ldc	r8, sr			! here, change the register bank
-	mov.l	@r15+, r8
-	mov.l	@r15+, r9
-	mov.l	@r15+, r10
-	mov.l	@r15+, r11
-	mov.l	@r15+, r12
-	mov.l	@r15+, r13
-	mov.l	@r15+, r14
-	mov.l	@r15+, k0
-	ldc.l	@r15+, spc
-	lds.l	@r15+, pr
-	mov.l	@r15+, k1
-	ldc.l	@r15+, gbr
-	lds.l	@r15+, mach
-	lds.l	@r15+, macl
-	mov	k0, r15
+	mov.l	1f, r8
+	bsr	restore_regs
+	 nop
+
+	lds	k2, pr			! restore pr
+	mov	k4, r15			!
 	mov.l	2f, k0
 	mov.l	@k0, k0
 	jmp	@k0
-	 ldc	k1, ssr
+	 ldc	k3, ssr
 	.align	2
 1:	.long	0x300000f0
 2:	.long	gdb_vbr_vector
 #endif /* CONFIG_SH_STANDARD_BIOS */
 
-restore_all:
+! restore_regs()
+! - restore r0, r1, r2, r3, r4, r5, r6, r7 from the stack
+! - switch bank
+! - restore r8, r9, r10, r11, r12, r13, r14, r15 from the stack
+! - restore spc, pr*, ssr, gbr, mach, macl, skip default tra
+! k2 returns original pr
+! k3 returns original sr
+! k4 returns original stack pointer
+! r8 passes SR bitmask, overwritten with restored data on return
+! r9 trashed
+! BL=0 on entry, on exit BL=1 (depending on r8).
+
+ENTRY(restore_regs)
 	mov.l	@r15+, r0
 	mov.l	@r15+, r1
 	mov.l	@r15+, r2
@@ -234,10 +228,9 @@ restore_all:
 	mov.l	@r15+, r6
 	mov.l	@r15+, r7
 	!
-	stc	sr, r8
-	mov.l	7f, r9
-	or	r9, r8			! BL =1, RB=1
-	ldc	r8, sr			! here, change the register bank
+	stc	sr, r9
+	or	r8, r9
+	ldc	r9, sr
 	!
 	mov.l	@r15+, r8
 	mov.l	@r15+, r9
@@ -248,53 +241,27 @@ restore_all:
 	mov.l	@r15+, r14
 	mov.l	@r15+, k4		! original stack pointer
 	ldc.l	@r15+, spc
-	lds.l	@r15+, pr
+	mov.l	@r15+, k2		! original PR
 	mov.l	@r15+, k3		! original SR
 	ldc.l	@r15+, gbr
 	lds.l	@r15+, mach
 	lds.l	@r15+, macl
-	add	#4, r15			! Skip syscall number
-	!
-#ifdef CONFIG_SH_DSP
-	mov.l	@r15+, k0		! DSP mode marker
-	mov.l	5f, k1
-	cmp/eq	k0, k1			! Do we have a DSP stack frame?
-	bf	skip_restore
-
-	stc	sr, k0			! Enable CPU DSP mode
-	or	k1, k0			! (within kernel it may be disabled)
-	ldc	k0, sr
-	mov	r2, k0			! Backup r2
-
-	! Restore DSP registers from stack
-	mov	r15, r2
-	movs.l	@r2+, a1
-	movs.l	@r2+, a0g
-	movs.l	@r2+, a1g
-	movs.l	@r2+, m0
-	movs.l	@r2+, m1
-	mov	r2, r15
-
-	lds.l	@r15+, a0
-	lds.l	@r15+, x0
-	lds.l	@r15+, x1
-	lds.l	@r15+, y0
-	lds.l	@r15+, y1
-	lds.l	@r15+, dsr
-	ldc.l	@r15+, rs
-	ldc.l	@r15+, re
-	ldc.l	@r15+, mod
-
-	mov	k0, r2			! Restore r2
-skip_restore:
-#endif
+	rts
+	 add	#4, r15			! Skip syscall number
+
+restore_all:
+	mov.l	7f, r8
+	bsr	restore_regs
+	 nop
+
+	lds	k2, pr			! restore pr
 	!
 	! Calculate new SR value
 	mov	k3, k2			! original SR value
-	mov	#0xf0, k1
+	mov	#0xfffffff0, k1
 	extu.b	k1, k1
 	not	k1, k1
-	and	k1, k2			! Mask orignal SR value
+	and	k1, k2			! Mask original SR value
 	!
 	mov	k3, k0			! Calculate IMASK-bits
 	shlr2	k0
@@ -307,22 +274,12 @@ skip_restore:
 6:	or	k0, k2			! Set the IMASK-bits
 	ldc	k2, ssr
 	!
-#if defined(CONFIG_KGDB_NMI)
-	! Clear in_nmi
-	mov.l	6f, k0
-	mov	#0, k1
-	mov.b	k1, @k0
-#endif
-	mov.l	@r15+, k2		! restore EXPEVT
 	mov	k4, r15
 	rte
 	 nop
 
 	.align	2
 5:	.long	0x00001000	! DSP
-#ifdef CONFIG_KGDB_NMI
-6:	.long	in_nmi
-#endif
 7:	.long	0x30000000	! common exception handler
 
@@ -336,81 +293,21 @@ skip_restore:
 ENTRY(vbr_base)
 	.long	0
 !
+! 0x100: General exception vector
+!
 	.balign 	256,0,256
 general_exception:
-	mov.l	1f, k2
-	mov.l	2f, k3
-#ifdef CONFIG_CPU_SUBTYPE_SHX3
-	mov.l	@k2, k2
-
-	! Is EXPEVT larger than 0x800?
-	mov	#0x8, k0
-	shll8	k0
-	cmp/hs	k0, k2
-	bf	0f
-
-	! then add 0x580 (k2 is 0xd80 or 0xda0)
-	mov	#0x58, k0
-	shll2	k0
-	shll2	k0
-	add	k0, k2
-0:
-	bra	handle_exception
-	 nop
-#else
-	bra	handle_exception
-	 mov.l	@k2, k2
-#endif
-	.align	2
-1:	.long	EXPEVT
-2:	.long	ret_from_exception
-!
-!
-
-	.balign 	1024,0,1024
-tlb_miss:
-	mov.l	1f, k2
-	mov.l	4f, k3
-	bra	handle_exception
-	 mov.l	@k2, k2
-!
-	.balign 	512,0,512
-interrupt:
-	mov.l	2f, k2
-	mov.l	3f, k3
-#if defined(CONFIG_KGDB_NMI)
-	! Debounce (filter nested NMI)
-	mov.l	@k2, k0
-	mov.l	5f, k1
-	cmp/eq	k1, k0
-	bf	0f
-	mov.l	6f, k1
-	tas.b	@k1
-	bt	0f
-	rte
-	 nop
-	.align	2
-5:	.long	NMI_VEC
-6:	.long	in_nmi
-0:
-#endif /* defined(CONFIG_KGDB_NMI) */
 	bra	handle_exception
-	 mov	#-1, k2		! interrupt exception marker
+	 sts	pr, k3		! save original pr value in k3
 
-	.align	2
-1:	.long	EXPEVT
-2:	.long	INTEVT
-3:	.long	ret_from_irq
-4:	.long	ret_from_exception
-
-!
-!
-	.align	2
-ENTRY(handle_exception)
-	! Using k0, k1 for scratch registers (r0_bank1, r1_bank),
-	! save all registers onto stack.
-	!
+! prepare_stack()
+! - roll back gRB
+! - switch to kernel stack
+! k0 returns original sp (after roll back)
+! k1 trashed
+! k2 trashed
+prepare_stack:
 #ifdef CONFIG_GUSA
 	! Check for roll back gRB (User and Kernel)
 	mov	r15, k0
@@ -430,7 +327,7 @@
 2:	mov	k1, r15		! SP = r1
 1:
 #endif
-
+	! Switch to kernel stack if needed
 	stc	ssr, k0		! Is it from kernel space?
 	shll	k0		! Check MD bit (bit30) by shifting it into...
 	shll	k0		!       ...the T bit
@@ -443,65 +340,67 @@
 	add	current, k1
 	mov	k1, r15		! change to kernel stack
 	!
-1:	mov.l	2f, k1
-	!
-#ifdef CONFIG_SH_DSP
-	mov.l	r2, @-r15	! Save r2, we need another reg
-	stc	sr, k4
-	mov.l	1f, r2
-	tst	r2, k4		! Check if in DSP mode
-	mov.l	@r15+, r2	! Restore r2 now
-	bt/s	skip_save
-	 mov	#0, k4		! Set marker for no stack frame
-
-	mov	r2, k4		! Backup r2 (in k4) for later
-
-	! Save DSP registers on stack
-	stc.l	mod, @-r15
-	stc.l	re, @-r15
-	stc.l	rs, @-r15
-	sts.l	dsr, @-r15
-	sts.l	y1, @-r15
-	sts.l	y0, @-r15
-	sts.l	x1, @-r15
-	sts.l	x0, @-r15
-	sts.l	a0, @-r15
-
-	! GAS is broken, does not generate correct "movs.l Ds,@-As" instr.
-
-	! FIXME: Make sure that this is still the case with newer toolchains,
-	! as we're not at all interested in supporting ancient toolchains at
-	! this point. -- PFM.
-
-	mov	r15, r2
-	.word	0xf653		! movs.l	a1, @-r2
-	.word	0xf6f3		! movs.l	a0g, @-r2
-	.word	0xf6d3		! movs.l	a1g, @-r2
-	.word	0xf6c3		! movs.l	m0, @-r2
-	.word	0xf6e3		! movs.l	m1, @-r2
-	mov	r2, r15
-
-	mov	k4, r2		! Restore r2
-	mov.l	1f, k4		! Force DSP stack frame
-skip_save:
-	mov.l	k4, @-r15	! Push DSP mode marker onto stack
-#endif
-	! Save the user registers on the stack.
-	mov.l	k2, @-r15	! EXPEVT
+1:
+	rts
+	 nop
 
-	mov	#-1, k4
-	mov.l	k4, @-r15	! set TRA (default: -1)
-	!
+!
+! 0x400: Instruction and Data TLB miss exception vector
+!
+	.balign 	1024,0,1024
+tlb_miss:
+	sts	pr, k3		! save original pr value in k3
+
+handle_exception:
+	mova	exception_data, k0
+
+	! Setup stack and save DSP context (k0 contains original r15 on return)
+	bsr	prepare_stack
+	 PREF(k0)
+
+	! Save registers / Switch to bank 0
+	mov.l	5f, k2		! vector register address
+	mov.l	1f, k4		! SR bits to clear in k4
+	bsr	save_regs	! needs original pr value in k3
+	 mov.l	@k2, k2		! read out vector and keep in k2
+
+handle_exception_special:
+	setup_frame_reg
+
+	! Setup return address and jump to exception handler
+	mov.l	7f, r9		! fetch return address
+	stc	r2_bank, r0	! k2 (vector)
+	mov.l	6f, r10
+	shlr2	r0
+	shlr	r0
+	mov.l	@(r0, r10), r10
+	jmp	@r10
+	 lds	r9, pr		! put return address in pr
+
+	.align	L1_CACHE_SHIFT
+
+! save_regs()
+! - save default tra, macl, mach, gbr, ssr, pr* and spc on the stack
+! - save r15*, r14, r13, r12, r11, r10, r9, r8 on the stack
+! - switch bank
+! - save r7, r6, r5, r4, r3, r2, r1, r0 on the stack
+! k0 contains original stack pointer*
+! k1 trashed
+! k3 passes original pr*
+! k4 passes SR bitmask
+! BL=1 on entry, on exit BL=0.
+
+ENTRY(save_regs)
+	mov	#-1, r1
+	mov.l	k1, @-r15	! set TRA (default: -1)
 	sts.l	macl, @-r15
 	sts.l	mach, @-r15
 	stc.l	gbr, @-r15
 	stc.l	ssr, @-r15
-	sts.l	pr, @-r15
+	mov.l	k3, @-r15	! original pr in k3
 	stc.l	spc, @-r15
-	!
-	lds	k3, pr		! Set the return address to pr
-	!
-	mov.l	k0, @-r15	! save orignal stack
+
+	mov.l	k0, @-r15	! original stack pointer in k0
 	mov.l	r14, @-r15
 	mov.l	r13, @-r15
 	mov.l	r12, @-r15
@@ -509,13 +408,23 @@
 	mov.l	r10, @-r15
 	mov.l	r9, @-r15
 	mov.l	r8, @-r15
-	!
-	stc	sr, r8		! Back to normal register bank, and
-	or	k1, r8		! Block all interrupts
-	mov.l	3f, k1
-	and	k1, r8		! ...
-	ldc	r8, sr		! ...changed here.
-	!
+
+	mov.l	0f, k3		! SR bits to set in k3
+
+	! fall-through
+
+! save_low_regs()
+! - modify SR for bank switch
+! - save r7, r6, r5, r4, r3, r2, r1, r0 on the stack
+! k3 passes bits to set in SR
+! k4 passes bits to clear in SR
+
+ENTRY(save_low_regs)
+	stc	sr, r8
+	or	k3, r8
+	and	k4, r8
+	ldc	r8, sr
+
 	mov.l	r7, @-r15
 	mov.l	r6, @-r15
 	mov.l	r5, @-r15
@@ -523,52 +432,82 @@
 	mov.l	r3, @-r15
 	mov.l	r2, @-r15
 	mov.l	r1, @-r15
-	mov.l	r0, @-r15
+	rts
+	 mov.l	r0, @-r15
+
+!
+! 0x600: Interrupt / NMI vector
+!
+	.balign 	512,0,512
+ENTRY(handle_interrupt)
+	sts	pr, k3		! save original pr value in k3
+	mova	exception_data, k0
+
+	! Setup stack and save DSP context (k0 contains original r15 on return)
+	bsr	prepare_stack
+	 PREF(k0)
+
+	! Save registers / Switch to bank 0
+	mov.l	1f, k4		! SR bits to clear in k4
+	bsr	save_regs	! needs original pr value in k3
+	 mov	#-1, k2		! default vector kept in k2
+
+	setup_frame_reg
+
+	stc	sr, r0		! get status register
+	shlr2	r0
+	and	#0x3c, r0
+	cmp/eq	#0x3c, r0
+	bf	9f
+	TRACE_IRQS_OFF
+9:
+
+	! Setup return address and jump to do_IRQ
+	mov.l	4f, r9		! fetch return address
+	lds	r9, pr		! put return address in pr
+	mov.l	2f, r4
+	mov.l	3f, r9
+	mov.l	@r4, r4		! pass INTEVT vector as arg0
+
+	shlr2	r4
+	shlr	r4
+	mov	r4, r0		! save vector->jmp table offset for later
+
+	shlr2	r4		! vector to IRQ# conversion
+	add	#-0x10, r4
+
+	cmp/pz	r4		! is it a valid IRQ?
+	bt	10f
 
 	/*
-	 * This gets a bit tricky.. in the INTEVT case we don't want to use
-	 * the VBR offset as a destination in the jump call table, since all
-	 * of the destinations are the same. In this case, (interrupt) sets
-	 * a marker in r2 (now r2_bank since SR.RB changed), which we check
-	 * to determine the exception type. For all other exceptions, we
-	 * forcibly read EXPEVT from memory and fix up the jump address, in
-	 * the interrupt exception case we jump to do_IRQ() and defer the
-	 * INTEVT read until there. As a bonus, we can also clean up the SR.RB
-	 * checks that do_IRQ() was doing..
+	 * We got here as a result of taking the INTEVT path for something
+	 * that isn't a valid hard IRQ, therefore we bypass the do_IRQ()
+	 * path and special case the event dispatch instead. This is the
+	 * expected path for the NMI (and any other brilliantly implemented
+	 * exception), which effectively wants regular exception dispatch
+	 * but is unfortunately reported through INTEVT rather than
+	 * EXPEVT. Grr.
	 */
-	stc	r2_bank, r8
-	cmp/pz	r8
-	bf	interrupt_exception
-	shlr2	r8
-	shlr	r8
-	mov.l	4f, r9
-	add	r8, r9
-	mov.l	@r9, r9
+	mov.l	6f, r9
+	mov.l	@(r0, r9), r9
 	jmp	@r9
-	 nop
-	rts
-	 nop
+	 mov	r15, r8		! trap handlers take saved regs in r8
 
-	.align	2
-1:	.long	0x00001000	! DSP=1
-2:	.long	0x000080f0	! FD=1, IMASK=15
-3:	.long	0xcfffffff	! RB=0, BL=0
-4:	.long	exception_handling_table
+10:
+	jmp	@r9		! Off to do_IRQ() we go.
+	 mov	r15, r5		! pass saved registers as arg1
 
-interrupt_exception:
-	mov.l	1f, r9
-	mov.l	2f, r4
-	mov.l	@r4, r4
-	jmp	@r9
-	 mov	r15, r5
+ENTRY(exception_none)
 	rts
 	 nop
 
-	.align	2
-1:	.long	do_IRQ
+	.align	L1_CACHE_SHIFT
+exception_data:
+0:	.long	0x000080f0	! FD=1, IMASK=15
+1:	.long	0xcfffffff	! RB=0, BL=0
 2:	.long	INTEVT
-
-	.align	2
-ENTRY(exception_none)
-	rts
-	 nop
+3:	.long	do_IRQ
+4:	.long	ret_from_irq
+5:	.long	EXPEVT
+6:	.long	exception_handling_table
+7:	.long	ret_from_exception
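
The OFF_R0/OFF_PC/OFF_SR/OFF_TRA constants at the top of the file, together with the push order documented for save_regs() and save_low_regs(), describe the exception frame that ends up on the kernel stack. As a reading aid, here is a minimal C sketch of that layout, lowest address first; the struct and field names are only illustrative (the frame corresponds to SH's struct pt_regs), and 32-bit registers are assumed as on SH-3:

#include <stddef.h>

typedef unsigned int u32;	/* assumption: 32-bit registers, as on SH-3 */

struct exception_frame {
	u32 regs[16];	/* OFF_R0..: r0 (return value / arg4) .. r15 (sp) */
	u32 pc;		/* OFF_PC  = 16*4:   pushed from SPC by save_regs() */
	u32 pr;		/*                   original PR, passed in via k3 */
	u32 sr;		/* OFF_SR  = 16*4+8: pushed from SSR */
	u32 gbr;
	u32 mach;
	u32 macl;
	u32 tra;	/* OFF_TRA = 16*4+6*4: trap number, -1 by default */
};

/* The offsets line up with the OFF_* definitions in the file. */
_Static_assert(offsetof(struct exception_frame, pc)  == 16 * 4,         "OFF_PC");
_Static_assert(offsetof(struct exception_frame, sr)  == 16 * 4 + 8,     "OFF_SR");
_Static_assert(offsetof(struct exception_frame, tra) == 16 * 4 + 6 * 4, "OFF_TRA");

Because save_regs()/save_low_regs() push with pre-decrement, the last value stored (r0) lands at the lowest address, which is why the frame reads r0..r15 upward even though the pushes happen in the opposite order.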
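prepare_stack() checks the MD bit of the saved SR by shifting it twice into the T bit ("Is it from kernel space?") and only moves r15 onto the kernel stack for exceptions taken from user mode. A rough C equivalent of that decision is sketched below; THREAD_SIZE, the helper name, and its arguments are assumptions for illustration (the actual stack-size computation falls between the hunks shown above):

#define SR_MD		(1u << 30)	/* SR.MD: 1 = privileged (kernel) mode */
#define THREAD_SIZE	8192		/* assumption: two 4K pages, as on SH-3 */

/* Hypothetical helper mirroring prepare_stack's stack selection. */
static unsigned long pick_stack(unsigned long ssr, unsigned long sp,
				unsigned long current_thread_info)
{
	if (ssr & SR_MD)
		return sp;				/* kernel -> kernel: keep r15 */

	return current_thread_info + THREAD_SIZE;	/* user -> kernel stack top */
}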
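handle_interrupt turns the INTEVT value into both a byte offset into exception_handling_table (shlr2 + shlr, kept in r0) and an IRQ number ((vector >> 5) - 0x10, kept in r4), then either heads off to do_IRQ() or falls back to regular exception dispatch for NMI-style events. A hedged C sketch of that decoding, assuming vectors are spaced 0x20 apart and hard IRQs begin at vector 0x200; the names are illustrative, and the real table entries are entered with the saved register frame in r8 rather than called as ordinary C functions:

typedef void (*exception_handler_t)(void);

extern exception_handler_t exception_handling_table[];
extern void do_IRQ(unsigned int irq, void *regs);

static void dispatch_intevt(unsigned long intevt, void *regs)
{
	unsigned long index = intevt >> 5;	/* vectors are 0x20 bytes apart */
	long irq = (long)index - 0x10;		/* hard IRQs begin at vector 0x200 */

	if (irq >= 0)				/* cmp/pz r4 ... bt 10f */
		do_IRQ(irq, regs);		/* "Off to do_IRQ() we go." */
	else
		/* NMI and friends: regular exception dispatch via the table */
		exception_handling_table[index]();
}

For the NMI vector 0x1c0, for example, index is 0xe and irq is -2, so the table path is taken, matching the comment in the diff about NMI being reported through INTEVT rather than EXPEVT.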
