Diffstat (limited to 'arch/xtensa/kernel/align.S')
 arch/xtensa/kernel/align.S | 75 +++++++++++++++++++++++++++++++++++----------------------------------------
 1 file changed, 35 insertions(+), 40 deletions(-)
diff --git a/arch/xtensa/kernel/align.S b/arch/xtensa/kernel/align.S
index 74b1e90ef08..d4cef6039a5 100644
--- a/arch/xtensa/kernel/align.S
+++ b/arch/xtensa/kernel/align.S
@@ -16,14 +16,9 @@
*/
#include <linux/linkage.h>
-#include <asm/ptrace.h>
-#include <asm/ptrace.h>
#include <asm/current.h>
-#include <asm/offsets.h>
-#include <asm/pgtable.h>
+#include <asm/asm-offsets.h>
#include <asm/processor.h>
-#include <asm/page.h>
-#include <asm/thread_info.h>
#if XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION
@@ -151,9 +146,9 @@
* a0: trashed, original value saved on stack (PT_AREG0)
* a1: a1
* a2: new stack pointer, original in DEPC
- * a3: dispatch table
+ * a3: a3
* depc: a2, original value saved on stack (PT_DEPC)
- * excsave_1: a3
+ * excsave_1: dispatch table
*
* PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
* < VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
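[editor's note] The entry-condition comment above is easiest to read next to the test it describes: the handler classifies the exception by the DEPC value saved at PT_DEPC. A minimal C sketch; the constant's value below is assumed purely for illustration (the real definition lives in the xtensa ptrace headers):

    #include <stdint.h>

    /* Illustrative only: a saved DEPC at or above the threshold marks
     * a double exception; anything below it is a regular exception.
     */
    #define VALID_DOUBLE_EXCEPTION_ADDRESS 0x40000000 /* assumed value */

    static int is_double_exception(uint32_t saved_depc)
    {
            return saved_depc >= VALID_DOUBLE_EXCEPTION_ADDRESS;
    }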
@@ -175,15 +170,14 @@ ENTRY(fast_unaligned)
s32i a7, a2, PT_AREG7
s32i a8, a2, PT_AREG8
- rsr a0, DEPC
- xsr a3, EXCSAVE_1
+ rsr a0, depc
s32i a0, a2, PT_AREG2
s32i a3, a2, PT_AREG3
/* Keep value of SAR in a0 */
- rsr a0, SAR
- rsr a8, EXCVADDR # load unaligned memory address
+ rsr a0, sar
+ rsr a8, excvaddr # load unaligned memory address
/* Now, identify one of the following load/store instructions.
*
@@ -202,7 +196,7 @@ ENTRY(fast_unaligned)
/* Extract the instruction that caused the unaligned access. */
- rsr a7, EPC_1 # load exception address
+ rsr a7, epc1 # load exception address
movi a3, ~3
and a3, a3, a7 # mask lower bits
@@ -216,7 +210,7 @@ ENTRY(fast_unaligned)
extui a5, a4, INSN_OP0, 4 # get insn.op0 nibble
-#if XCHAL_HAVE_NARROW
+#if XCHAL_HAVE_DENSITY
_beqi a5, OP0_L32I_N, .Lload # L32I.N, jump
addi a6, a5, -OP0_S32I_N
_beqz a6, .Lstore # S32I.N, do a store
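[editor's note] The extui above is the field extraction that drives the whole decoder: it pulls the 4-bit op0 nibble out of the fetched instruction word and compares it against the density (narrow) load/store opcodes. A hedged C sketch of the same step; the field offset and opcode values are assumptions for illustration, not taken from this diff:

    #include <stdint.h>

    /* Sketch of the decode step: extract insn.op0 and test it against
     * the narrow load opcode.  Offset and opcode values are assumed.
     */
    #define INSN_OP0   0    /* op0 bit offset on little-endian cores */
    #define OP0_L32I_N 0x8  /* assumed L32I.N opcode */

    static int is_narrow_load(uint32_t insn)
    {
            unsigned op0 = (insn >> INSN_OP0) & 0xf;
            return op0 == OP0_L32I_N;
    }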
@@ -251,7 +245,7 @@ ENTRY(fast_unaligned)
#endif
__src_b a3, a5, a6 # a3 has the data word
-#if XCHAL_HAVE_NARROW
+#if XCHAL_HAVE_DENSITY
addi a7, a7, 2 # increment PC (assume 16-bit insn)
extui a5, a4, INSN_OP0, 4
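[editor's note] The __src_b above is a funnel shift through SAR that stitches the result together from two aligned reads. Roughly what it computes, sketched in C for the little-endian case; the function name and the 64-bit intermediate are illustrative stand-ins for the shift pair:

    #include <stdint.h>

    /* Two aligned loads plus a right funnel shift by SAR (the byte
     * offset times eight) yield the unaligned word.  Sketch only.
     */
    static uint32_t emulate_unaligned_load(uintptr_t addr)
    {
            uint32_t *base = (uint32_t *)(addr & ~(uintptr_t)3);
            unsigned sar = (addr & 3) * 8;  /* shift amount in bits */
            uint64_t pair = ((uint64_t)base[1] << 32) | base[0];

            return (uint32_t)(pair >> sar); /* "a3 has the data word" */
    }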
@@ -279,17 +273,17 @@ ENTRY(fast_unaligned)
1:
-#if XCHAL_HAVE_LOOP
- rsr a3, LEND # check if we reached LEND
- bne a7, a3, 1f
- rsr a3, LCOUNT # and LCOUNT != 0
- beqz a3, 1f
- addi a3, a3, -1 # decrement LCOUNT and set
- rsr a7, LBEG # set PC to LBEGIN
- wsr a3, LCOUNT
+#if XCHAL_HAVE_LOOPS
+ rsr a5, lend # check if we reached LEND
+ bne a7, a5, 1f
+ rsr a5, lcount # and LCOUNT != 0
+ beqz a5, 1f
+ addi a5, a5, -1 # decrement LCOUNT and set
+ rsr a7, lbeg # set PC to LBEGIN
+ wsr a5, lcount
#endif
-1: wsr a7, EPC_1 # skip load instruction
+1: wsr a7, epc1 # skip load instruction
extui a4, a4, INSN_T, 4 # extract target register
movi a5, .Lload_table
addx8 a4, a4, a5
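[editor's note] The LBEG/LEND/LCOUNT fixup above has one job: when the emulated instruction was the last one of a zero-overhead loop and iterations remain, the resume PC must wrap back to LBEG and LCOUNT must be decremented, exactly as the hardware would have done. A minimal C sketch, assuming (as in the code above) that the PC has already been advanced past the emulated instruction:

    #include <stdint.h>

    struct loop_regs { uint32_t lbeg, lend, lcount; };

    /* pc is EPC1 already advanced past the emulated instruction. */
    static uint32_t loop_fixup(uint32_t pc, struct loop_regs *lr)
    {
            if (pc == lr->lend && lr->lcount != 0) {
                    lr->lcount--;        /* one iteration consumed */
                    return lr->lbeg;     /* wrap to loop start */
            }
            return pc;                   /* not at LEND: fall through */
    }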
@@ -336,7 +330,7 @@ ENTRY(fast_unaligned)
movi a6, 0 # mask: ffffffff:00000000
-#if XCHAL_HAVE_NARROW
+#if XCHAL_HAVE_DENSITY
addi a7, a7, 2 # incr. PC,assume 16-bit instruction
extui a5, a4, INSN_OP0, 4 # extract OP0
@@ -359,17 +353,17 @@ ENTRY(fast_unaligned)
/* Get memory address */
1:
-#if XCHAL_HAVE_LOOP
- rsr a3, LEND # check if we reached LEND
- bne a7, a3, 1f
- rsr a3, LCOUNT # and LCOUNT != 0
- beqz a3, 1f
- addi a3, a3, -1 # decrement LCOUNT and set
- rsr a7, LBEG # set PC to LBEGIN
- wsr a3, LCOUNT
+#if XCHAL_HAVE_LOOPS
+ rsr a4, lend # check if we reached LEND
+ bne a7, a4, 1f
+ rsr a4, lcount # and LCOUNT != 0
+ beqz a4, 1f
+ addi a4, a4, -1 # decrement LCOUNT and set
+ rsr a7, lbeg # set PC to LBEGIN
+ wsr a4, lcount
#endif
-1: wsr a7, EPC_1 # skip store instruction
+1: wsr a7, epc1 # skip store instruction
movi a4, ~3
and a4, a4, a8 # align memory address
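[editor's note] The store path mirrors the load path: it aligns the address, then merges the value into the two covering words under the shifted ffffffff:00000000 mask built earlier. A hedged C sketch of the technique, little-endian and illustrative only:

    #include <stdint.h>

    /* Merge the store value into the two aligned words covering the
     * unaligned address, under a shifted all-ones mask.  Sketch only.
     */
    static void emulate_unaligned_store(uintptr_t addr, uint32_t value)
    {
            uint32_t *base = (uint32_t *)(addr & ~(uintptr_t)3);
            unsigned sar = (addr & 3) * 8;
            uint64_t mask = 0xffffffffULL << sar;   /* bytes to change */
            uint64_t pair = ((uint64_t)base[1] << 32) | base[0];

            pair = (pair & ~mask) | ((uint64_t)value << sar);
            base[0] = (uint32_t)pair;
            base[1] = (uint32_t)(pair >> 32);
    }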
@@ -411,11 +405,12 @@ ENTRY(fast_unaligned)
.Lexit:
movi a4, 0
- rsr a3, EXCSAVE_1
+ rsr a3, excsave1
s32i a4, a3, EXC_TABLE_FIXUP
/* Restore working register */
+ l32i a8, a2, PT_AREG8
l32i a7, a2, PT_AREG7
l32i a6, a2, PT_AREG6
l32i a5, a2, PT_AREG5
@@ -424,7 +419,7 @@ ENTRY(fast_unaligned)
/* restore SAR and return */
- wsr a0, SAR
+ wsr a0, sar
l32i a0, a2, PT_AREG0
l32i a2, a2, PT_AREG2
rfe
@@ -442,11 +437,11 @@ ENTRY(fast_unaligned)
l32i a6, a2, PT_AREG6
l32i a5, a2, PT_AREG5
l32i a4, a2, PT_AREG4
- wsr a0, SAR
+ wsr a0, sar
mov a1, a2
- rsr a0, PS
- bbsi.l a2, PS_UM_SHIFT, 1f # jump if user mode
+ rsr a0, ps
+ bbsi.l a2, PS_UM_BIT, 1f # jump if user mode
movi a0, _kernel_exception
jx a0
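[editor's note] The bbsi.l test above decides which slow path finishes the job: the PS.UM bit distinguishes a fault taken in user mode (continue in _user_exception) from one taken in kernel mode (continue in _kernel_exception). A small sketch, with the bit position assumed for illustration:

    #include <stdint.h>

    #define PS_UM_BIT 5  /* assumed position of the user-mode bit */

    /* Nonzero when the fault came from user mode, i.e. when the
     * handler should continue in _user_exception.
     */
    static int fault_from_user_mode(uint32_t ps)
    {
            return (ps >> PS_UM_BIT) & 1;
    }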
@@ -454,6 +449,6 @@ ENTRY(fast_unaligned)
1: movi a0, _user_exception
jx a0
+ENDPROC(fast_unaligned)
#endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */
-