Diffstat (limited to 'cpukit/score/cpu/aarch64/aarch64-exception-default.S')
-rw-r--r--   cpukit/score/cpu/aarch64/aarch64-exception-default.S   131
1 file changed, 48 insertions(+), 83 deletions(-)
diff --git a/cpukit/score/cpu/aarch64/aarch64-exception-default.S b/cpukit/score/cpu/aarch64/aarch64-exception-default.S
index 2a4ddbcc61..c7c9d03465 100644
--- a/cpukit/score/cpu/aarch64/aarch64-exception-default.S
+++ b/cpukit/score/cpu/aarch64/aarch64-exception-default.S
@@ -72,7 +72,7 @@
* * The exception returns to the previous execution state
*/
- .macro JUMP_HANDLER_SHORT
+ .macro JUMP_HANDLER
/* Mask to use in BIC, lower 7 bits */
mov x0, #0x7f
/* LR contains PC, mask off to the base of the current vector */
@@ -109,10 +109,6 @@
nop
nop
nop
- .endm
-
- .macro JUMP_HANDLER
- JUMP_HANDLER_SHORT
nop
.endm
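
For reference, the address arithmetic that the renamed JUMP_HANDLER macro performs can be written out in C. This is an illustrative sketch only, not code from the patch: every entry of the AArch64 vector table is 0x80 bytes, so clearing the low 7 bits of the PC captured in LR yields the base address of the current entry, and bits 7..10 select which of the 16 entries was taken.

    #include <stdint.h>

    /* Sketch of the JUMP_HANDLER arithmetic, for illustration only. */
    static inline uint64_t vector_entry_base(uint64_t pc)
    {
      return pc & ~UINT64_C(0x7f);      /* bic x0, lr, x0   (x0 == 0x7f) */
    }

    static inline uint64_t vector_number(uint64_t pc)
    {
      return (pc & 0x780) >> 7;         /* and lr, lr, #0x780; lsr lr, lr, #7 */
    }
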
@@ -144,11 +140,48 @@ Vector_table_el3:
* using SP0.
*/
curr_el_sp0_sync:
- stp x0, lr, [sp, #-0x10]! /* Push x0,lr on to the stack */
- bl curr_el_sp0_sync_get_pc /* Get current execution address */
-curr_el_sp0_sync_get_pc: /* The current PC is now in LR */
- JUMP_HANDLER
- JUMP_TARGET_SP0
+ sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE /* reserve space for CEF */
+ str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET] /* shove lr into CEF */
+ bl .push_exception_context_start /* bl to CEF store routine */
+/* Save original sp in x0 for .push_exception_context_finish */
+ add x0, sp, #AARCH64_EXCEPTION_FRAME_SIZE /* save original sp */
+/* Push the remainder of the context */
+ bl .push_exception_context_finish
+/* get jump target and branch/link */
+ bl curr_el_sp0_sync_get_pc /* Get current execution address */
+curr_el_sp0_sync_get_pc: /* The current PC is now in LR */
+ mov x0, #0x7f /* Mask to use in BIC, lower 7 bits */
+ bic x0, lr, x0 /* Mask LR to base of current vector */
+ ldr x1, [x0, #0x78] /* Load target from last word in vector */
+ and lr, lr, #0x780 /* Mask off bits for vector number */
+ lsr lr, lr, #7 /* Shift the vector bits down */
+/* Store the vector */
+ str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
+ mov x0, sp
+ blr x1
+ b twiddle
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+ nop
+/* Takes up the space of 2 instructions */
+#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
+ .word _AArch64_Exception_default
+ .word 0x0
+#else
+ .dword _AArch64_Exception_default
+#endif
.balign 0x80
/* The exception handler for IRQ exceptions from the current EL using SP0. */
curr_el_sp0_irq:
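
Each rewritten vector entry now builds the exception frame in place (reserve AARCH64_EXCEPTION_FRAME_SIZE, store LR, call the .push_exception_context_* routines) and then dispatches through the pointer stored in the last word of its own 0x80-byte slot, which the entry above fills with _AArch64_Exception_default. A rough C equivalent of that dispatch, for illustration only (dispatch_from_vector and the void * frame argument are stand-ins, not RTEMS names):

    #include <stdint.h>

    typedef void (*exception_handler_t)(void *frame);

    /* Illustrative sketch of "ldr x1, [x0, #0x78]; mov x0, sp; blr x1". */
    static void dispatch_from_vector(uint64_t entry_base, void *frame)
    {
      /* The handler address occupies the last word of the 0x80-byte entry. */
      exception_handler_t handler =
        *(exception_handler_t *)(uintptr_t)(entry_base + 0x78);

      handler(frame);   /* frame points at the exception frame built on the stack */
    }
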
@@ -204,13 +237,11 @@ curr_el_spx_sync_get_pc: /* The current PC is now in LR */
str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
mov x0, sp
blr x1
-/* bl to CEF restore routine (doesn't restore lr) */
- bl .pop_exception_context
- ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET] /* get lr from CEF */
-/* drop space reserved for CEF and clear exclusive */
- add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
- msr spsel, #1 /* switch to thread stack */
- eret /* exception return */
+ b twiddle
+ nop
+ nop
+ nop
+ nop
nop
nop
nop
@@ -475,69 +506,3 @@ twiddle:
stp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Done, return to exception handler */
ret
-
-/*
- * Apply the exception frame to the current register status, SP points to the EF
- */
-.pop_exception_context:
-/* Pop daif and spsr */
- ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
-/* Restore daif and spsr */
- msr DAIF, x2
- msr SPSR_EL1, x3
-/* Pop FAR and ESR */
- ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
-/* Restore ESR and FAR */
- msr ESR_EL1, x2
- msr FAR_EL1, x3
-/* Pop fpcr and fpsr */
- ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
-/* Restore fpcr and fpsr */
- msr FPSR, x2
- msr FPCR, x3
-/* Pop VFP registers */
- ldp q0, q1, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
- ldp q2, q3, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
- ldp q4, q5, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
- ldp q6, q7, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
- ldp q8, q9, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
- ldp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
- ldp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
- ldp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
- ldp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
- ldp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
- ldp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
- ldp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
- ldp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
- ldp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
- ldp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
- ldp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
-/* Pop x0-x29(fp) */
- ldp x2, x3, [sp, #0x10]
- ldp x4, x5, [sp, #0x20]
- ldp x6, x7, [sp, #0x30]
- ldp x8, x9, [sp, #0x40]
- ldp x10, x11, [sp, #0x50]
- ldp x12, x13, [sp, #0x60]
- ldp x14, x15, [sp, #0x70]
- ldp x16, x17, [sp, #0x80]
- ldp x18, x19, [sp, #0x90]
- ldp x20, x21, [sp, #0xa0]
- ldp x22, x23, [sp, #0xb0]
- ldp x24, x25, [sp, #0xc0]
- ldp x26, x27, [sp, #0xd0]
- ldp x28, x29, [sp, #0xe0]
-/* Pop sp and ELR */
- ldp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
-/* Restore thread SP */
- msr spsel, #1
- mov sp, x0
- msr spsel, #0
-/* Restore exception LR */
- msr ELR_EL1, x1
- ldp x0, x1, [sp, #0x00]
-
-/* We must clear reservations to ensure consistency with atomic operations */
- clrex
-
- ret
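
The clrex at the end of the removed .pop_exception_context routine exists because an exception can arrive between a load-exclusive and its matching store-exclusive. A hedged C sketch of the kind of code that is affected (illustration only; without the LSE extensions compilers lower this builtin to an LDXR/STXR retry loop):

    #include <stdint.h>

    /* If an exception is taken between the load-exclusive and the
     * store-exclusive of this loop and execution later resumes in another
     * context, a leftover reservation could let that context's
     * store-exclusive succeed against data it never loaded.  Clearing the
     * monitor (CLREX) on the exception path rules that out. */
    static inline uint64_t fetch_add_one(uint64_t *counter)
    {
      return __atomic_fetch_add(counter, 1, __ATOMIC_SEQ_CST);
    }
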