From 12a2a8e4426c05161aa76a9c86de1e36e43e8a61 Mon Sep 17 00:00:00 2001
From: Sebastian Huber
Date: Thu, 13 Jan 2022 14:51:55 +0100
Subject: arm: Optimize interrupt handling

Use the SRS (Store Return State) instruction if available.  This
considerably simplifies the context save and restore.
---
 cpukit/score/cpu/arm/arm_exc_interrupt.S           | 39 ++++++++++++++++++++++
 cpukit/score/cpu/arm/include/rtems/score/arm.h     |  1 +
 cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h | 13 ++++++++
 3 files changed, 53 insertions(+)

(limited to 'cpukit/score/cpu/arm')

diff --git a/cpukit/score/cpu/arm/arm_exc_interrupt.S b/cpukit/score/cpu/arm/arm_exc_interrupt.S
index a16dc88585..92ff781f40 100644
--- a/cpukit/score/cpu/arm/arm_exc_interrupt.S
+++ b/cpukit/score/cpu/arm/arm_exc_interrupt.S
@@ -37,6 +37,8 @@
 #define STACK_POINTER_ADJUST r7
 #define NON_VOLATILE_SCRATCH r9
 
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+
 #define EXCHANGE_LR r4
 #define EXCHANGE_SPSR r5
 #define EXCHANGE_CPSR r6
@@ -48,10 +50,28 @@
 #define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, NON_VOLATILE_SCRATCH, r12}
 #define CONTEXT_SIZE 32
 
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
+
 .arm
 .globl _ARMV4_Exception_interrupt
 _ARMV4_Exception_interrupt:
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  /* Prepare return from interrupt */
+  sub lr, lr, #4
+
+  /* Save LR_irq and SPSR_irq to the SVC stack */
+  srsfd sp!, #ARM_PSR_M_SVC
+
+  /* Switch to SVC mode */
+  cps #ARM_PSR_M_SVC
+
+  /*
+   * Save the volatile registers, two non-volatile registers used for
+   * interrupt processing, and the link register.
+   */
+  push {r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
   /* Save exchange registers to exchange area */
   stmdb sp, EXCHANGE_LIST
 
@@ -74,6 +94,7 @@ _ARMV4_Exception_interrupt:
    */
   push CONTEXT_LIST
   push {STACK_POINTER_ADJUST, lr}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
   /*
    * On a public interface, the stack pointer must be aligned on an
@@ -97,11 +118,13 @@ _ARMV4_Exception_interrupt:
   push {r2, r3}
 #endif /* ARM_MULTILIB_VFP */
 
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
   /* Remember INT stack pointer */
   mov r1, EXCHANGE_INT_SP
 
   /* Restore exchange registers from exchange area */
   ldmia r1, EXCHANGE_LIST
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
   /* Get interrupt nest level */
   ldr r2, [r0, #PER_CPU_ISR_NEST_LEVEL]
@@ -109,7 +132,11 @@ _ARMV4_Exception_interrupt:
   /* Switch stack if necessary and save original stack pointer */
   mov NON_VOLATILE_SCRATCH, sp
   cmp r2, #0
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  ldreq sp, [r0, #PER_CPU_INTERRUPT_STACK_HIGH]
+#else
   moveq sp, r1
+#endif
 
   /* Increment interrupt nest and thread dispatch disable level */
   ldr r3, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
@@ -215,6 +242,13 @@ _ARMV4_Exception_interrupt:
   /* Undo stack pointer adjustment */
   add sp, sp, STACK_POINTER_ADJUST
 
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  /*
+   * Restore the volatile registers, two non-volatile registers used for
+   * interrupt processing, and the link register.
+   */
+  pop {r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
   /* Restore STACK_POINTER_ADJUST register and link register */
   pop {STACK_POINTER_ADJUST, lr}
 
@@ -245,6 +279,7 @@ _ARMV4_Exception_interrupt:
 
   /* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
   pop {EXCHANGE_LR, EXCHANGE_SPSR}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 
 #ifdef ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
   /*
@@ -274,7 +309,11 @@ _ARMV4_Exception_interrupt:
 #endif
 
   /* Return from interrupt */
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  rfefd sp!
+#else
   subs pc, lr, #4
+#endif
 
 #ifdef RTEMS_PROFILING
 .Lskip_profiling:
diff --git a/cpukit/score/cpu/arm/include/rtems/score/arm.h b/cpukit/score/cpu/arm/include/rtems/score/arm.h
index b1e4b07a37..7eaa69d889 100644
--- a/cpukit/score/cpu/arm/include/rtems/score/arm.h
+++ b/cpukit/score/cpu/arm/include/rtems/score/arm.h
@@ -47,6 +47,7 @@ extern "C" {
   #define ARM_MULTILIB_HAS_WFI
   #define ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
   #define ARM_MULTILIB_HAS_BARRIER_INSTRUCTIONS
+  #define ARM_MULTILIB_HAS_STORE_RETURN_STATE
 #endif
 
 #ifndef ARM_DISABLE_THREAD_ID_REGISTER_USE
diff --git a/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h b/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
index 0f86710966..a6fe74e9ad 100644
--- a/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
+++ b/cpukit/score/cpu/arm/include/rtems/score/cpuimpl.h
@@ -79,6 +79,18 @@ typedef struct {
   double d6;
   double d7;
 #endif /* ARM_MULTILIB_VFP */
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+  uint32_t r0;
+  uint32_t r1;
+  uint32_t r2;
+  uint32_t r3;
+  uint32_t r7;
+  uint32_t r9;
+  uint32_t r12;
+  uint32_t lr;
+  uint32_t return_pc;
+  uint32_t return_cpsr;
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
   uint32_t r9;
   uint32_t lr;
   uint32_t r0;
@@ -89,6 +101,7 @@ typedef struct {
   uint32_t return_cpsr;
   uint32_t r7;
   uint32_t r12;
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
 } CPU_Interrupt_frame;
 
 #ifdef RTEMS_SMP
-- 
cgit v1.2.3
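For readers unfamiliar with the instructions this patch relies on: SRS (Store Return State) pushes the banked LR and SPSR of the current exception mode onto the stack of a chosen mode, CPS switches the processor mode, and RFE (Return From Exception) pops a return address and CPSR from the stack. The minimal sketch below shows that entry/exit pattern in isolation; it is not part of the commit, it uses GNU as syntax, the label name is made up, and the literal mode number 0x13 (Supervisor) stands in for the ARM_PSR_M_SVC constant used above.

.arm
irq_handler_sketch:
        /* In ARM state, LR_irq is the interrupted instruction address + 4 */
        sub     lr, lr, #4

        /* Push {return address, SPSR_irq} onto the Supervisor (SVC) stack */
        srsfd   sp!, #0x13

        /* Continue in SVC mode, so the handler runs on the SVC stack */
        cps     #0x13

        /* ... save scratch registers and call the C dispatcher here ... */

        /* Pop {PC, CPSR}: restores the pre-interrupt mode and returns */
        rfefd   sp!

Because SRS writes the return state directly to the SVC stack, the handler no longer needs the separate exchange area and register shuffling of the generic ARMv4 path, which is what the commit message means by a considerably simpler context save and restore.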