Diffstat (limited to 'cpukit/score/cpu/arm/arm_exc_interrupt.S'):
 -rw-r--r--  cpukit/score/cpu/arm/arm_exc_interrupt.S | 70
 1 file changed, 61 insertions, 9 deletions
diff --git a/cpukit/score/cpu/arm/arm_exc_interrupt.S b/cpukit/score/cpu/arm/arm_exc_interrupt.S
index a16dc88585..5a7109da26 100644
--- a/cpukit/score/cpu/arm/arm_exc_interrupt.S
+++ b/cpukit/score/cpu/arm/arm_exc_interrupt.S
@@ -1,3 +1,5 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+
/**
* @file
*
@@ -7,17 +9,28 @@
*/
/*
- * Copyright (c) 2009, 2022 embedded brains GmbH. All rights reserved.
+ * Copyright (C) 2009, 2022 embedded brains GmbH & Co. KG
*
- * embedded brains GmbH
- * Dornierstr. 4
- * 82178 Puchheim
- * Germany
- * <rtems@embedded-brains.de>
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
*
- * The license and distribution terms for this file may be
- * found in the file LICENSE in this distribution or at
- * http://www.rtems.org/license/LICENSE.
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
*/
/*
@@ -37,6 +50,8 @@
#define STACK_POINTER_ADJUST r7
#define NON_VOLATILE_SCRATCH r9
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+
#define EXCHANGE_LR r4
#define EXCHANGE_SPSR r5
#define EXCHANGE_CPSR r6
@@ -48,10 +63,28 @@
#define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, NON_VOLATILE_SCRATCH, r12}
#define CONTEXT_SIZE 32
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
+
.arm
.globl _ARMV4_Exception_interrupt
_ARMV4_Exception_interrupt:
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+ /* Prepare return from interrupt */
+ sub lr, lr, #4
+
+ /* Save LR_irq and SPSR_irq to the SVC stack */
+ srsfd sp!, #ARM_PSR_M_SVC
+
+ /* Switch to SVC mode */
+ cps #ARM_PSR_M_SVC
+
+ /*
+ * Save the volatile registers, two non-volatile registers used for
+ * interrupt processing, and the link register.
+ */
+ push {r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
/* Save exchange registers to exchange area */
stmdb sp, EXCHANGE_LIST
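
On ARMv6-class and newer cores, where ARM_MULTILIB_HAS_STORE_RETURN_STATE is defined, the SRS and CPS instructions let the IRQ handler store its return state directly on the SVC stack and switch modes, which is what makes the EXCHANGE_* registers and the exchange area unnecessary on the new path. A minimal, standalone sketch of that entry/exit pattern (illustrative only, not the RTEMS handler; the literal 0x13 is the architectural SVC mode number that ARM_PSR_M_SVC stands for):

  .arm
irq_handler_sketch:
  /* lr_irq points 4 bytes past the instruction to resume */
  sub   lr, lr, #4

  /* Push lr_irq and spsr_irq onto the SVC stack */
  srsfd sp!, #0x13

  /* Continue in SVC mode, now running on the SVC stack */
  cps   #0x13

  /* Save the volatile registers and lr_svc; these six words plus the
     two-word srsfd frame keep the stack pointer 8-byte aligned */
  push  {r0-r3, r12, lr}

  /* ... dispatch the interrupt from C here ... */

  /* Restore the volatile registers and lr_svc */
  pop   {r0-r3, r12, lr}

  /* Pop pc and cpsr from the frame created by srsfd */
  rfefd sp!

The #else branch keeps the ARMv4 sequence, which has to stage LR_irq and SPSR_irq through the exchange area because ARMv4 has no single instruction that stores them to the SVC stack, so scratch registers have to be freed up first.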
@@ -74,6 +107,7 @@ _ARMV4_Exception_interrupt:
*/
push CONTEXT_LIST
push {STACK_POINTER_ADJUST, lr}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
/*
* On a public interface, the stack pointer must be aligned on an
@@ -97,11 +131,13 @@ _ARMV4_Exception_interrupt:
push {r2, r3}
#endif /* ARM_MULTILIB_VFP */
+#ifndef ARM_MULTILIB_HAS_STORE_RETURN_STATE
/* Remember INT stack pointer */
mov r1, EXCHANGE_INT_SP
/* Restore exchange registers from exchange area */
ldmia r1, EXCHANGE_LIST
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
/* Get interrupt nest level */
ldr r2, [r0, #PER_CPU_ISR_NEST_LEVEL]
@@ -109,7 +145,11 @@ _ARMV4_Exception_interrupt:
/* Switch stack if necessary and save original stack pointer */
mov NON_VOLATILE_SCRATCH, sp
cmp r2, #0
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+ ldreq sp, [r0, #PER_CPU_INTERRUPT_STACK_HIGH]
+#else
moveq sp, r1
+#endif
/* Increment interrupt nest and thread dispatch disable level */
ldr r3, [r0, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
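
Only the outermost interrupt (ISR nest level zero) switches to the dedicated interrupt stack; nested interrupts keep the stack they arrived on, and the original stack pointer is parked in NON_VOLATILE_SCRATCH either way. The two paths differ only in where the interrupt stack pointer comes from: the SRS path loads the stack top from the per-CPU control, while the exchange-area path reuses the pointer already held in r1. A sketch of the conditional-execution idiom (illustrative, assuming r0 holds the per-CPU control as in the hunk above):

  mov   r9, sp                                  /* remember the incoming stack pointer */
  ldr   r2, [r0, #PER_CPU_ISR_NEST_LEVEL]       /* current nest level */
  cmp   r2, #0                                  /* outermost interrupt? */
  ldreq sp, [r0, #PER_CPU_INTERRUPT_STACK_HIGH] /* then use the interrupt stack */
                                                /* ldreq does nothing for nested interrupts */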
@@ -215,6 +255,13 @@ _ARMV4_Exception_interrupt:
/* Undo stack pointer adjustment */
add sp, sp, STACK_POINTER_ADJUST
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+ /*
+ * Restore the volatile registers, two non-volatile registers used for
+ * interrupt processing, and the link register.
+ */
+ pop {r0-r3, STACK_POINTER_ADJUST, NON_VOLATILE_SCRATCH, r12, lr}
+#else /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
/* Restore STACK_POINTER_ADJUST register and link register */
pop {STACK_POINTER_ADJUST, lr}
@@ -245,6 +292,7 @@ _ARMV4_Exception_interrupt:
/* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
pop {EXCHANGE_LR, EXCHANGE_SPSR}
+#endif /* ARM_MULTILIB_HAS_STORE_RETURN_STATE */
#ifdef ARM_MULTILIB_HAS_LOAD_STORE_EXCLUSIVE
/*
@@ -274,7 +322,11 @@ _ARMV4_Exception_interrupt:
#endif
/* Return from interrupt */
+#ifdef ARM_MULTILIB_HAS_STORE_RETURN_STATE
+ rfefd sp!
+#else
subs pc, lr, #4
+#endif
#ifdef RTEMS_PROFILING
.Lskip_profiling:
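
Both return sequences restore CPSR as part of the jump back to the interrupted code: with pc as the destination, the S suffix on subs makes the processor copy SPSR into CPSR as a side effect (the classic ARMv4 exception return), while rfefd reloads pc and cpsr together from the frame that srsfd pushed on entry. Side by side (illustrative; each line assumes the state its own entry path set up):

  subs  pc, lr, #4   /* ARMv4 path: write pc, and CPSR := SPSR as a side effect */
  rfefd sp!          /* SRS path: pop pc and cpsr from the srsfd frame */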