cpukit/score/cpu/arm/arm_exc_interrupt.S
/**
 * @file
 *
 * @ingroup ScoreCPU
 *
 * @brief ARM interrupt exception prologue and epilogue.
 */

/*
 * Copyright (c) 2009-2014 embedded brains GmbH.  All rights reserved.
 *
 *  embedded brains GmbH
 *  Dornierstr. 4
 *  82178 Puchheim
 *  Germany
 *  <rtems@embedded-brains.de>
 *
 * The license and distribution terms for this file may be
 * found in the file LICENSE in this distribution or at
 * http://www.rtems.org/license/LICENSE.
 */

/*
 * The upper EXCHANGE_SIZE bytes of the INT stack area are used for data
 * exchange between INT and SVC mode.  Below this is the actual INT stack.
 * The exchange area is only accessed while interrupts are disabled.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

#ifdef ARM_MULTILIB_ARCH_V4

#define EXCHANGE_LR r4
#define EXCHANGE_SPSR r5
#define EXCHANGE_CPSR r6
#define EXCHANGE_INT_SP r8

#define EXCHANGE_LIST {EXCHANGE_LR, EXCHANGE_SPSR, EXCHANGE_CPSR, EXCHANGE_INT_SP}
#define EXCHANGE_SIZE 16
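
/*
 * Illustrative layout of the upper end of the INT stack area, derived from
 * the definitions above and the stmdb in the prologue below (one word per
 * exchange register, highest address first):
 *
 *   sp on INT entry ->  top of the INT stack area
 *                       EXCHANGE_INT_SP   (sp - 4)
 *                       EXCHANGE_CPSR     (sp - 8)
 *                       EXCHANGE_SPSR     (sp - 12)
 *   EXCHANGE_INT_SP ->  EXCHANGE_LR       (sp - 16), top of the actual INT stack
 */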

#define SELF_CPU_CONTROL r7
#define SP_OF_INTERRUPTED_CONTEXT r9

#define CONTEXT_LIST {r0, r1, r2, r3, EXCHANGE_LR, EXCHANGE_SPSR, SELF_CPU_CONTROL, r12}
#define CONTEXT_SIZE 32
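
/*
 * Sketch of the frame built on the SVC stack by the prologue below, from
 * higher to lower addresses (CONTEXT_SIZE covers only the CONTEXT_LIST part):
 *
 *   r12, SELF_CPU_CONTROL, EXCHANGE_SPSR, EXCHANGE_LR, r3, r2, r1, r0
 *   lr (SVC), SP_OF_INTERRUPTED_CONTEXT
 *   optional VFP area: d0-d7, d16-d31, {r1, r0 = FPSCR}
 */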

.arm
.globl _ARMV4_Exception_interrupt
_ARMV4_Exception_interrupt:

	/* Save exchange registers to exchange area */
	stmdb	sp, EXCHANGE_LIST

	/* Set exchange registers */
	mov	EXCHANGE_LR, lr
	mrs	EXCHANGE_SPSR, SPSR
	mrs	EXCHANGE_CPSR, CPSR
	sub	EXCHANGE_INT_SP, sp, #EXCHANGE_SIZE
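
	/*
	 * EXCHANGE_INT_SP points just below the exchange area; it serves both
	 * as the base address for reading the exchange registers back and as
	 * the initial stack pointer of the actual INT stack.
	 */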

	/* Switch to SVC mode (IRQ mode 0x12 | 0x1 == SVC mode 0x13) */
	orr	EXCHANGE_CPSR, EXCHANGE_CPSR, #0x1
	msr	CPSR_c, EXCHANGE_CPSR

	/*
	 * Save context.  We save the link register separately because it has
	 * to be restored in SVC mode.  The other registers can be restored in
	 * INT mode.  Ensure that the stack remains 8 byte aligned.  The
	 * register pushed to maintain this alignment is used to hold the
	 * stack pointer of the interrupted context.
	 */
	stmdb	sp!, CONTEXT_LIST
	stmdb	sp!, {SP_OF_INTERRUPTED_CONTEXT, lr}

#ifdef ARM_MULTILIB_VFP_D32
	/* Save VFP context */
	vmrs	r0, FPSCR
	vstmdb	sp!, {d0-d7}
	vstmdb	sp!, {d16-d31}
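	/*
	 * Push the FPSCR value (r0); r1 seems to be included only to keep
	 * the stack 8 byte aligned.
	 */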
	stmdb	sp!, {r0, r1}
#endif

	/* Get per-CPU control of current processor */
	GET_SELF_CPU_CONTROL	SELF_CPU_CONTROL, r1

	/* Remember INT stack pointer */
	mov	r1, EXCHANGE_INT_SP

	/* Restore exchange registers from exchange area */
	ldmia	r1, EXCHANGE_LIST

	/* Get interrupt nest level */
	ldr	r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]

	/* Switch stack if necessary and save original stack pointer */
	mov	SP_OF_INTERRUPTED_CONTEXT, sp
	cmp	r2, #0
	moveq	sp, r1
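	/*
	 * Only the outermost interrupt switches to the INT stack; a nested
	 * interrupt is already running on it.
	 */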

	/* Switch to THUMB instructions if necessary */
	SWITCH_FROM_ARM_TO_THUMB	r1

	/* Increment interrupt nest and thread dispatch disable level */
	ldr	r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
	add	r2, #1
	add	r3, #1
	str	r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
	str	r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]

#ifdef RTEMS_PROFILING
	cmp	r2, #1
	bne	profiling_entry_done
	bl	_CPU_Counter_read
	push	{r0, r1}
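	/*
	 * r0 holds the entry time stamp returned by _CPU_Counter_read; r1
	 * seems to be pushed only to keep the stack 8 byte aligned.  Both are
	 * popped again in the profiling exit path.
	 */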
profiling_entry_done:
#endif

	/* Call BSP dependent interrupt dispatcher */
	bl	bsp_interrupt_dispatch

	/* Decrement interrupt nest and thread dispatch disable level */
	ldr	r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
	ldr	r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
	sub	r2, #1
	sub	r3, #1
	str	r2, [SELF_CPU_CONTROL, #PER_CPU_ISR_NEST_LEVEL]
	str	r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]

#ifdef RTEMS_PROFILING
	cmp	r2, #0
	bne	profiling_exit_done
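	/*
	 * _CPU_Counter_read returns the exit time stamp in r0 and the pop
	 * yields the entry time stamp pushed above in r1.  The arguments of
	 * _Profiling_Outer_most_interrupt_entry_and_exit are apparently the
	 * per-CPU control, the entry instant and the exit instant in r0, r1
	 * and r2.
	 */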
	bl	_CPU_Counter_read
	pop	{r1, r3}
	mov	r2, r0
	mov	r0, SELF_CPU_CONTROL
	bl	_Profiling_Outer_most_interrupt_entry_and_exit
	ldr	r3, [SELF_CPU_CONTROL, #PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL]
profiling_exit_done:
#endif

	/* Restore stack pointer */
	mov	sp, SP_OF_INTERRUPTED_CONTEXT

	/* Check thread dispatch disable level */
	cmp	r3, #0
	bne	thread_dispatch_done

	/* Check context switch necessary */
	ldrb	r1, [SELF_CPU_CONTROL, #PER_CPU_DISPATCH_NEEDED]
	cmp	r1, #0
	beq	thread_dispatch_done

	/* This aligns thread_dispatch_done on a 4 byte boundary */
#ifdef __thumb__
	nop
#endif /* __thumb__ */

	/* Thread dispatch */
	bl	_Thread_Dispatch

thread_dispatch_done:

	/* Switch to ARM instructions if necessary */
	SWITCH_FROM_THUMB_TO_ARM

#ifdef ARM_MULTILIB_VFP_D32
	/* Restore VFP context */
	ldmia	sp!, {r0, r1}
	vldmia	sp!, {d16-d31}
	vldmia	sp!, {d0-d7}
	vmsr	FPSCR, r0
#endif

	/* Restore SP_OF_INTERRUPTED_CONTEXT register and link register */
	ldmia	sp!, {SP_OF_INTERRUPTED_CONTEXT, lr}

	/*
	 * XXX: Remember and restore stack pointer.  The data on the stack is
	 * still in use.  So the stack is now in an inconsistent state.  The
	 * FIQ handler implementation must not use this area.
	 */
	mov	r0, sp
	add	sp, #CONTEXT_SIZE

	/* Get INT mode program status register */
	mrs	r1, CPSR
	bic	r1, r1, #0x1

	/* Switch to INT mode */
	msr	CPSR_c, r1

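	/*
	 * EXCHANGE_LR and EXCHANGE_SPSR still hold the r4 and r5 values of
	 * the interrupted context.  They are parked in the exchange area
	 * because the context restore below reuses these registers for the
	 * return address and the program status.
	 */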
	/* Save EXCHANGE_LR and EXCHANGE_SPSR registers to exchange area */
	stmdb	sp!, {EXCHANGE_LR, EXCHANGE_SPSR}

	/* Restore context */
	ldmia	r0, CONTEXT_LIST

	/* Set return address and program status */
	mov	lr, EXCHANGE_LR
	msr	SPSR_fsxc, EXCHANGE_SPSR

	/* Restore EXCHANGE_LR and EXCHANGE_SPSR registers from exchange area */
	ldmia	sp!, {EXCHANGE_LR, EXCHANGE_SPSR}

	/*
	 * Return from interrupt.  With pc as destination and the S suffix,
	 * this also restores CPSR from SPSR; the subtraction of 4 adjusts
	 * the IRQ return address.
	 */
	subs	pc, lr, #4

#endif /* ARM_MULTILIB_ARCH_V4 */