1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
|
/**
* @file
*
* @ingroup ppc_exc
*
* @brief PowerPC Exceptions implementation.
*/
/*
* Copyright (c) 2009
* embedded brains GmbH
* Obere Lagerstr. 30
* D-82178 Puchheim
* Germany
* <rtems@embedded-brains.de>
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
* http://www.rtems.org/license/LICENSE.
*/
#include "ppc_exc_asm_macros.h"
.global ppc_exc_min_prolog_tmpl_naked

/*
 * Minimal "naked" exception prologue template.
 *
 * This code is not executed in place: ppc_exc_make_prologue() copies it
 * into the actual exception vector slot and generates the final branch
 * from the absolute address stored in the trailing .int word below.
 *
 * It allocates the exception stack frame, frees up VECTOR_REGISTER by
 * saving it to the frame, and loads the vector identifier.
 */
ppc_exc_min_prolog_tmpl_naked:

	/* Allocate exception frame; stwu also stores the stack back-chain */
	stwu r1, -EXCEPTION_FRAME_END(r1)

	/* Save VECTOR_REGISTER so the template can use it for the vector value */
	stw VECTOR_REGISTER, VECTOR_OFFSET(r1)

	/*
	 * NOTE(review): the immediate 0 is presumably a placeholder that
	 * ppc_exc_make_prologue() patches with the real vector number when it
	 * installs a copy of this template — confirm against that function.
	 */
	li VECTOR_REGISTER, 0

	/*
	 * We store the absolute branch target address here.  It will be used
	 * to generate the branch operation in ppc_exc_make_prologue().
	 * This word is data, not an executable instruction.
	 */
	.int ppc_exc_wrap_naked
.global ppc_exc_wrap_naked

/*
 * Common "naked" exception wrapper.
 *
 * Entered from the minimal prologue with:
 *   - r1 pointing at a freshly allocated exception frame,
 *   - VECTOR_REGISTER already saved to the frame and holding the vector
 *     value loaded by the prologue.
 *
 * It saves the volatile register context, optionally adjusts the MSR,
 * dispatches to the high level handler from ppc_exc_handler_table, then
 * restores the full context and returns with rfi.
 */
ppc_exc_wrap_naked:

	/* Save scratch registers */
	stw SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
	stw SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)
	stw SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)

	/*
	 * Save volatile registers (EABI caller-saved set used here).  The
	 * non-volatile registers are preserved by the C handler itself.
	 */
	stw r0, GPR0_OFFSET(r1)
	stw r3, GPR3_OFFSET(r1)
	stw r8, GPR8_OFFSET(r1)
	stw r9, GPR9_OFFSET(r1)
	stw r10, GPR10_OFFSET(r1)
	stw r11, GPR11_OFFSET(r1)
	stw r12, GPR12_OFFSET(r1)

	/* Save CR */
	mfcr SCRATCH_REGISTER_0
	stw SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)

	/* Save SRR0 (interrupted instruction address) */
	mfspr SCRATCH_REGISTER_0, srr0
	stw SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)

	/* Save SRR1 (MSR at the time of the exception) */
	mfspr SCRATCH_REGISTER_0, srr1
	stw SCRATCH_REGISTER_0, SRR1_FRAME_OFFSET(r1)

	/* Save CTR */
	mfctr SCRATCH_REGISTER_0
	stw SCRATCH_REGISTER_0, EXC_CTR_OFFSET(r1)

	/* Save XER */
	mfxer SCRATCH_REGISTER_0
	stw SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)

	/* Save LR */
	mflr SCRATCH_REGISTER_0
	stw SCRATCH_REGISTER_0, EXC_LR_OFFSET(r1)

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/*
	 * Load MSR bit mask.  The @sdarel(r13) addressing reads the variable
	 * from the EABI small data area via the dedicated base register r13.
	 */
	lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)

	/*
	 * Change the MSR if necessary (MMU, RI), remember decision in
	 * non-volatile CR_MSR so the restore path can test it after the
	 * handler call.
	 */
	cmpwi CR_MSR, SCRATCH_REGISTER_0, 0
	bne CR_MSR, wrap_change_msr_naked

wrap_change_msr_done_naked:

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	/*
	 * Call high level exception handler
	 */

	/*
	 * Get the handler table index from the vector number.  We have to
	 * discard the exception type.  Take only the least significant five
	 * bits (= LAST_VALID_EXC + 1) from the vector register.  Multiply by
	 * four (= size of function pointer): rotate left by 2 and keep mask
	 * bits 25..29, i.e. index = (vector & 0x1f) << 2.
	 */
	rlwinm SCRATCH_REGISTER_1, VECTOR_REGISTER, 2, 25, 29

	/* Load handler table address */
	LA SCRATCH_REGISTER_0, ppc_exc_handler_table

	/* Load handler address (table base + scaled index) */
	lwzx SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, SCRATCH_REGISTER_1

	/*
	 * First parameter = exception frame pointer + FRAME_LINK_SPACE
	 *
	 * We add FRAME_LINK_SPACE to the frame pointer because the high level
	 * handler expects a BSP_Exception_frame structure.
	 */
	addi r3, r1, FRAME_LINK_SPACE

	/*
	 * Second parameter = vector number (r4 is the VECTOR_REGISTER)
	 *
	 * Discard the exception type and store the vector number
	 * in the vector register.  Take only the least significant
	 * five bits (= LAST_VALID_EXC + 1).
	 */
	rlwinm VECTOR_REGISTER, VECTOR_REGISTER, 0, 27, 31

	/* Call handler via CTR (indirect call) */
	mtctr SCRATCH_REGISTER_0
	bctrl

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

	/* Restore MSR?  CR_MSR still holds the decision made on entry. */
	bne CR_MSR, wrap_restore_msr_naked

wrap_restore_msr_done_naked:

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */

	/* Restore XER and CTR */
	lwz SCRATCH_REGISTER_0, EXC_XER_OFFSET(r1)
	lwz SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
	mtxer SCRATCH_REGISTER_0
	mtctr SCRATCH_REGISTER_1

	/* Restore CR and LR */
	lwz SCRATCH_REGISTER_0, EXC_CR_OFFSET(r1)
	lwz SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
	mtcr SCRATCH_REGISTER_0
	mtlr SCRATCH_REGISTER_1

	/* Restore volatile registers */
	lwz r0, GPR0_OFFSET(r1)
	lwz r3, GPR3_OFFSET(r1)
	lwz r8, GPR8_OFFSET(r1)
	lwz r9, GPR9_OFFSET(r1)
	lwz r10, GPR10_OFFSET(r1)
	lwz r11, GPR11_OFFSET(r1)
	lwz r12, GPR12_OFFSET(r1)

	/* Restore vector register */
	lwz VECTOR_REGISTER, VECTOR_OFFSET(r1)

	/*
	 * Restore scratch registers and SRRs.  The loads and mtspr writes are
	 * interleaved so each scratch register is reloaded with its saved
	 * value only after its SRR content has been moved into the SPR.
	 */
	lwz SCRATCH_REGISTER_0, SRR0_FRAME_OFFSET(r1)
	lwz SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
	lwz SCRATCH_REGISTER_2, SCRATCH_REGISTER_2_OFFSET(r1)
	mtspr srr0, SCRATCH_REGISTER_0
	lwz SCRATCH_REGISTER_0, SCRATCH_REGISTER_0_OFFSET(r1)
	mtspr srr1, SCRATCH_REGISTER_1
	lwz SCRATCH_REGISTER_1, SCRATCH_REGISTER_1_OFFSET(r1)

	/*
	 * We restore r1 from the frame rather than just popping (adding to
	 * current r1) since the exception handler might have done strange
	 * things (e.g. a debugger moving and relocating the stack).
	 */
	lwz r1, 0(r1)

	/* Return from exception: reloads PC from SRR0 and MSR from SRR1 */
	rfi

#ifndef PPC_EXC_CONFIG_BOOKE_ONLY

/*
 * Out-of-line path: OR the configured bits into the MSR.  sync/isync
 * serialize the context change before continuing (required around
 * MMU/translation-affecting MSR updates).
 */
wrap_change_msr_naked:

	mfmsr SCRATCH_REGISTER_1
	or SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	mtmsr SCRATCH_REGISTER_1
	sync
	isync
	b wrap_change_msr_done_naked

/*
 * Out-of-line path: clear the configured bits again (andc = AND with
 * complement), undoing wrap_change_msr_naked before the rfi.
 */
wrap_restore_msr_naked:

	lwz SCRATCH_REGISTER_0, ppc_exc_msr_bits@sdarel(r13)
	mfmsr SCRATCH_REGISTER_1
	andc SCRATCH_REGISTER_1, SCRATCH_REGISTER_1, SCRATCH_REGISTER_0
	mtmsr SCRATCH_REGISTER_1
	sync
	isync
	b wrap_restore_msr_done_naked

#endif /* PPC_EXC_CONFIG_BOOKE_ONLY */
|