1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
|
/*
* Copyright (c) 2011, 2017 embedded brains GmbH. All rights reserved.
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
* http://www.rtems.org/license/LICENSE.
*/
#include <bspopts.h>
#include <rtems/score/percpu.h>
#include <bsp/vectors.h>
#ifdef PPC_EXC_CONFIG_USE_FIXED_HANDLER
/*
 * Map the symbolic SCRATCH_*/FRAME register names used throughout this
 * handler to concrete GPRs.  The scratch registers are r0 and the
 * volatile registers r3..r12; each is saved to the matching GPR slot of
 * the exception frame (offsets below).  FRAME_REGISTER (r14) is a
 * non-volatile register used to keep the exception frame address alive
 * across the C function calls made by the handler.
 */
#define SCRATCH_0_REGISTER r0
#define SCRATCH_1_REGISTER r3
#define SCRATCH_2_REGISTER r4
#define SCRATCH_3_REGISTER r5
#define SCRATCH_4_REGISTER r6
#define SCRATCH_5_REGISTER r7
#define SCRATCH_6_REGISTER r8
#define SCRATCH_7_REGISTER r9
#define SCRATCH_8_REGISTER r10
#define SCRATCH_9_REGISTER r11
#define SCRATCH_10_REGISTER r12
#define FRAME_REGISTER r14
/* Exception frame save-slot offsets corresponding to the map above */
#define SCRATCH_0_OFFSET GPR0_OFFSET
#define SCRATCH_1_OFFSET GPR3_OFFSET
#define SCRATCH_2_OFFSET GPR4_OFFSET
#define SCRATCH_3_OFFSET GPR5_OFFSET
#define SCRATCH_4_OFFSET GPR6_OFFSET
#define SCRATCH_5_OFFSET GPR7_OFFSET
#define SCRATCH_6_OFFSET GPR8_OFFSET
#define SCRATCH_7_OFFSET GPR9_OFFSET
#define SCRATCH_8_OFFSET GPR10_OFFSET
#define SCRATCH_9_OFFSET GPR11_OFFSET
#define SCRATCH_10_OFFSET GPR12_OFFSET
#define FRAME_OFFSET PPC_EXC_INTERRUPT_FRAME_OFFSET
#ifdef RTEMS_PROFILING
/*
 * Read the current time base into REG.  The source depends on the CPU
 * variant: the Freescale e6500 alternate time base SPR, the TBRL SPR on
 * the ppc8540 (e500) family, or the generic mftb instruction otherwise.
 */
.macro GET_TIME_BASE REG
#if defined(__PPC_CPU_E6500__)
mfspr \REG, FSL_EIS_ATBL
#elif defined(ppc8540)
mfspr \REG, TBRL
#else /* ppc8540 */
mftb \REG
#endif /* ppc8540 */
.endm
#endif /* RTEMS_PROFILING */
.global ppc_exc_min_prolog_async_tmpl_normal
.global ppc_exc_interrupt
/*
 * Minimal prologue template for asynchronous exceptions.  This is a
 * template, not directly executed code: ppc_exc_make_prologue() copies
 * it to the exception vector location.  It allocates the exception
 * frame on the current stack, saves the low-order part of
 * SCRATCH_1_REGISTER into the frame, and loads an immediate into
 * SCRATCH_1_REGISTER.  NOTE(review): the 0xffff8000 immediate is
 * presumably a placeholder patched when the prologue is installed --
 * confirm against ppc_exc_make_prologue().
 */
ppc_exc_min_prolog_async_tmpl_normal:
stwu r1, -PPC_EXC_INTERRUPT_FRAME_SIZE(r1)
PPC_REG_STORE SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
li SCRATCH_1_REGISTER, 0xffff8000
/*
 * We store the absolute branch target address here. It will be used
 * to generate the branch operation in ppc_exc_make_prologue().
 */
.int ppc_exc_interrupt
/*
 * Fixed high level handler for asynchronous exceptions.  Entered from a
 * minimal prologue with r1 already pointing to the freshly allocated
 * exception frame and the low-order part of SCRATCH_1_REGISTER already
 * saved.  This routine saves the volatile context, switches to the
 * interrupt stack for the outermost interrupt, calls
 * bsp_interrupt_dispatch(), optionally performs thread dispatch, then
 * restores the context and returns via rfi.
 */
ppc_exc_interrupt:
/* Save non-volatile FRAME_REGISTER */
PPC_REG_STORE FRAME_REGISTER, FRAME_OFFSET(r1)
#ifdef RTEMS_PROFILING
/* Get entry instant */
GET_TIME_BASE FRAME_REGISTER
stw FRAME_REGISTER, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(r1)
#endif /* RTEMS_PROFILING */
#ifdef __SPE__
/* Enable SPE */
mfmsr FRAME_REGISTER
oris FRAME_REGISTER, FRAME_REGISTER, MSR_SPE >> 16
mtmsr FRAME_REGISTER
isync
/*
 * Save high order part of SCRATCH_1_REGISTER here. The low order part
 * was saved in the minimal prologue.
 */
evmergehi SCRATCH_1_REGISTER, SCRATCH_1_REGISTER, FRAME_REGISTER
PPC_REG_STORE FRAME_REGISTER, GPR3_OFFSET(r1)
#endif
#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
/* Enable FPU and/or AltiVec */
mfmsr FRAME_REGISTER
#ifdef PPC_MULTILIB_FPU
ori FRAME_REGISTER, FRAME_REGISTER, MSR_FP
#endif
#ifdef PPC_MULTILIB_ALTIVEC
oris FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16
#endif
mtmsr FRAME_REGISTER
isync
#endif
/* Move frame pointer to non-volatile FRAME_REGISTER */
mr FRAME_REGISTER, r1
/*
 * Save volatile registers. The SCRATCH_1_REGISTER has been saved in
 * minimum prologue.
 */
PPC_GPR_STORE SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
#ifdef __powerpc64__
PPC_GPR_STORE r2, GPR2_OFFSET(r1)
LA32 r2, .TOC.
#endif
PPC_GPR_STORE SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
/* SCRATCH_2_REGISTER = per-CPU control of the executing processor */
GET_SELF_CPU_CONTROL SCRATCH_2_REGISTER
PPC_GPR_STORE SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
PPC_GPR_STORE SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
PPC_GPR_STORE SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
PPC_GPR_STORE SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
PPC_GPR_STORE SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
PPC_GPR_STORE SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
PPC_GPR_STORE SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
PPC_GPR_STORE SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)
/* Load ISR nest level and thread dispatch disable level */
lwz SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
lwz SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)
/* Save SRR0, SRR1, CR, XER, CTR, and LR */
mfsrr0 SCRATCH_0_REGISTER
mfsrr1 SCRATCH_5_REGISTER
mfcr SCRATCH_6_REGISTER
mfxer SCRATCH_7_REGISTER
mfctr SCRATCH_8_REGISTER
mflr SCRATCH_9_REGISTER
PPC_REG_STORE SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
PPC_REG_STORE SCRATCH_5_REGISTER, SRR1_FRAME_OFFSET(r1)
stw SCRATCH_6_REGISTER, EXC_CR_OFFSET(r1)
stw SCRATCH_7_REGISTER, EXC_XER_OFFSET(r1)
PPC_REG_STORE SCRATCH_8_REGISTER, EXC_CTR_OFFSET(r1)
PPC_REG_STORE SCRATCH_9_REGISTER, EXC_LR_OFFSET(r1)
#ifdef __SPE__
/* Save SPEFSCR and ACC */
mfspr SCRATCH_0_REGISTER, FSL_EIS_SPEFSCR
/*
 * Read the accumulator: with both operands zeroed, the
 * multiply-accumulate yields 0 * 0 + ACC, i.e. ACC, in
 * SCRATCH_5_REGISTER.
 */
evxor SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
evmwumiaa SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, SCRATCH_5_REGISTER
stw SCRATCH_0_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
evstdd SCRATCH_5_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif
#ifdef PPC_MULTILIB_ALTIVEC
/* Save volatile AltiVec context (v0..v19 and VSCR via v0) */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
stvx v0, r1, SCRATCH_0_REGISTER
mfvscr v0
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
stvx v1, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
stvx v2, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
stvx v3, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
stvx v4, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
stvx v5, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
stvx v6, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
stvx v7, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
stvx v8, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
stvx v9, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
stvx v10, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
stvx v11, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
stvx v12, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
stvx v13, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
stvx v14, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
stvx v15, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
stvx v16, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
stvx v17, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
stvx v18, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
stvx v19, r1, SCRATCH_0_REGISTER
/* v0 still holds the VSCR value read by mfvscr above */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
stvewx v0, r1, SCRATCH_0_REGISTER
#endif
#ifdef PPC_MULTILIB_FPU
/* Save volatile FPU context (f0..f13 and FPSCR via f0) */
stfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
mffs f0
stfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
stfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
stfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
stfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
stfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
stfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
stfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
stfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
stfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
stfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
stfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
stfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
stfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
/* f0 holds the FPSCR value read by mffs above */
stfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
#endif
/* Increment ISR nest level and thread dispatch disable level */
/*
 * CR0 records "previous nest level > 0"; it is consumed by the iselgt
 * stack-switch below (no instruction in between writes CR0).
 */
cmpwi SCRATCH_3_REGISTER, 0
#ifdef RTEMS_PROFILING
/* cr2 keeps the outermost-interrupt test alive across the calls below */
cmpwi cr2, SCRATCH_3_REGISTER, 0
#endif
addi SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
addi SCRATCH_4_REGISTER, SCRATCH_4_REGISTER, 1
stw SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_2_REGISTER)
stw SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_2_REGISTER)
/* Switch stack if necessary */
/*
 * NOTE(review): SPRG1 presumably holds the interrupt stack pointer --
 * confirm against the BSP start code.  For a nested interrupt
 * (previous nest level > 0) keep r1, otherwise take the SPRG1 value.
 */
mfspr SCRATCH_0_REGISTER, SPRG1
iselgt r1, r1, SCRATCH_0_REGISTER
/* Call fixed high level handler */
bl bsp_interrupt_dispatch
PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
#ifdef RTEMS_PROFILING
/* Update profiling data if necessary (outermost interrupt only) */
bne cr2, .Lprofiling_done
GET_SELF_CPU_CONTROL r3
lwz r4, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(FRAME_REGISTER)
GET_TIME_BASE r5
bl _Profiling_Outer_most_interrupt_entry_and_exit
PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
.Lprofiling_done:
#endif /* RTEMS_PROFILING */
/* Load some per-CPU variables */
GET_SELF_CPU_CONTROL SCRATCH_1_REGISTER
lbz SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
lwz SCRATCH_5_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
lwz SCRATCH_6_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
lwz SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)
/*
 * Switch back to original stack (FRAME_REGISTER == r1 if we are still
 * on the IRQ stack) and restore FRAME_REGISTER.
 */
mr r1, FRAME_REGISTER
PPC_REG_LOAD FRAME_REGISTER, FRAME_OFFSET(r1)
/* Decrement levels and determine thread dispatch state */
/*
 * SCRATCH_0 = (dispatch_needed ^ 1) | isr_dispatch_disable
 *           | (thread_dispatch_disable_level - 1)
 * The "or." sets CR0; the result is zero exactly when a thread
 * dispatch is needed and allowed, which the bne below tests.
 */
xori SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, 1
or SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
subi SCRATCH_4_REGISTER, SCRATCH_6_REGISTER, 1
or. SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_4_REGISTER
subi SCRATCH_3_REGISTER, SCRATCH_3_REGISTER, 1
/* Store thread dispatch disable and ISR nest levels */
stw SCRATCH_4_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
stw SCRATCH_3_REGISTER, PER_CPU_ISR_NEST_LEVEL(SCRATCH_1_REGISTER)
/*
 * Check thread dispatch necessary, ISR dispatch disable and thread
 * dispatch disable level.
 */
bne .Lthread_dispatch_done
/* Thread dispatch */
.Ldo_thread_dispatch:
/* Set ISR dispatch disable and thread dispatch disable level to one */
li SCRATCH_0_REGISTER, 1
stw SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
stw SCRATCH_0_REGISTER, PER_CPU_THREAD_DISPATCH_DISABLE_LEVEL(SCRATCH_1_REGISTER)
/*
 * Call _Thread_Do_dispatch(), this function will enable interrupts.
 * The r3 is SCRATCH_1_REGISTER.  The second argument in r4 is the MSR
 * value with external exceptions (MSR_EE) enabled.
 */
mfmsr r4
ori r4, r4, MSR_EE
bl _Thread_Do_dispatch
PPC64_NOP_FOR_LINKER_TOC_POINTER_RESTORE
/* Disable interrupts */
wrteei 0
/* SCRATCH_1_REGISTER is volatile, we must set it again */
GET_SELF_CPU_CONTROL SCRATCH_1_REGISTER
/* Check if we have to do the thread dispatch again */
lbz SCRATCH_0_REGISTER, PER_CPU_DISPATCH_NEEDED(SCRATCH_1_REGISTER)
cmpwi SCRATCH_0_REGISTER, 0
bne .Ldo_thread_dispatch
/* We are done with thread dispatching */
li SCRATCH_0_REGISTER, 0
stw SCRATCH_0_REGISTER, PER_CPU_ISR_DISPATCH_DISABLE(SCRATCH_1_REGISTER)
.Lthread_dispatch_done:
#ifdef PPC_MULTILIB_ALTIVEC
/* Restore volatile AltiVec context (VSCR first via v0, then v0..v19) */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
lvewx v0, r1, SCRATCH_0_REGISTER
mtvscr v0
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
lvx v0, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
lvx v1, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
lvx v2, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
lvx v3, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
lvx v4, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
lvx v5, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
lvx v6, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
lvx v7, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
lvx v8, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
lvx v9, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
lvx v10, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
lvx v11, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
lvx v12, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
lvx v13, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
lvx v14, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
lvx v15, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
lvx v16, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
lvx v17, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
lvx v18, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
lvx v19, r1, SCRATCH_0_REGISTER
#endif
#ifdef PPC_MULTILIB_FPU
/* Restore volatile FPU context (FPSCR first via f0, then f0..f13) */
lfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1)
mtfsf 0xff, f0
lfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1)
lfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1)
lfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1)
lfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1)
lfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1)
lfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1)
lfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1)
lfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1)
lfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1)
lfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1)
lfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1)
lfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1)
lfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1)
lfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1)
#endif
#ifdef __SPE__
/* Load SPEFSCR and ACC (moved to the SPRs after the GPR restores below) */
lwz SCRATCH_3_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1)
evldd SCRATCH_4_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif
/*
 * We must clear reservations here, since otherwise compare-and-swap
 * atomic operations with interrupts enabled may yield wrong results.
 * A compare-and-swap atomic operation is generated by the compiler
 * like this:
 *
 *   .L1:
 *     lwarx r9, r0, r3
 *     cmpw r9, r4
 *     bne- .L2
 *     stwcx. r5, r0, r3
 *     bne- .L1
 *   .L2:
 *
 * Consider the following scenario.  A thread is interrupted right
 * before the stwcx.  The interrupt updates the value using a
 * compare-and-swap sequence.  Everything is fine up to this point.
 * The interrupt performs now a compare-and-swap sequence which fails
 * with a branch to .L2.  The current processor has now a reservation.
 * The interrupt returns without further stwcx.  The thread updates the
 * value using the unrelated reservation of the interrupt.
 */
/* The store target is the already-saved FRAME_REGISTER slot, a don't-care */
li SCRATCH_0_REGISTER, FRAME_OFFSET
stwcx. SCRATCH_0_REGISTER, r1, SCRATCH_0_REGISTER
/* Load SRR0, SRR1, CR, XER, CTR, and LR */
PPC_REG_LOAD SCRATCH_5_REGISTER, SRR0_FRAME_OFFSET(r1)
PPC_REG_LOAD SCRATCH_6_REGISTER, SRR1_FRAME_OFFSET(r1)
lwz SCRATCH_7_REGISTER, EXC_CR_OFFSET(r1)
lwz SCRATCH_8_REGISTER, EXC_XER_OFFSET(r1)
PPC_REG_LOAD SCRATCH_9_REGISTER, EXC_CTR_OFFSET(r1)
PPC_REG_LOAD SCRATCH_10_REGISTER, EXC_LR_OFFSET(r1)
/* Restore volatile registers */
PPC_GPR_LOAD SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
#ifdef __powerpc64__
PPC_GPR_LOAD r2, GPR2_OFFSET(r1)
#endif
PPC_GPR_LOAD SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
PPC_GPR_LOAD SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
#ifdef __SPE__
/* Restore SPEFSCR and ACC (evmra writes the accumulator) */
mtspr FSL_EIS_SPEFSCR, SCRATCH_3_REGISTER
evmra SCRATCH_4_REGISTER, SCRATCH_4_REGISTER
#endif
/* Restore volatile registers */
PPC_GPR_LOAD SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
PPC_GPR_LOAD SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
/* Restore SRR0, SRR1, CR, CTR, XER, and LR plus volatile registers */
mtsrr0 SCRATCH_5_REGISTER
PPC_GPR_LOAD SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
mtsrr1 SCRATCH_6_REGISTER
PPC_GPR_LOAD SCRATCH_6_REGISTER, SCRATCH_6_OFFSET(r1)
mtcr SCRATCH_7_REGISTER
PPC_GPR_LOAD SCRATCH_7_REGISTER, SCRATCH_7_OFFSET(r1)
mtxer SCRATCH_8_REGISTER
PPC_GPR_LOAD SCRATCH_8_REGISTER, SCRATCH_8_OFFSET(r1)
mtctr SCRATCH_9_REGISTER
PPC_GPR_LOAD SCRATCH_9_REGISTER, SCRATCH_9_OFFSET(r1)
mtlr SCRATCH_10_REGISTER
PPC_GPR_LOAD SCRATCH_10_REGISTER, SCRATCH_10_OFFSET(r1)
/* Pop stack */
addi r1, r1, PPC_EXC_INTERRUPT_FRAME_SIZE
/* Return */
rfi
/* Symbol provided for debugging and tracing */
ppc_exc_interrupt_end:
#endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */
|