/* SPDX-License-Identifier: BSD-2-Clause */

/**
 * @file
 *
 * @ingroup RTEMSScoreCPUAArch64
 *
 * @brief Implementation of the AArch64 exception vector table.
 *
 * This file implements the AArch64 exception vector table and its embedded
 * jump handlers along with the code necessary to call higher-level C
 * handlers.
 */

/*
 * Copyright (C) 2020 On-Line Applications Research Corporation (OAR)
 * Written by Kinsey Moore <kinsey.moore@oarcorp.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <rtems/asm.h>

.extern _AArch64_Exception_default

.globl	bsp_start_vector_table_begin
.globl	bsp_start_vector_table_end
.globl	bsp_start_vector_table_size
.globl	bsp_vector_table_size

.section ".text"

/*
 * This is the exception vector table and the pointers to the default
 * exception handlers. Each vector in the table has space for up to 32
 * instructions. The space of the last two instruction slots in each vector
 * is used for the exception handler pointer.
 *
 * The operation of all exceptions is as follows:
 * * An exception occurs
 * * A vector is chosen based on the exception type and machine state
 * * Execution begins at the chosen vector
 * * X0 and LR are pushed onto the current stack
 * * An unconditional branch and link is taken to the next instruction to get
 *   the PC
 * * The exception handler pointer (EHP) is retrieved from the current vector using
 *   the PC
 * * Branch and link to the EHP
 * * X0 and LR are popped from the current stack after returning from the EHP
 * * The exception returns to the previous execution state
 */
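
/*
 * Layout of each vector, implied by the code below: the table is
 * 0x800-aligned and each vector is 0x80 bytes (32 instruction slots).
 * Offsets 0x00-0x77 hold the entry code and nop padding; offsets 0x78-0x7f
 * hold the exception handler pointer loaded by JUMP_HANDLER.
 */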

/*
 * TODO(kmoore) The current implementation here assumes that SP is not
 * misaligned.
 */
	.macro	JUMP_HANDLER_SHORT
/* Mask to use in BIC, lower 7 bits */
	mov x0, #0x7f
/* LR contains PC, mask off to the base of the current vector */
	bic x0,	lr,	x0
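/*
 * Example: an exception entering through curr_el_spx_irq leaves
 * LR = curr_el_spx_irq + 8 after the bl; the vectors are 0x80-aligned, so
 * clearing the low 7 bits recovers the vector base address.
 */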
/* Load the handler address from the last 8 bytes of the vector (0x78) */
	ldr x0,	[x0,	#0x78]
/*
 * Branch and link to the address in x0. There is no reason to save the current
 * LR since it has already been saved and the current contents are junk.
 */
	blr x0
/* Pop x0,lr from stack */
	ldp x0,	lr,	[sp],	#0x10
/* Return from exception */
	eret
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	.endm

	.macro	JUMP_HANDLER
	JUMP_HANDLER_SHORT
	nop
	.endm
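
/*
 * Each JUMP_TARGET_* macro emits the 8-byte exception handler pointer that
 * fills the last two instruction slots of a vector. Under ILP32 pointers
 * are only 4 bytes, so a zero word pads the slot out to 8 bytes for the
 * 64-bit load in JUMP_HANDLER.
 */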

	.macro	JUMP_TARGET_SP0
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
	.word .print_exception_dump_sp0
	.word 0x0
#else
	.dword .print_exception_dump_sp0
#endif
	.endm

	.macro	JUMP_TARGET_SPx
/* Takes up the space of 2 instructions */
#ifdef AARCH64_MULTILIB_ARCH_V8_ILP32
	.word .print_exception_dump_spx
	.word 0x0
#else
	.dword .print_exception_dump_spx
#endif
	.endm

bsp_start_vector_table_begin:
.balign 0x800
Vector_table_el3:
/*
 * The exception handler for synchronous exceptions from the current EL
 * using SP0.
 */
curr_el_sp0_sync:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_sp0_sync_get_pc	/* Get current execution address */
curr_el_sp0_sync_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SP0
.balign 0x80
/* The exception handler for IRQ exceptions from the current EL using SP0. */
curr_el_sp0_irq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_sp0_irq_get_pc	/* Get current execution address */
curr_el_sp0_irq_get_pc:			/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SP0
.balign 0x80
/* The exception handler for FIQ exceptions from the current EL using SP0. */
curr_el_sp0_fiq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_sp0_fiq_get_pc	/* Get current execution address */
curr_el_sp0_fiq_get_pc:			/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * SP0.
 */
curr_el_sp0_serror:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_sp0_serror_get_pc	/* Get current execution address */
curr_el_sp0_serror_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SP0
.balign 0x80
/*
 * The exception handler for synchronous exceptions from the current EL using
 * the current SP.
 */
curr_el_spx_sync:
/* Disable the MMU and caches by clearing SCTLR_EL1 */
	msr SCTLR_EL1, XZR
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_spx_sync_get_pc	/* Get current execution address */
curr_el_spx_sync_get_pc:		/* The current PC is now in LR */
/* Use the short jump handler since the SCTLR clear above costs one slot */
	JUMP_HANDLER_SHORT
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for IRQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_irq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_spx_irq_get_pc	/* Get current execution address */
curr_el_spx_irq_get_pc:			/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for FIQ exceptions from the current EL using the
 * current SP.
 */
curr_el_spx_fiq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_spx_fiq_get_pc	/* Get current execution address */
curr_el_spx_fiq_get_pc:			/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from the current EL using
 * the current SP.
 */
curr_el_spx_serror:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl curr_el_spx_serror_get_pc	/* Get current execution address */
curr_el_spx_serror_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for synchronous exceptions from a lower EL (AArch64).
 */
lower_el_aarch64_sync:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch64_sync_get_pc	/* Get current execution address */
lower_el_aarch64_sync_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for IRQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_irq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch64_irq_get_pc	/* Get current execution address */
lower_el_aarch64_irq_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for FIQ exceptions from a lower EL (AArch64). */
lower_el_aarch64_fiq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch64_fiq_get_pc	/* Get current execution address */
lower_el_aarch64_fiq_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for system error exceptions from a lower EL
 * (AArch64).
 */
lower_el_aarch64_serror:
/* Push x0,lr on to the stack */
	stp x0,	lr,	[sp, #-0x10]!
/* Get current execution address */
	bl lower_el_aarch64_serror_get_pc
lower_el_aarch64_serror_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for synchronous exceptions from a lower EL
 * (AArch32).
 */
lower_el_aarch32_sync:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch32_sync_get_pc	/* Get current execution address */
lower_el_aarch32_sync_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the IRQ exception from a lower EL (AArch32). */
lower_el_aarch32_irq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch32_irq_get_pc	/* Get current execution address */
lower_el_aarch32_irq_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/* The exception handler for the FIQ exception from a lower EL (AArch32). */
lower_el_aarch32_fiq:
	stp x0,	lr,	[sp, #-0x10]!	/* Push x0,lr on to the stack */
	bl lower_el_aarch32_fiq_get_pc	/* Get current execution address */
lower_el_aarch32_fiq_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx
.balign 0x80
/*
 * The exception handler for the system error exception from a lower EL
 * (AArch32).
 */
lower_el_aarch32_serror:
/* Push x0,lr on to the stack */
	stp x0,	lr,	[sp, #-0x10]!
/* Get current execution address */
	bl lower_el_aarch32_serror_get_pc
lower_el_aarch32_serror_get_pc:		/* The current PC is now in LR */
	JUMP_HANDLER
	JUMP_TARGET_SPx

bsp_start_vector_table_end:

	.set	bsp_start_vector_table_size, bsp_start_vector_table_end - bsp_start_vector_table_begin
	.set	bsp_vector_table_size, bsp_start_vector_table_size
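
/* The full table is 16 vectors of 0x80 bytes each, 0x800 bytes in total. */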

/*
 * This involves switching a few things around. The real x0 and LR are on the
 * SPx stack and need to be retrieved, while LR on entry contains a pointer
 * into the AArch64 vector table.
 */
.print_exception_dump_spx:
/* Switch to exception stack (SP0) */
	msr spsel, #0
/* Save space for exception context */
	sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push the exception vector number; LR currently points into the actual
 * exception vector table
 */
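/*
 * The table is 0x800-aligned and each vector is 0x80 bytes, so bits [10:7]
 * of LR select the vector; mask and shift to get the vector number (0-15)
 */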
	and lr, lr, #0x780
	lsr lr, lr, #7
	str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/* Pop x0,lr from stack, saved by generic handler */
/*
 * This moves the stack pointer back to its pre-vector-handler value, which
 * is safe because this code never returns
 */
	msr spsel, #1			/* Switch back to the SPx stack */
	ldp x0,	lr, [sp], #0x10
	msr spsel, #0			/* Return to the exception stack (SP0) */
/* Save LR */
	str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
	bl .push_exception_context_start
/* Save original sp in x0 for .push_exception_context_finish */
	msr spsel, #1
	mov x0, sp
	msr spsel, #0
/* Push the remainder of the context */
	bl .push_exception_context_finish
/* Save sp (exception frame) into x0 for handler */
	mov x0, sp
/* Jump into the handler */
	bl _AArch64_Exception_default

	/* Just in case */
	b	twiddle

.print_exception_dump_sp0:
/* Save space for exception context */
	sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/*
 * Push the exception vector number; LR currently points into the actual
 * exception vector table
 */
	and lr, lr, #0x780
	lsr lr, lr, #7
	str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_VECTOR_OFFSET]
/*
 * Get x0,lr from the stack, saved by the generic handler; they sit just
 * above the frame reserved here, so temporarily undo the frame reservation
 */
	add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
	ldp x0,	lr, [sp]
	sub sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE
/* Save LR */
	str lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Push the start of the context */
	bl .push_exception_context_start
/*
 * Save the original sp in x0 for .push_exception_context_finish; the extra
 * 0x10 skips the x0/LR pair pushed by the vector code
 */
	add x0, sp, #(AARCH64_EXCEPTION_FRAME_SIZE + 0x10)
/* Push the remainder of the context */
	bl .push_exception_context_finish
/* Save sp (exception frame) into x0 for handler */
	mov x0, sp
/* Jump into the handler */
	bl _AArch64_Exception_default

	/* Just in case */
twiddle:
	b	twiddle

/* Assumes SP is at the base of the context and LR has already been pushed */
.push_exception_context_start:
/* Push x0-x29(fp) */
	stp x0,  x1,  [sp, #0x00]
	stp x2,  x3,  [sp, #0x10]
	stp x4,  x5,  [sp, #0x20]
	stp x6,  x7,  [sp, #0x30]
	stp x8,  x9,  [sp, #0x40]
	stp x10, x11, [sp, #0x50]
	stp x12, x13, [sp, #0x60]
	stp x14, x15, [sp, #0x70]
	stp x16, x17, [sp, #0x80]
	stp x18, x19, [sp, #0x90]
	stp x20, x21, [sp, #0xa0]
	stp x22, x23, [sp, #0xb0]
	stp x24, x25, [sp, #0xc0]
	stp x26, x27, [sp, #0xd0]
	stp x28, x29, [sp, #0xe0]
	ret

/* Expects original SP to be stored in x0 */
.push_exception_context_finish:
/* Get exception LR for PC */
	mrs x1, ELR_EL1
/* Push sp and pc */
	stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
/* Get daif and spsr */
	mrs x0, DAIF
	mrs x1, SPSR_EL1
/* Push daif and spsr */
	stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Get ESR and FAR */
	mrs x0, ESR_EL1
	mrs x1, FAR_EL1
/* Push ESR and FAR */
	stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Get fpsr and fpcr */
	mrs x0, FPSR
	mrs x1, FPCR
/* Push fpsr and fpcr */
	stp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Push VFP registers */
	stp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
	stp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
	stp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
	stp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
	stp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
	stp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
	stp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
	stp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
	stp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
	stp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
	stp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
	stp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
	stp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
	stp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
	stp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
	stp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Done, return to exception handler */
	ret

/*
 * Apply the exception frame to the current register state; SP points to the
 * exception frame
 */
.pop_exception_context_and_ret:
/* Pop daif and spsr */
	ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_DAIF_OFFSET]
/* Restore daif and spsr */
	msr DAIF, x2
	msr SPSR_EL1, x3
/* Pop ESR and FAR */
	ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SYNDROME_OFFSET]
/* Restore ESR and FAR */
	msr ESR_EL1, x2
	msr FAR_EL1, x3
/* Pop fpsr and fpcr */
	ldp x2, x3, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_FPSR_OFFSET]
/* Restore fpsr and fpcr */
	msr FPSR, x2
	msr FPCR, x3
/* Restore LR */
	ldr lr, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_LR_OFFSET]
/* Pop VFP registers */
	ldp q0,  q1,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x000)]
	ldp q2,  q3,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x020)]
	ldp q4,  q5,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x040)]
	ldp q6,  q7,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x060)]
	ldp q8,  q9,  [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x080)]
	ldp q10, q11, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0a0)]
	ldp q12, q13, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0c0)]
	ldp q14, q15, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x0e0)]
	ldp q16, q17, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x100)]
	ldp q18, q19, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x120)]
	ldp q20, q21, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x140)]
	ldp q22, q23, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x160)]
	ldp q24, q25, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x180)]
	ldp q26, q27, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1a0)]
	ldp q28, q29, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1c0)]
	ldp q30, q31, [sp, #(AARCH64_EXCEPTION_FRAME_REGISTER_Q0_OFFSET + 0x1e0)]
/* Pop x0-x29(fp) */
	ldp x2,  x3,  [sp, #0x10]
	ldp x4,  x5,  [sp, #0x20]
	ldp x6,  x7,  [sp, #0x30]
	ldp x8,  x9,  [sp, #0x40]
	ldp x10, x11, [sp, #0x50]
	ldp x12, x13, [sp, #0x60]
	ldp x14, x15, [sp, #0x70]
	ldp x16, x17, [sp, #0x80]
	ldp x18, x19, [sp, #0x90]
	ldp x20, x21, [sp, #0xa0]
	ldp x22, x23, [sp, #0xb0]
	ldp x24, x25, [sp, #0xc0]
	ldp x26, x27, [sp, #0xd0]
	ldp x28, x29, [sp, #0xe0]
/* Pop sp (ignored, since sp is recomputed below anyway) and ELR */
	ldp x0, x1, [sp, #AARCH64_EXCEPTION_FRAME_REGISTER_SP_OFFSET]
/* Restore exception LR */
	msr ELR_EL1, x1
	ldp x0,  x1,  [sp, #0x00]
	add sp, sp, #AARCH64_EXCEPTION_FRAME_SIZE

/* We must clear reservations to ensure consistency with atomic operations */
	clrex

	ret