author     Sebastian Huber <sebastian.huber@embedded-brains.de>  2017-08-01 10:57:46 +0200
committer  Sebastian Huber <sebastian.huber@embedded-brains.de>  2017-08-22 16:26:19 +0200
commit     a6f84b275318dbd89ba0bfd12ff6df631a8ac4bc (patch)
tree       0105282863a5a9b538098ed88a5bd72ab799aa9c
parent     powerpc: 64-bit _CPU_Context_Initialize() support (diff)
powerpc: Add 64-bit context/interrupt support
Update #3082.
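
The heart of the change is a new family of register-width macros in <rtems/score/cpu.h> (see the cpu.h hunk below). Assembly code that previously hard-coded the 32-bit forms stw/lwz/stwu/cmpw now uses these names, so the same sources assemble for both 32-bit and 64-bit targets. A condensed sketch of the selection:

    #if defined(__powerpc64__)
      #define PPC_REG_SIZE         8
      #define PPC_REG_LOAD         ld    /* doubleword load */
      #define PPC_REG_STORE        std   /* doubleword store */
      #define PPC_REG_STORE_UPDATE stdu  /* store with stack pointer update */
      #define PPC_REG_CMP          cmpd
    #else
      #define PPC_REG_SIZE         4
      #define PPC_REG_LOAD         lwz
      #define PPC_REG_STORE        stw
      #define PPC_REG_STORE_UPDATE stwu
      #define PPC_REG_CMP          cmpw
    #endif

The PPC_GPR_* macros for the general-purpose register slots are widened the same way, while the SPE configuration keeps its existing 64-bit forms.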
-rw-r--r--  c/src/lib/libbsp/powerpc/qoriq/start/start.S                                110
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S    74
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_fatal.S           90
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c           84
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h                  2
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/cpu.c                                  6
-rw-r--r--  c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S                             37
-rw-r--r--  c/src/lib/libcpu/powerpc/shared/include/powerpc-utility.h                      5
-rw-r--r--  cpukit/score/cpu/powerpc/cpu.c                                                 9
-rw-r--r--  cpukit/score/cpu/powerpc/ppc-context-validate.S                              165
-rw-r--r--  cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S                       43
-rw-r--r--  cpukit/score/cpu/powerpc/rtems/score/cpu.h                                    75
-rw-r--r--  cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h                               152
-rw-r--r--  cpukit/score/cpu/powerpc/rtems/score/powerpc.h                                 6
14 files changed, 465 insertions, 393 deletions
diff --git a/c/src/lib/libbsp/powerpc/qoriq/start/start.S b/c/src/lib/libbsp/powerpc/qoriq/start/start.S
index 6be06e9273..a0c7cc7993 100644
--- a/c/src/lib/libbsp/powerpc/qoriq/start/start.S
+++ b/c/src/lib/libbsp/powerpc/qoriq/start/start.S
@@ -202,7 +202,7 @@ _start:
subi r1, START_STACK, 2 * PPC_DEFAULT_CACHE_LINE_SIZE
clrrwi r1, r1, PPC_DEFAULT_CACHE_LINE_POWER
li r0, 0
- stw r0, 0(r1)
+ PPC_REG_STORE r0, 0(r1)
#ifdef INITIALIZE_FPU
bl .Linitfpu
@@ -288,7 +288,7 @@ _start_thread:
subi r1, r3, PPC_MINIMUM_STACK_FRAME_SIZE
clrrwi r1, r1, PPC_STACK_ALIGN_POWER
li r0, 0
- stw r0, 0(r1)
+ PPC_REG_STORE r0, 0(r1)
#ifdef INITIALIZE_FPU
bl .Linitfpu
@@ -313,145 +313,145 @@ _start_secondary_processor:
.align 4
bsp_exc_vector_base:
/* Critical input */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 0
b ppc_exc_fatal_critical
/* Machine check */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 1
b ppc_exc_fatal_machine_check
/* Data storage */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 2
b ppc_exc_fatal_normal
/* Instruction storage */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 3
b ppc_exc_fatal_normal
/* External input */
- stwu r1, -PPC_EXC_INTERRUPT_FRAME_SIZE(r1)
+ PPC_REG_STORE_UPDATE r1, -PPC_EXC_INTERRUPT_FRAME_SIZE(r1)
b ppc_exc_interrupt
nop
nop
/* Alignment */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 5
b ppc_exc_fatal_normal
/* Program */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 6
b ppc_exc_fatal_normal
#ifdef __PPC_CPU_E6500__
/* Floating-point unavailable */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 7
b ppc_exc_fatal_normal
#endif
/* System call */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 8
b ppc_exc_fatal_normal
#ifdef __PPC_CPU_E6500__
/* APU unavailable */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 9
b ppc_exc_fatal_normal
#endif
/* Decrementer */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 10
b ppc_exc_fatal_normal
/* Fixed-interval timer interrupt */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 11
b ppc_exc_fatal_normal
/* Watchdog timer interrupt */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 12
b ppc_exc_fatal_critical
/* Data TLB error */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 13
b ppc_exc_fatal_normal
/* Instruction TLB error */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 14
b ppc_exc_fatal_normal
/* Debug */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 15
b ppc_exc_fatal_debug
/* SPE APU unavailable or AltiVec unavailable */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 32
b ppc_exc_fatal_normal
/* SPE floating-point data exception or AltiVec assist */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 33
b ppc_exc_fatal_normal
#ifndef __PPC_CPU_E6500__
/* SPE floating-point round exception */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 34
b ppc_exc_fatal_normal
#endif
/* Performance monitor */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 35
b ppc_exc_fatal_normal
#ifdef __PPC_CPU_E6500__
/* Processor doorbell interrupt */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 36
b ppc_exc_fatal_normal
/* Processor doorbell critical interrupt */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 37
b ppc_exc_fatal_critical
/* Guest processor doorbell */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 38
b ppc_exc_fatal_normal
/* Guest processor doorbell critical and machine check */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 39
b ppc_exc_fatal_critical
/* Hypervisor system call */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 40
b ppc_exc_fatal_normal
/* Hypervisor privilege */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 41
b ppc_exc_fatal_normal
/* LRAT error */
- stwu r1, -EXC_GENERIC_SIZE(r1)
- stw r3, GPR3_OFFSET(r1)
+ PPC_REG_STORE_UPDATE r1, -EXC_GENERIC_SIZE(r1)
+ PPC_REG_STORE r3, GPR3_OFFSET(r1)
li r3, 42
b ppc_exc_fatal_normal
#endif
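
As a rough illustration of the effect on the vector table above (assuming the PPC_REG_* definitions from the cpu.h hunk further down), a single prologue such as the data-storage entry now assembles to either of:

    /* 32-bit build (same instructions as before the patch) */
    stwu r1, -EXC_GENERIC_SIZE(r1)
    stw  r3, GPR3_OFFSET(r1)

    /* 64-bit build */
    stdu r1, -EXC_GENERIC_SIZE(r1)
    std  r3, GPR3_OFFSET(r1)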
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S
index 7a137a526d..c131bf0de0 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2011, 2016 embedded brains GmbH. All rights reserved.
+ * Copyright (c) 2011, 2017 embedded brains GmbH. All rights reserved.
*
* embedded brains GmbH
* Dornierstr. 4
@@ -40,19 +40,9 @@
#define SCRATCH_3_OFFSET GPR10_OFFSET
#define SCRATCH_4_OFFSET GPR11_OFFSET
#define SCRATCH_5_OFFSET GPR12_OFFSET
-
-/*
- * The register 2 slot is free, since this is the read-only small data anchor.
- */
-#define FRAME_OFFSET GPR2_OFFSET
+#define FRAME_OFFSET PPC_EXC_INTERRUPT_FRAME_OFFSET
#ifdef RTEMS_PROFILING
-/*
- * The CPU_INTERRUPT_FRAME_SIZE is enough to store this additional register.
- */
-#define ENTRY_INSTANT_REGISTER r15
-#define ENTRY_INSTANT_OFFSET GPR13_OFFSET
-
.macro GET_TIME_BASE REG
#if defined(__PPC_CPU_E6500__)
mfspr \REG, FSL_EIS_ATBL
@@ -88,17 +78,15 @@ ppc_exc_min_prolog_async_tmpl_normal:
ppc_exc_interrupt:
-#ifdef RTEMS_PROFILING
- /* Save non-volatile ENTRY_INSTANT_REGISTER */
- stw ENTRY_INSTANT_REGISTER, ENTRY_INSTANT_OFFSET(r1)
+ /* Save non-volatile FRAME_REGISTER */
+ PPC_REG_STORE FRAME_REGISTER, FRAME_OFFSET(r1)
+#ifdef RTEMS_PROFILING
/* Get entry instant */
- GET_TIME_BASE ENTRY_INSTANT_REGISTER
+ GET_TIME_BASE FRAME_REGISTER
+ stw FRAME_REGISTER, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(r1)
#endif /* RTEMS_PROFILING */
- /* Save non-volatile FRAME_REGISTER */
- stw FRAME_REGISTER, FRAME_OFFSET(r1)
-
#ifdef __SPE__
/* Enable SPE */
mfmsr FRAME_REGISTER
@@ -162,25 +150,29 @@ ppc_exc_interrupt:
lwzx HANDLER_REGISTER, HANDLER_REGISTER, SCRATCH_0_REGISTER
#endif /* PPC_EXC_CONFIG_USE_FIXED_HANDLER */
+#ifdef __powerpc64__
+ PPC_GPR_STORE r2, GPR2_OFFSET(r1)
+ LA32 r2, .TOC.
+#endif
PPC_GPR_STORE SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
PPC_GPR_STORE SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
PPC_GPR_STORE SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
PPC_GPR_STORE SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
PPC_GPR_STORE SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
- /* Save SRR0, SRR1, CR, CTR, XER, and LR */
+ /* Save SRR0, SRR1, CR, XER, CTR, and LR */
mfsrr0 SCRATCH_0_REGISTER
mfsrr1 SCRATCH_1_REGISTER
mfcr SCRATCH_2_REGISTER
- mfctr SCRATCH_3_REGISTER
- mfxer SCRATCH_4_REGISTER
+ mfxer SCRATCH_3_REGISTER
+ mfctr SCRATCH_4_REGISTER
mflr SCRATCH_5_REGISTER
- stw SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
- stw SCRATCH_1_REGISTER, SRR1_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_1_REGISTER, SRR1_FRAME_OFFSET(r1)
stw SCRATCH_2_REGISTER, EXC_CR_OFFSET(r1)
- stw SCRATCH_3_REGISTER, EXC_CTR_OFFSET(r1)
- stw SCRATCH_4_REGISTER, EXC_XER_OFFSET(r1)
- stw SCRATCH_5_REGISTER, EXC_LR_OFFSET(r1)
+ stw SCRATCH_3_REGISTER, EXC_XER_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_4_REGISTER, EXC_CTR_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_5_REGISTER, EXC_LR_OFFSET(r1)
#ifdef __SPE__
/* Save SPEFSCR and ACC */
@@ -292,7 +284,7 @@ ppc_exc_interrupt:
/* Update profiling data if necessary */
bne cr2, .Lprofiling_done
GET_SELF_CPU_CONTROL r3
- mr r4, ENTRY_INSTANT_REGISTER
+ lwz r4, PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET(FRAME_REGISTER)
GET_TIME_BASE r5
bl _Profiling_Outer_most_interrupt_entry_and_exit
.Lprofiling_done:
@@ -310,7 +302,7 @@ ppc_exc_interrupt:
* on the IRQ stack) and restore FRAME_REGISTER.
*/
mr r1, FRAME_REGISTER
- lwz FRAME_REGISTER, FRAME_OFFSET(r1)
+ PPC_REG_LOAD FRAME_REGISTER, FRAME_OFFSET(r1)
/* Decrement levels and determine thread dispatch state */
xori SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, 1
@@ -458,13 +450,13 @@ ppc_exc_interrupt:
li SCRATCH_0_REGISTER, FRAME_OFFSET
stwcx. SCRATCH_0_REGISTER, r1, SCRATCH_0_REGISTER
- /* Load SRR0, SRR1, CR, CTR, XER, and LR */
- lwz SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
- lwz SCRATCH_1_REGISTER, SRR1_FRAME_OFFSET(r1)
+ /* Load SRR0, SRR1, CR, XER, CTR, and LR */
+ PPC_REG_LOAD SCRATCH_0_REGISTER, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_LOAD SCRATCH_1_REGISTER, SRR1_FRAME_OFFSET(r1)
lwz SCRATCH_2_REGISTER, EXC_CR_OFFSET(r1)
- lwz SCRATCH_3_REGISTER, EXC_CTR_OFFSET(r1)
- lwz SCRATCH_4_REGISTER, EXC_XER_OFFSET(r1)
- lwz SCRATCH_5_REGISTER, EXC_LR_OFFSET(r1)
+ lwz SCRATCH_3_REGISTER, EXC_XER_OFFSET(r1)
+ PPC_REG_LOAD SCRATCH_4_REGISTER, EXC_CTR_OFFSET(r1)
+ PPC_REG_LOAD SCRATCH_5_REGISTER, EXC_LR_OFFSET(r1)
PPC_GPR_LOAD VECTOR_REGISTER, VECTOR_OFFSET(r1)
PPC_GPR_LOAD SELF_CPU_REGISTER, SELF_CPU_OFFSET(r1)
@@ -486,21 +478,19 @@ ppc_exc_interrupt:
mtsrr0 SCRATCH_0_REGISTER
PPC_GPR_LOAD SCRATCH_0_REGISTER, SCRATCH_0_OFFSET(r1)
mtsrr1 SCRATCH_1_REGISTER
+#ifdef __powerpc64__
+ PPC_GPR_LOAD r2, GPR2_OFFSET(r1)
+#endif
PPC_GPR_LOAD SCRATCH_1_REGISTER, SCRATCH_1_OFFSET(r1)
mtcr SCRATCH_2_REGISTER
PPC_GPR_LOAD SCRATCH_2_REGISTER, SCRATCH_2_OFFSET(r1)
- mtctr SCRATCH_3_REGISTER
+ mtxer SCRATCH_3_REGISTER
PPC_GPR_LOAD SCRATCH_3_REGISTER, SCRATCH_3_OFFSET(r1)
- mtxer SCRATCH_4_REGISTER
+ mtctr SCRATCH_4_REGISTER
PPC_GPR_LOAD SCRATCH_4_REGISTER, SCRATCH_4_OFFSET(r1)
mtlr SCRATCH_5_REGISTER
PPC_GPR_LOAD SCRATCH_5_REGISTER, SCRATCH_5_OFFSET(r1)
-#ifdef RTEMS_PROFILING
- /* Restore ENTRY_INSTANT_REGISTER */
- lwz ENTRY_INSTANT_REGISTER, ENTRY_INSTANT_OFFSET(r1)
-#endif /* RTEMS_PROFILING */
-
/* Pop stack */
addi r1, r1, PPC_EXC_INTERRUPT_FRAME_SIZE
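
The genuinely new logic in this file is the 64-bit TOC handling: on ppc64, r2 is the TOC pointer, so the interrupted context's r2 is saved into the frame, r2 is reloaded with the TOC base of the RTEMS image (the .TOC. linker symbol) via the new LA32 helper, and the saved value is restored on exit. The profiling entry instant likewise moved from a dedicated non-volatile register into a frame slot (PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET). A sketch of the TOC path with the intent spelled out (the instructions are the ones added above; the comments are mine):

    #ifdef __powerpc64__
            /* Preserve the interrupted context's TOC pointer */
            PPC_GPR_STORE   r2, GPR2_OFFSET(r1)
            /* Load the TOC base of the RTEMS image for handler calls */
            LA32            r2, .TOC.
    #endif
            /* ... interrupt processing ... */
    #ifdef __powerpc64__
            /* Give the interrupted context its TOC pointer back */
            PPC_GPR_LOAD    r2, GPR2_OFFSET(r1)
    #endif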
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_fatal.S b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_fatal.S
index 31774a792a..0bfba57352 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_fatal.S
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_fatal.S
@@ -26,80 +26,80 @@
ppc_exc_fatal_critical:
- stw SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
mfcsrr0 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
mfcsrr1 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
b .Lppc_exc_fatal
ppc_exc_fatal_machine_check:
- stw SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
mfmcsrr0 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
mfmcsrr1 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
b .Lppc_exc_fatal
ppc_exc_fatal_debug:
- stw SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
mfspr SCRATCH_REGISTER_1, BOOKE_DSRR0
- stw SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
mfspr SCRATCH_REGISTER_1, BOOKE_DSRR1
- stw SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
b .Lppc_exc_fatal
ppc_exc_fatal_normal:
- stw SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, GPR4_OFFSET(r1)
mfsrr0 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR0_FRAME_OFFSET(r1)
mfsrr1 SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, SRR1_FRAME_OFFSET(r1)
.Lppc_exc_fatal:
stw r3, EXCEPTION_NUMBER_OFFSET(r1)
mfcr SCRATCH_REGISTER_1
stw SCRATCH_REGISTER_1, EXC_CR_OFFSET(r1)
- mfctr SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
mfxer SCRATCH_REGISTER_1
stw SCRATCH_REGISTER_1, EXC_XER_OFFSET(r1)
+ mfctr SCRATCH_REGISTER_1
+ PPC_REG_STORE SCRATCH_REGISTER_1, EXC_CTR_OFFSET(r1)
mflr SCRATCH_REGISTER_1
- stw SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
- stw r0, GPR0_OFFSET(r1)
- stw r1, GPR1_OFFSET(r1)
- stw r2, GPR2_OFFSET(r1)
- stw r5, GPR5_OFFSET(r1)
- stw r6, GPR6_OFFSET(r1)
- stw r7, GPR7_OFFSET(r1)
- stw r8, GPR8_OFFSET(r1)
- stw r9, GPR9_OFFSET(r1)
- stw r10, GPR10_OFFSET(r1)
- stw r11, GPR11_OFFSET(r1)
- stw r12, GPR12_OFFSET(r1)
- stw r13, GPR13_OFFSET(r1)
- stw r14, GPR14_OFFSET(r1)
- stw r15, GPR15_OFFSET(r1)
- stw r16, GPR16_OFFSET(r1)
- stw r17, GPR17_OFFSET(r1)
- stw r18, GPR18_OFFSET(r1)
- stw r19, GPR19_OFFSET(r1)
- stw r20, GPR20_OFFSET(r1)
- stw r21, GPR21_OFFSET(r1)
- stw r22, GPR22_OFFSET(r1)
- stw r23, GPR23_OFFSET(r1)
- stw r24, GPR24_OFFSET(r1)
- stw r25, GPR25_OFFSET(r1)
- stw r26, GPR26_OFFSET(r1)
- stw r27, GPR27_OFFSET(r1)
- stw r28, GPR28_OFFSET(r1)
- stw r29, GPR29_OFFSET(r1)
- stw r30, GPR30_OFFSET(r1)
- stw r31, GPR31_OFFSET(r1)
+ PPC_REG_STORE SCRATCH_REGISTER_1, EXC_LR_OFFSET(r1)
+ PPC_REG_STORE r0, GPR0_OFFSET(r1)
+ PPC_REG_STORE r1, GPR1_OFFSET(r1)
+ PPC_REG_STORE r2, GPR2_OFFSET(r1)
+ PPC_REG_STORE r5, GPR5_OFFSET(r1)
+ PPC_REG_STORE r6, GPR6_OFFSET(r1)
+ PPC_REG_STORE r7, GPR7_OFFSET(r1)
+ PPC_REG_STORE r8, GPR8_OFFSET(r1)
+ PPC_REG_STORE r9, GPR9_OFFSET(r1)
+ PPC_REG_STORE r10, GPR10_OFFSET(r1)
+ PPC_REG_STORE r11, GPR11_OFFSET(r1)
+ PPC_REG_STORE r12, GPR12_OFFSET(r1)
+ PPC_REG_STORE r13, GPR13_OFFSET(r1)
+ PPC_REG_STORE r14, GPR14_OFFSET(r1)
+ PPC_REG_STORE r15, GPR15_OFFSET(r1)
+ PPC_REG_STORE r16, GPR16_OFFSET(r1)
+ PPC_REG_STORE r17, GPR17_OFFSET(r1)
+ PPC_REG_STORE r18, GPR18_OFFSET(r1)
+ PPC_REG_STORE r19, GPR19_OFFSET(r1)
+ PPC_REG_STORE r20, GPR20_OFFSET(r1)
+ PPC_REG_STORE r21, GPR21_OFFSET(r1)
+ PPC_REG_STORE r22, GPR22_OFFSET(r1)
+ PPC_REG_STORE r23, GPR23_OFFSET(r1)
+ PPC_REG_STORE r24, GPR24_OFFSET(r1)
+ PPC_REG_STORE r25, GPR25_OFFSET(r1)
+ PPC_REG_STORE r26, GPR26_OFFSET(r1)
+ PPC_REG_STORE r27, GPR27_OFFSET(r1)
+ PPC_REG_STORE r28, GPR28_OFFSET(r1)
+ PPC_REG_STORE r29, GPR29_OFFSET(r1)
+ PPC_REG_STORE r30, GPR30_OFFSET(r1)
+ PPC_REG_STORE r31, GPR31_OFFSET(r1)
/* Enable FPU and/or AltiVec */
#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC)
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c
index 79b2f69c36..5eb8d4dd88 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c
@@ -67,7 +67,7 @@ void BSP_printStackTrace(const BSP_Exception_frame *excPtr)
printk("Stack Trace: \n ");
if (excPtr) {
- printk("IP: 0x%08" PRIu32 ", ", excPtr->EXC_SRR0);
+ printk("IP: 0x%08" PRIxPTR ", ", excPtr->EXC_SRR0);
sp = (LRFrame) GET_GPR(excPtr->GPR1);
lr = (void *) excPtr->EXC_LR;
} else {
@@ -75,9 +75,9 @@ void BSP_printStackTrace(const BSP_Exception_frame *excPtr)
__asm__ __volatile__("mr %0, 1":"=r"(sp));
lr = (LRFrame) ppc_link_register();
}
- printk("LR: 0x%08" PRIuPTR "\n", (uintptr_t) lr);
+ printk("LR: 0x%08" PRIxPTR "\n", (uintptr_t) lr);
for (f = (LRFrame) sp, i = 0; f->frameLink && i < STACK_CLAMP; f = f->frameLink) {
- printk("--^ 0x%08" PRIuPTR "", (uintptr_t) (f->frameLink->lr));
+ printk("--^ 0x%08" PRIxPTR "", (uintptr_t) (f->frameLink->lr));
if (!(++i % 5))
printk("\n");
}
@@ -96,8 +96,8 @@ void _CPU_Exception_frame_print(const CPU_Exception_frame *excPtr)
unsigned n = excPtr->_EXC_number & 0x7fff;
printk("exception vector %d (0x%x)\n", n, n);
- printk(" next PC or address of fault = 0x%08" PRIx32 "\n", excPtr->EXC_SRR0);
- printk(" saved MSR = 0x%08" PRIx32 "\n", excPtr->EXC_SRR1);
+ printk(" next PC or address of fault = 0x%08" PRIxPTR "\n", excPtr->EXC_SRR0);
+ printk(" saved MSR = 0x%08" PRIxPTR "\n", excPtr->EXC_SRR1);
/* Try to find out more about the context where this happened */
printk(
@@ -112,51 +112,51 @@ void _CPU_Exception_frame_print(const CPU_Exception_frame *excPtr)
/* Dump registers */
- printk(" R0 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR0));
+ printk(" R0 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR0));
if (synch) {
- printk(" R1 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR1));
- printk(" R2 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR2));
+ printk(" R1 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR1));
+ printk(" R2 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR2));
} else {
printk(" ");
printk(" ");
}
- printk(" R3 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR3));
- printk(" R4 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR4));
- printk(" R5 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR5));
- printk(" R6 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR6));
- printk(" R7 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR7));
- printk(" R8 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR8));
- printk(" R9 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR9));
- printk(" R10 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR10));
- printk(" R11 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR11));
- printk(" R12 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR12));
+ printk(" R3 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR3));
+ printk(" R4 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR4));
+ printk(" R5 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR5));
+ printk(" R6 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR6));
+ printk(" R7 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR7));
+ printk(" R8 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR8));
+ printk(" R9 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR9));
+ printk(" R10 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR10));
+ printk(" R11 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR11));
+ printk(" R12 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR12));
if (synch) {
- printk(" R13 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR13));
- printk(" R14 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR14));
- printk(" R15 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR15));
- printk(" R16 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR16));
- printk(" R17 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR17));
- printk(" R18 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR18));
- printk(" R19 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR19));
- printk(" R20 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR20));
- printk(" R21 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR21));
- printk(" R22 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR22));
- printk(" R23 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR23));
- printk(" R24 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR24));
- printk(" R25 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR25));
- printk(" R26 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR26));
- printk(" R27 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR27));
- printk(" R28 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR28));
- printk(" R29 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR29));
- printk(" R30 = 0x%08" PRIx32 "", GET_GPR(excPtr->GPR30));
- printk(" R31 = 0x%08" PRIx32 "\n", GET_GPR(excPtr->GPR31));
+ printk(" R13 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR13));
+ printk(" R14 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR14));
+ printk(" R15 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR15));
+ printk(" R16 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR16));
+ printk(" R17 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR17));
+ printk(" R18 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR18));
+ printk(" R19 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR19));
+ printk(" R20 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR20));
+ printk(" R21 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR21));
+ printk(" R22 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR22));
+ printk(" R23 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR23));
+ printk(" R24 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR24));
+ printk(" R25 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR25));
+ printk(" R26 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR26));
+ printk(" R27 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR27));
+ printk(" R28 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR28));
+ printk(" R29 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR29));
+ printk(" R30 = 0x%08" PRIxPTR "", GET_GPR(excPtr->GPR30));
+ printk(" R31 = 0x%08" PRIxPTR "\n", GET_GPR(excPtr->GPR31));
} else {
printk("\n");
}
printk(" CR = 0x%08" PRIx32 "\n", excPtr->EXC_CR);
- printk(" CTR = 0x%08" PRIx32 "\n", excPtr->EXC_CTR);
+ printk(" CTR = 0x%08" PRIxPTR "\n", excPtr->EXC_CTR);
printk(" XER = 0x%08" PRIx32 "\n", excPtr->EXC_XER);
- printk(" LR = 0x%08" PRIx32 "\n", excPtr->EXC_LR);
+ printk(" LR = 0x%08" PRIxPTR "\n", excPtr->EXC_LR);
/* Would be great to print DAR but unfortunately,
* that is not portable across different CPUs.
@@ -206,13 +206,13 @@ void _CPU_Exception_frame_print(const CPU_Exception_frame *excPtr)
#ifdef PPC_MULTILIB_FPU
{
- unsigned long long *f = (unsigned long long *) &excPtr->F0;
+ uint64_t *f = (uint64_t *) &excPtr->F0;
int i;
- printk("FPSCR = 0x%08llx\n", excPtr->FPSCR);
+ printk("FPSCR = 0x%08" PRIu64 "\n", excPtr->FPSCR);
for (i = 0; i < 32; ++i) {
- printk(" F%02i = 0x%016llx\n", i, f[i]);
+ printk(" F%02i = 0x%016" PRIu64 "\n", i, f[i]);
}
}
#endif
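
The printk changes in this file follow from the exception-frame fields widening to uintptr_t: a PRIx32 conversion no longer matches the argument width on a 64-bit build (and PRIu32 was additionally printing decimal digits after a 0x prefix). A minimal, standalone illustration of the portable pattern, using printf instead of RTEMS printk and a made-up value:

    #include <inttypes.h>
    #include <stdio.h>

    int main(void)
    {
      uintptr_t srr0 = (uintptr_t) 0xdeadbeef;  /* hypothetical register value */

      /* PRIxPTR matches uintptr_t on both 32-bit and 64-bit PowerPC */
      printf("IP: 0x%08" PRIxPTR "\n", srr0);
      return 0;
    }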
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h
index 3068ec2c0d..56c9e64991 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h
@@ -158,7 +158,7 @@ extern "C" {
*/
#define LINK_REGISTER_CALLEE_UPDATE_ROOM 4
-#define EXC_GENERIC_SIZE PPC_EXC_FRAME_SIZE
+#define EXC_GENERIC_SIZE (PPC_EXC_FRAME_SIZE + PPC_STACK_RED_ZONE_SIZE)
#define PPC_EXC_INTERRUPT_FRAME_SIZE CPU_INTERRUPT_FRAME_SIZE
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c b/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c
index bee5eb2091..ae5065daa4 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c
@@ -129,10 +129,6 @@ void _CPU_Context_Initialize(
if ( tls_area != NULL ) {
void *tls_block = _TLS_TCB_before_TLS_block_initialize( tls_area );
- the_ppc_context->gpr2 = (uint32_t) tls_block + 0x7000;
- } else {
- register uint32_t gpr2 __asm__("2");
-
- the_ppc_context->gpr2 = gpr2;
+ the_ppc_context->tp = (uintptr_t) tls_block + 0x7000;
}
}
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
index 1289813f2e..cdbf403dce 100644
--- a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
+++ b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S
@@ -23,7 +23,7 @@
* COPYRIGHT (c) 1989-1997.
* On-Line Applications Research Corporation (OAR).
*
- * Copyright (c) 2011, 2016 embedded brains GmbH
+ * Copyright (c) 2011, 2017 embedded brains GmbH
*
* The license and distribution terms for this file may in
* the file LICENSE in this distribution or at
@@ -267,8 +267,8 @@ PROC (_CPU_Context_switch):
GET_SELF_CPU_CONTROL r12
mfmsr r6
- mflr r7
- mfcr r8
+ mfcr r7
+ mflr r8
lwz r11, PER_CPU_ISR_DISPATCH_DISABLE(r12)
/*
@@ -287,10 +287,11 @@ PROC (_CPU_Context_switch):
stwcx. r1, r3, r10
#endif
- stw r1, PPC_CONTEXT_OFFSET_GPR1(r3)
stw r6, PPC_CONTEXT_OFFSET_MSR(r3)
- stw r7, PPC_CONTEXT_OFFSET_LR(r3)
- stw r8, PPC_CONTEXT_OFFSET_CR(r3)
+ stw r7, PPC_CONTEXT_OFFSET_CR(r3)
+ PPC_REG_STORE r1, PPC_CONTEXT_OFFSET_GPR1(r3)
+ PPC_REG_STORE r8, PPC_CONTEXT_OFFSET_LR(r3)
+
PPC_GPR_STORE r14, PPC_CONTEXT_OFFSET_GPR14(r3)
PPC_GPR_STORE r15, PPC_CONTEXT_OFFSET_GPR15(r3)
@@ -439,10 +440,10 @@ restore_context:
bl _CPU_Context_switch_altivec
#endif
- lwz r1, PPC_CONTEXT_OFFSET_GPR1(r5)
lwz r6, PPC_CONTEXT_OFFSET_MSR(r5)
- lwz r7, PPC_CONTEXT_OFFSET_LR(r5)
- lwz r8, PPC_CONTEXT_OFFSET_CR(r5)
+ lwz r7, PPC_CONTEXT_OFFSET_CR(r5)
+ PPC_REG_LOAD r1, PPC_CONTEXT_OFFSET_GPR1(r5)
+ PPC_REG_LOAD r8, PPC_CONTEXT_OFFSET_LR(r5)
PPC_GPR_LOAD r14, PPC_CONTEXT_OFFSET_GPR14(r5)
PPC_GPR_LOAD r15, PPC_CONTEXT_OFFSET_GPR15(r5)
@@ -469,7 +470,11 @@ restore_context:
PPC_GPR_LOAD r30, PPC_CONTEXT_OFFSET_GPR30(r5)
PPC_GPR_LOAD r31, PPC_CONTEXT_OFFSET_GPR31(r5)
- lwz r2, PPC_CONTEXT_OFFSET_GPR2(r5)
+#ifdef __powerpc64__
+ ld r13, PPC_CONTEXT_OFFSET_TP(r5)
+#else
+ lwz r2, PPC_CONTEXT_OFFSET_TP(r5)
+#endif
lwz r11, PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE(r5)
#ifdef PPC_MULTILIB_ALTIVEC
@@ -522,8 +527,8 @@ restore_context:
lfd f31, PPC_CONTEXT_OFFSET_F31(r5)
#endif
- mtcr r8
- mtlr r7
+ mtlr r8
+ mtcr r7
mtmsr r6
stw r11, PER_CPU_ISR_DISPATCH_DISABLE(r12)
@@ -552,14 +557,14 @@ PROC (_CPU_Context_restore):
/* We may have a new heir */
/* Read the executing and heir */
- lwz r7, PER_CPU_OFFSET_EXECUTING(r12)
- lwz r8, PER_CPU_OFFSET_HEIR(r12)
+ PPC_REG_LOAD r7, PER_CPU_OFFSET_EXECUTING(r12)
+ PPC_REG_LOAD r8, PER_CPU_OFFSET_HEIR(r12)
/*
* Update the executing only if necessary to avoid cache line
* monopolization.
*/
- cmpw r7, r8
+ PPC_REG_CMP r7, r8
beq .Lcheck_is_executing
/* Calculate the heir context pointer */
@@ -568,7 +573,7 @@ PROC (_CPU_Context_restore):
clrrwi r5, r4, PPC_DEFAULT_CACHE_LINE_POWER
/* Update the executing */
- stw r8, PER_CPU_OFFSET_EXECUTING(r12)
+ PPC_REG_STORE r8, PER_CPU_OFFSET_EXECUTING(r12)
b .Lcheck_is_executing
#endif
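
The gpr2 -> tp rename in the thread context and the r2/r13 split in restore_context reflect the different thread-pointer conventions: 32-bit PowerPC keeps the thread pointer in r2, while the 64-bit ABI keeps it in r13 (r2 being the TOC pointer there, and r13 the small-data anchor on 32-bit). The 0x7000 added in _CPU_Context_Initialize() is the usual PowerPC TLS bias, which lets 16-bit signed displacements reach the whole TLS block. The restore path from the hunk above, with comments added:

    #ifdef __powerpc64__
            /* 64-bit: the thread pointer lives in r13 */
            ld      r13, PPC_CONTEXT_OFFSET_TP(r5)
    #else
            /* 32-bit: the thread pointer lives in r2 */
            lwz     r2, PPC_CONTEXT_OFFSET_TP(r5)
    #endif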
diff --git a/c/src/lib/libcpu/powerpc/shared/include/powerpc-utility.h b/c/src/lib/libcpu/powerpc/shared/include/powerpc-utility.h
index 22cf46bea3..0fe340cf6e 100644
--- a/c/src/lib/libcpu/powerpc/shared/include/powerpc-utility.h
+++ b/c/src/lib/libcpu/powerpc/shared/include/powerpc-utility.h
@@ -873,6 +873,11 @@ void ShowBATS(void);
ori \reg, \reg, (\addr)@l
.endm
+.macro LA32 reg, addr
+ lis \reg, (\addr)@h
+ ori \reg, \reg, (\addr)@l
+.endm
+
.macro LWI reg, value
lis \reg, (\value)@h
ori \reg, \reg, (\value)@l
diff --git a/cpukit/score/cpu/powerpc/cpu.c b/cpukit/score/cpu/powerpc/cpu.c
index 9d653f79ef..e089239515 100644
--- a/cpukit/score/cpu/powerpc/cpu.c
+++ b/cpukit/score/cpu/powerpc/cpu.c
@@ -5,7 +5,7 @@
*/
/*
- * Copyright (C) 2009, 2016 embedded brains GmbH.
+ * Copyright (C) 2009, 2017 embedded brains GmbH.
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
@@ -52,7 +52,7 @@ PPC_ASSERT_OFFSET(gpr28, GPR28);
PPC_ASSERT_OFFSET(gpr29, GPR29);
PPC_ASSERT_OFFSET(gpr30, GPR30);
PPC_ASSERT_OFFSET(gpr31, GPR31);
-PPC_ASSERT_OFFSET(gpr2, GPR2);
+PPC_ASSERT_OFFSET(tp, TP);
PPC_ASSERT_OFFSET(isr_dispatch_disable, ISR_DISPATCH_DISABLE);
#ifdef RTEMS_SMP
@@ -169,10 +169,15 @@ PPC_EXC_ASSERT_CANONIC_OFFSET(GPR31);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET);
+PPC_EXC_MIN_ASSERT_OFFSET(
+ EXC_INTERRUPT_ENTRY_INSTANT,
+ PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET
+);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CR);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CTR);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_XER);
PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_LR);
+PPC_EXC_MIN_ASSERT_OFFSET(EXC_INTERRUPT_FRAME, PPC_EXC_INTERRUPT_FRAME_OFFSET);
#ifdef __SPE__
PPC_EXC_MIN_ASSERT_OFFSET(EXC_SPEFSCR, PPC_EXC_SPEFSCR_OFFSET);
PPC_EXC_MIN_ASSERT_OFFSET(EXC_ACC, PPC_EXC_ACC_OFFSET);
diff --git a/cpukit/score/cpu/powerpc/ppc-context-validate.S b/cpukit/score/cpu/powerpc/ppc-context-validate.S
index b34438a361..523707b157 100644
--- a/cpukit/score/cpu/powerpc/ppc-context-validate.S
+++ b/cpukit/score/cpu/powerpc/ppc-context-validate.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013-2015 embedded brains GmbH. All rights reserved.
+ * Copyright (c) 2013, 2017 embedded brains GmbH. All rights reserved.
*
* embedded brains GmbH
* Dornierstr. 4
@@ -20,8 +20,8 @@
#include <rtems/score/cpu.h>
#define LR_OFFSET 8
-#define CR_OFFSET 12
-#define OFFSET(i) ((i) * PPC_GPR_SIZE + 16)
+#define CR_OFFSET 16
+#define OFFSET(i) ((i) * PPC_GPR_SIZE + 32)
#define GPR14_OFFSET OFFSET(0)
#define GPR15_OFFSET OFFSET(1)
#define GPR16_OFFSET OFFSET(2)
@@ -100,29 +100,29 @@
_CPU_Context_validate:
/* Save */
- stwu r1, -FRAME_SIZE(r1)
+ PPC_REG_STORE_UPDATE r1, -FRAME_SIZE(r1)
mflr r4
- stw r4, LR_OFFSET(r1)
+ PPC_REG_STORE r4, LR_OFFSET(r1)
mfcr r4
stw r4, CR_OFFSET(r1)
- stw r14, GPR14_OFFSET(r1)
- stw r15, GPR15_OFFSET(r1)
- stw r16, GPR16_OFFSET(r1)
- stw r17, GPR17_OFFSET(r1)
- stw r18, GPR18_OFFSET(r1)
- stw r19, GPR19_OFFSET(r1)
- stw r20, GPR20_OFFSET(r1)
- stw r21, GPR21_OFFSET(r1)
- stw r22, GPR22_OFFSET(r1)
- stw r23, GPR23_OFFSET(r1)
- stw r24, GPR24_OFFSET(r1)
- stw r25, GPR25_OFFSET(r1)
- stw r26, GPR26_OFFSET(r1)
- stw r27, GPR27_OFFSET(r1)
- stw r28, GPR28_OFFSET(r1)
- stw r29, GPR29_OFFSET(r1)
- stw r30, GPR30_OFFSET(r1)
- stw r31, GPR31_OFFSET(r1)
+ PPC_REG_STORE r14, GPR14_OFFSET(r1)
+ PPC_REG_STORE r15, GPR15_OFFSET(r1)
+ PPC_REG_STORE r16, GPR16_OFFSET(r1)
+ PPC_REG_STORE r17, GPR17_OFFSET(r1)
+ PPC_REG_STORE r18, GPR18_OFFSET(r1)
+ PPC_REG_STORE r19, GPR19_OFFSET(r1)
+ PPC_REG_STORE r20, GPR20_OFFSET(r1)
+ PPC_REG_STORE r21, GPR21_OFFSET(r1)
+ PPC_REG_STORE r22, GPR22_OFFSET(r1)
+ PPC_REG_STORE r23, GPR23_OFFSET(r1)
+ PPC_REG_STORE r24, GPR24_OFFSET(r1)
+ PPC_REG_STORE r25, GPR25_OFFSET(r1)
+ PPC_REG_STORE r26, GPR26_OFFSET(r1)
+ PPC_REG_STORE r27, GPR27_OFFSET(r1)
+ PPC_REG_STORE r28, GPR28_OFFSET(r1)
+ PPC_REG_STORE r29, GPR29_OFFSET(r1)
+ PPC_REG_STORE r30, GPR30_OFFSET(r1)
+ PPC_REG_STORE r31, GPR31_OFFSET(r1)
#ifdef PPC_MULTILIB_FPU
stfd f14, F14_OFFSET(r1)
@@ -218,8 +218,12 @@ _CPU_Context_validate:
addi r26, r3, 21
addi r27, r3, 22
- /* GPR28 contains the GPR2 pattern */
+ /* GPR28 contains the TP pattern */
+#ifdef __powerpc64__
+ xor r28, r13, r3
+#else
xor r28, r2, r3
+#endif
/* GPR29 and CR are equal most of the time */
addi r29, r3, 24
@@ -330,101 +334,114 @@ check:
cmpw r4, r29
bne restore
addi r4, r3, 1
- cmpw r4, r5
+ PPC_REG_CMP r4, r5
bne restore
addi r4, r3, 2
- cmpw r4, r6
+ PPC_REG_CMP r4, r6
bne restore
addi r4, r3, 3
- cmpw r4, r7
+ PPC_REG_CMP r4, r7
bne restore
addi r4, r3, 4
- cmpw r4, r8
+ PPC_REG_CMP r4, r8
bne restore
addi r4, r3, 5
- cmpw r4, r9
+ PPC_REG_CMP r4, r9
bne restore
addi r4, r3, 6
- cmpw r4, r10
+ PPC_REG_CMP r4, r10
bne restore
addi r4, r3, 7
- cmpw r4, r11
+ PPC_REG_CMP r4, r11
bne restore
addi r4, r3, 8
- cmpw r4, r12
- bne restore
+ PPC_REG_CMP r4, r12
+ bne restore
+#ifdef __powerpc64__
+ lis r4, .TOC.@highest
+ ori r4, r4, .TOC.@higher
+ rldicr r4, r4, 32, 31
+ oris r4, r4, .TOC.@h
+ ori r4, r4, .TOC.@l
+ PPC_REG_CMP r4, r2
+#else
lis r4, _SDA_BASE_@h
ori r4, r4, _SDA_BASE_@l
- cmpw r4, r13
+ PPC_REG_CMP r4, r13
+#endif
bne restore
addi r4, r3, 9
- cmpw r4, r14
+ PPC_REG_CMP r4, r14
bne restore
addi r4, r3, 10
- cmpw r4, r15
+ PPC_REG_CMP r4, r15
bne restore
addi r4, r3, 11
- cmpw r4, r16
+ PPC_REG_CMP r4, r16
bne restore
addi r4, r3, 12
- cmpw r4, r17
+ PPC_REG_CMP r4, r17
bne restore
addi r4, r3, 13
- cmpw r4, r18
+ PPC_REG_CMP r4, r18
bne restore
addi r4, r3, 14
- cmpw r4, r19
+ PPC_REG_CMP r4, r19
bne restore
addi r4, r3, 15
- cmpw r4, r20
+ PPC_REG_CMP r4, r20
bne restore
addi r4, r3, 16
- cmpw r4, r21
+ PPC_REG_CMP r4, r21
bne restore
addi r4, r3, 17
- cmpw r4, r22
+ PPC_REG_CMP r4, r22
bne restore
addi r4, r3, 18
- cmpw r4, r23
+ PPC_REG_CMP r4, r23
bne restore
addi r4, r3, 19
- cmpw r4, r24
+ PPC_REG_CMP r4, r24
bne restore
addi r4, r3, 20
- cmpw r4, r25
+ PPC_REG_CMP r4, r25
bne restore
addi r4, r3, 21
- cmpw r4, r26
+ PPC_REG_CMP r4, r26
bne restore
addi r4, r3, 22
- cmpw r4, r27
+ PPC_REG_CMP r4, r27
bne restore
+#ifdef __powerpc64__
+ xor r4, r13, r3
+#else
xor r4, r2, r3
- cmpw r4, r28
+#endif
+ PPC_REG_CMP r4, r28
bne restore
addi r4, r3, 24
- cmpw r4, r29
+ PPC_REG_CMP r4, r29
bne restore
mfmsr r4
xor r4, r4, r3
- cmpw r4, r30
+ PPC_REG_CMP r4, r30
bne restore
addi r4, r3, 25
mflr r5
- cmpw r4, r5
+ PPC_REG_CMP r4, r5
bne restore
addi r4, r3, 26
mfctr r5
- cmpw r4, r5
+ PPC_REG_CMP r4, r5
bne restore
rlwinm r4, r3, 0, 25, 2
mfxer r5
cmpw r4, r5
bne restore
addi r4, r3, 28
- cmpw r4, r0
+ PPC_REG_CMP r4, r0
bne restore
- cmpw r31, r1
+ PPC_REG_CMP r31, r1
bne restore
#ifdef PPC_MULTILIB_FPU
@@ -614,27 +631,27 @@ restore:
lfd f14, F14_OFFSET(r1)
#endif
- lwz r31, GPR31_OFFSET(r1)
- lwz r30, GPR30_OFFSET(r1)
- lwz r29, GPR29_OFFSET(r1)
- lwz r28, GPR28_OFFSET(r1)
- lwz r27, GPR27_OFFSET(r1)
- lwz r26, GPR26_OFFSET(r1)
- lwz r25, GPR25_OFFSET(r1)
- lwz r24, GPR24_OFFSET(r1)
- lwz r23, GPR23_OFFSET(r1)
- lwz r22, GPR22_OFFSET(r1)
- lwz r21, GPR21_OFFSET(r1)
- lwz r20, GPR20_OFFSET(r1)
- lwz r19, GPR19_OFFSET(r1)
- lwz r18, GPR18_OFFSET(r1)
- lwz r17, GPR17_OFFSET(r1)
- lwz r16, GPR16_OFFSET(r1)
- lwz r15, GPR15_OFFSET(r1)
- lwz r14, GPR14_OFFSET(r1)
+ PPC_REG_LOAD r31, GPR31_OFFSET(r1)
+ PPC_REG_LOAD r30, GPR30_OFFSET(r1)
+ PPC_REG_LOAD r29, GPR29_OFFSET(r1)
+ PPC_REG_LOAD r28, GPR28_OFFSET(r1)
+ PPC_REG_LOAD r27, GPR27_OFFSET(r1)
+ PPC_REG_LOAD r26, GPR26_OFFSET(r1)
+ PPC_REG_LOAD r25, GPR25_OFFSET(r1)
+ PPC_REG_LOAD r24, GPR24_OFFSET(r1)
+ PPC_REG_LOAD r23, GPR23_OFFSET(r1)
+ PPC_REG_LOAD r22, GPR22_OFFSET(r1)
+ PPC_REG_LOAD r21, GPR21_OFFSET(r1)
+ PPC_REG_LOAD r20, GPR20_OFFSET(r1)
+ PPC_REG_LOAD r19, GPR19_OFFSET(r1)
+ PPC_REG_LOAD r18, GPR18_OFFSET(r1)
+ PPC_REG_LOAD r17, GPR17_OFFSET(r1)
+ PPC_REG_LOAD r16, GPR16_OFFSET(r1)
+ PPC_REG_LOAD r15, GPR15_OFFSET(r1)
+ PPC_REG_LOAD r14, GPR14_OFFSET(r1)
lwz r4, CR_OFFSET(r1)
mtcr r4
- lwz r4, LR_OFFSET(r1)
+ PPC_REG_LOAD r4, LR_OFFSET(r1)
mtlr r4
addi r1, r1, FRAME_SIZE
blr
diff --git a/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S b/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
index d0c2159a06..e3a7a9cc14 100644
--- a/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
+++ b/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2013 embedded brains GmbH. All rights reserved.
+ * Copyright (c) 2013, 2017 embedded brains GmbH. All rights reserved.
*
* embedded brains GmbH
* Dornierstr. 4
@@ -17,6 +17,7 @@
#endif
#include <rtems/asm.h>
+#include <rtems/score/cpu.h>
.global _CPU_Context_volatile_clobber
@@ -25,24 +26,24 @@ _CPU_Context_volatile_clobber:
#ifdef PPC_MULTILIB_FPU
.macro CLOBBER_F i
addi r4, r3, 0x100 + \i
- stw r4, 16(r1)
+ stw r4, 32(r1)
addi r4, r3, 0x200 + \i
- stw r4, 16 + 4(r1)
- lfd \i, 16(r1)
+ stw r4, 32 + 4(r1)
+ lfd \i, 32(r1)
.endm
- stwu r1, -32(r1)
+ PPC_REG_STORE_UPDATE r1, -96(r1)
/* Negate FPSCR[FPRF] bits */
mffs f0
- stfd f0, 16(r1)
- lwz r0, 20(r1)
+ stfd f0, 32(r1)
+ lwz r0, 36(r1)
nor r3, r0, r0
rlwinm r0, r0, 0, 20, 14
rlwinm r3, r3, 0, 15, 19
or r0, r3, r0
- stw r0, 20(r1)
- lfd f0, 16(r1)
+ stw r0, 36(r1)
+ lfd f0, 32(r1)
mtfsf 0xff, f0
CLOBBER_F 0
@@ -59,36 +60,36 @@ _CPU_Context_volatile_clobber:
CLOBBER_F 11
CLOBBER_F 12
CLOBBER_F 13
- addi r1, r1, 32
+ addi r1, r1, 96
#endif
#ifdef PPC_MULTILIB_ALTIVEC
.macro CLOBBER_V i
addi r4, r3, 0x300 + \i
- stw r4, 16(r1)
+ stw r4, 32(r1)
addi r4, r3, 0x400 + \i
- stw r4, 16 + 4(r1)
+ stw r4, 32 + 4(r1)
addi r4, r3, 0x500 + \i
- stw r4, 16 + 8(r1)
+ stw r4, 32 + 8(r1)
addi r4, r3, 0x600 + \i
- stw r4, 16 + 12(r1)
- li r4, 16
+ stw r4, 32 + 12(r1)
+ li r4, 32
lvx \i, r1, r4
.endm
- stwu r1, -32(r1)
+ PPC_REG_STORE_UPDATE r1, -96(r1)
/* Negate VSCR[SAT] bit */
mfvscr v0
- li r3, 28
+ li r3, 44
stvewx v0, r1, r3
- lwz r0, 28(r1)
+ lwz r0, 44(r1)
nor r3, r0, r0
rlwinm r0, r0, 0, 0, 30
rlwinm r3, r3, 0, 31, 31
or r0, r3, r0
- stw r0, 28(r1)
- li r3, 28
+ stw r0, 44(r1)
+ li r3, 44
lvewx v0, r1, r3
mtvscr v0
@@ -112,7 +113,7 @@ _CPU_Context_volatile_clobber:
CLOBBER_V 17
CLOBBER_V 18
CLOBBER_V 19
- addi r1, r1, 32
+ addi r1, r1, 96
#endif
addi r4, r3, 10
diff --git a/cpukit/score/cpu/powerpc/rtems/score/cpu.h b/cpukit/score/cpu/powerpc/rtems/score/cpu.h
index 72fc48318f..cacd3ea105 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/cpu.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/cpu.h
@@ -25,7 +25,7 @@
*
* Copyright (c) 2001 Surrey Satellite Technology Limited (SSTL).
*
- * Copyright (c) 2010, 2016 embedded brains GmbH.
+ * Copyright (c) 2010, 2017 embedded brains GmbH.
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
@@ -181,10 +181,16 @@ extern "C" {
*/
#ifndef __SPE__
- #define PPC_GPR_TYPE uint32_t
- #define PPC_GPR_SIZE 4
- #define PPC_GPR_LOAD lwz
- #define PPC_GPR_STORE stw
+ #define PPC_GPR_TYPE uintptr_t
+ #if defined(__powerpc64__)
+ #define PPC_GPR_SIZE 8
+ #define PPC_GPR_LOAD ld
+ #define PPC_GPR_STORE std
+ #else
+ #define PPC_GPR_SIZE 4
+ #define PPC_GPR_LOAD lwz
+ #define PPC_GPR_STORE stw
+ #endif
#else
#define PPC_GPR_TYPE uint64_t
#define PPC_GPR_SIZE 8
@@ -192,6 +198,20 @@ extern "C" {
#define PPC_GPR_STORE evstdd
#endif
+#if defined(__powerpc64__)
+ #define PPC_REG_SIZE 8
+ #define PPC_REG_LOAD ld
+ #define PPC_REG_STORE std
+ #define PPC_REG_STORE_UPDATE stdu
+ #define PPC_REG_CMP cmpd
+#else
+ #define PPC_REG_SIZE 4
+ #define PPC_REG_LOAD lwz
+ #define PPC_REG_STORE stw
+ #define PPC_REG_STORE_UPDATE stwu
+ #define PPC_REG_CMP cmpw
+#endif
+
#ifndef ASM
/*
@@ -200,10 +220,10 @@ extern "C" {
* Linux and Embedded")
*/
typedef struct {
- uint32_t gpr1;
uint32_t msr;
- uint32_t lr;
uint32_t cr;
+ uintptr_t gpr1;
+ uintptr_t lr;
PPC_GPR_TYPE gpr14;
PPC_GPR_TYPE gpr15;
PPC_GPR_TYPE gpr16;
@@ -275,7 +295,7 @@ typedef struct {
* the previous items to optimize the context switch. We must not set the
* following items to zero via the dcbz.
*/
- uint32_t gpr2;
+ uintptr_t tp;
#if defined(RTEMS_SMP)
volatile uint32_t is_executing;
#endif
@@ -322,13 +342,14 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
#endif
#endif /* ASM */
-#define PPC_CONTEXT_OFFSET_GPR1 (PPC_DEFAULT_CACHE_LINE_SIZE + 0)
-#define PPC_CONTEXT_OFFSET_MSR (PPC_DEFAULT_CACHE_LINE_SIZE + 4)
-#define PPC_CONTEXT_OFFSET_LR (PPC_DEFAULT_CACHE_LINE_SIZE + 8)
-#define PPC_CONTEXT_OFFSET_CR (PPC_DEFAULT_CACHE_LINE_SIZE + 12)
+#define PPC_CONTEXT_OFFSET_MSR (PPC_DEFAULT_CACHE_LINE_SIZE)
+#define PPC_CONTEXT_OFFSET_CR (PPC_DEFAULT_CACHE_LINE_SIZE + 4)
+#define PPC_CONTEXT_OFFSET_GPR1 (PPC_DEFAULT_CACHE_LINE_SIZE + 8)
+#define PPC_CONTEXT_OFFSET_LR (PPC_DEFAULT_CACHE_LINE_SIZE + PPC_REG_SIZE + 8)
#define PPC_CONTEXT_GPR_OFFSET( gpr ) \
- (((gpr) - 14) * PPC_GPR_SIZE + PPC_DEFAULT_CACHE_LINE_SIZE + 16)
+ (((gpr) - 14) * PPC_GPR_SIZE + \
+ PPC_DEFAULT_CACHE_LINE_SIZE + 8 + 2 * PPC_REG_SIZE)
#define PPC_CONTEXT_OFFSET_GPR14 PPC_CONTEXT_GPR_OFFSET( 14 )
#define PPC_CONTEXT_OFFSET_GPR15 PPC_CONTEXT_GPR_OFFSET( 15 )
@@ -352,7 +373,7 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
#ifdef PPC_MULTILIB_ALTIVEC
#define PPC_CONTEXT_OFFSET_V( v ) \
- ( ( ( v ) - 20 ) * 16 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
+ ( ( ( v ) - 20 ) * 16 + PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE + 8)
#define PPC_CONTEXT_OFFSET_V20 PPC_CONTEXT_OFFSET_V( 20 )
#define PPC_CONTEXT_OFFSET_V21 PPC_CONTEXT_OFFSET_V( 21 )
#define PPC_CONTEXT_OFFSET_V22 PPC_CONTEXT_OFFSET_V( 22 )
@@ -367,10 +388,10 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
#define PPC_CONTEXT_OFFSET_V31 PPC_CONTEXT_OFFSET_V( 31 )
#define PPC_CONTEXT_OFFSET_VRSAVE PPC_CONTEXT_OFFSET_V( 32 )
#define PPC_CONTEXT_OFFSET_F( f ) \
- ( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 296 )
+ ( ( ( f ) - 14 ) * 8 + PPC_CONTEXT_OFFSET_VRSAVE + 8 )
#else
#define PPC_CONTEXT_OFFSET_F( f ) \
- ( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
+ ( ( ( f ) - 14 ) * 8 + PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE + 8 )
#endif
#ifdef PPC_MULTILIB_FPU
@@ -406,10 +427,11 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
#define PPC_CONTEXT_VOLATILE_SIZE (PPC_CONTEXT_GPR_OFFSET( 32 ) + 8)
#endif
-#define PPC_CONTEXT_OFFSET_GPR2 PPC_CONTEXT_VOLATILE_SIZE
+#define PPC_CONTEXT_OFFSET_TP PPC_CONTEXT_VOLATILE_SIZE
#ifdef RTEMS_SMP
- #define PPC_CONTEXT_OFFSET_IS_EXECUTING (PPC_CONTEXT_VOLATILE_SIZE + 4)
+ #define PPC_CONTEXT_OFFSET_IS_EXECUTING \
+ (PPC_CONTEXT_OFFSET_TP + PPC_REG_SIZE)
#endif
#ifndef ASM
@@ -1056,13 +1078,15 @@ void _CPU_Context_validate( uintptr_t pattern );
#endif
typedef struct {
- uint32_t EXC_SRR0;
- uint32_t EXC_SRR1;
+ uintptr_t EXC_SRR0;
+ uintptr_t EXC_SRR1;
uint32_t _EXC_number;
+ uint32_t RESERVED_FOR_ALIGNMENT_0;
uint32_t EXC_CR;
- uint32_t EXC_CTR;
uint32_t EXC_XER;
- uint32_t EXC_LR;
+ uintptr_t EXC_CTR;
+ uintptr_t EXC_LR;
+ uintptr_t RESERVED_FOR_ALIGNMENT_1;
#ifdef __SPE__
uint32_t EXC_SPEFSCR;
uint64_t EXC_ACC;
@@ -1099,13 +1123,13 @@ typedef struct {
PPC_GPR_TYPE GPR29;
PPC_GPR_TYPE GPR30;
PPC_GPR_TYPE GPR31;
- #if defined(PPC_MULTILIB_ALTIVEC) || defined(PPC_MULTILIB_FPU)
- uint32_t reserved_for_alignment;
- #endif
+ uintptr_t RESERVED_FOR_ALIGNMENT_2;
#ifdef PPC_MULTILIB_ALTIVEC
uint32_t VRSAVE;
+ uint32_t RESERVED_FOR_ALIGNMENT_3[3];
/* This field must take stvewx/lvewx requirements into account */
+ uint32_t RESERVED_FOR_ALIGNMENT_4[3];
uint32_t VSCR;
uint8_t V0[16];
@@ -1175,6 +1199,7 @@ typedef struct {
double F30;
double F31;
uint64_t FPSCR;
+ uint64_t RESERVED_FOR_ALIGNMENT_5;
#endif
} CPU_Exception_frame;
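
Working the new PPC_CONTEXT_OFFSET_* macros through for both register sizes (CLS = PPC_DEFAULT_CACHE_LINE_SIZE; the arithmetic is mine, not part of the patch):

    Field   Expression                   32-bit    64-bit
    MSR     CLS + 0                      CLS + 0   CLS + 0
    CR      CLS + 4                      CLS + 4   CLS + 4
    GPR1    CLS + 8                      CLS + 8   CLS + 8
    LR      CLS + 8 + PPC_REG_SIZE       CLS + 12  CLS + 16
    GPR14   CLS + 8 + 2 * PPC_REG_SIZE   CLS + 16  CLS + 24

On 32-bit the saved-GPR area therefore still starts at CLS + 16 as before; the reordering merely groups the two 32-bit fields (MSR, CR) ahead of the pointer-sized ones (GPR1, LR).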
diff --git a/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h b/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
index 57c2db1822..c292feb6fd 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
@@ -10,7 +10,7 @@
*
* Copyright (C) 2007 Till Straumann <strauman@slac.stanford.edu>
*
- * Copyright (c) 2009, 2016 embedded brains GmbH
+ * Copyright (c) 2009, 2017 embedded brains GmbH
*
* The license and distribution terms for this file may be
* found in the file LICENSE in this distribution or at
@@ -22,13 +22,71 @@
#include <rtems/score/cpu.h>
-#define SRR0_FRAME_OFFSET 8
-#define SRR1_FRAME_OFFSET 12
-#define EXCEPTION_NUMBER_OFFSET 16
-#define EXC_CR_OFFSET 20
-#define EXC_CTR_OFFSET 24
-#define EXC_XER_OFFSET 28
-#define EXC_LR_OFFSET 32
+/* Exception stack frame -> BSP_Exception_frame */
+#ifdef __powerpc64__
+ #define FRAME_LINK_SPACE 32
+#else
+ #define FRAME_LINK_SPACE 8
+#endif
+
+#define SRR0_FRAME_OFFSET FRAME_LINK_SPACE
+#define SRR1_FRAME_OFFSET (SRR0_FRAME_OFFSET + PPC_REG_SIZE)
+#define EXCEPTION_NUMBER_OFFSET (SRR1_FRAME_OFFSET + PPC_REG_SIZE)
+#define PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET (EXCEPTION_NUMBER_OFFSET + 4)
+#define EXC_CR_OFFSET (EXCEPTION_NUMBER_OFFSET + 8)
+#define EXC_XER_OFFSET (EXC_CR_OFFSET + 4)
+#define EXC_CTR_OFFSET (EXC_XER_OFFSET + 4)
+#define EXC_LR_OFFSET (EXC_CTR_OFFSET + PPC_REG_SIZE)
+#define PPC_EXC_INTERRUPT_FRAME_OFFSET (EXC_LR_OFFSET + PPC_REG_SIZE)
+
+#ifndef __SPE__
+ #define PPC_EXC_GPR_OFFSET(gpr) \
+ ((gpr) * PPC_GPR_SIZE + PPC_EXC_INTERRUPT_FRAME_OFFSET + PPC_REG_SIZE)
+ #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
+ #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
+ #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
+ #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
+ #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
+ #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_VR_OFFSET(32))
+ #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
+ #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
+ #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
+ #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
+ #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_MIN_VR_OFFSET(20))
+ #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
+ #define CPU_INTERRUPT_FRAME_SIZE \
+ (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
+ #elif defined(PPC_MULTILIB_ALTIVEC)
+ #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
+ #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
+ #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
+ #define PPC_EXC_FRAME_SIZE PPC_EXC_VR_OFFSET(32)
+ #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
+ #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
+ #define CPU_INTERRUPT_FRAME_SIZE \
+ (PPC_EXC_MIN_VR_OFFSET(20) + PPC_STACK_RED_ZONE_SIZE)
+ #elif defined(PPC_MULTILIB_FPU)
+ #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(33))
+ #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
+ #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
+ #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(13))
+ #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
+ #define CPU_INTERRUPT_FRAME_SIZE \
+ (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
+ #else
+ #define PPC_EXC_FRAME_SIZE PPC_EXC_GPR_OFFSET(33)
+ #define CPU_INTERRUPT_FRAME_SIZE \
+ (PPC_EXC_GPR_OFFSET(13) + PPC_STACK_RED_ZONE_SIZE)
+ #endif
+#else
+ #define PPC_EXC_SPEFSCR_OFFSET 44
+ #define PPC_EXC_ACC_OFFSET 48
+ #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 56)
+ #define PPC_EXC_VECTOR_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(4) + 4)
+ #define CPU_INTERRUPT_FRAME_SIZE (160 + PPC_STACK_RED_ZONE_SIZE)
+ #define PPC_EXC_FRAME_SIZE 320
+#endif
+
#define GPR0_OFFSET PPC_EXC_GPR_OFFSET(0)
#define GPR1_OFFSET PPC_EXC_GPR_OFFSET(1)
#define GPR2_OFFSET PPC_EXC_GPR_OFFSET(2)
@@ -62,52 +120,6 @@
#define GPR30_OFFSET PPC_EXC_GPR_OFFSET(30)
#define GPR31_OFFSET PPC_EXC_GPR_OFFSET(31)
-/* Exception stack frame -> BSP_Exception_frame */
-#define FRAME_LINK_SPACE 8
-
-#ifndef __SPE__
- #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36)
- #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
- #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
- #define PPC_EXC_VRSAVE_OFFSET 168
- #define PPC_EXC_VSCR_OFFSET 172
- #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
- #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 688)
- #define PPC_EXC_FPSCR_OFFSET 944
- #define PPC_EXC_FRAME_SIZE 960
- #define PPC_EXC_MIN_VSCR_OFFSET 92
- #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
- #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 416)
- #define PPC_EXC_MIN_FPSCR_OFFSET 528
- #define CPU_INTERRUPT_FRAME_SIZE 544
- #elif defined(PPC_MULTILIB_ALTIVEC)
- #define PPC_EXC_VRSAVE_OFFSET 168
- #define PPC_EXC_VSCR_OFFSET 172
- #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
- #define PPC_EXC_FRAME_SIZE 688
- #define PPC_EXC_MIN_VSCR_OFFSET 92
- #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
- #define CPU_INTERRUPT_FRAME_SIZE 416
- #elif defined(PPC_MULTILIB_FPU)
- #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 168)
- #define PPC_EXC_FPSCR_OFFSET 424
- #define PPC_EXC_FRAME_SIZE 448
- #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 96)
- #define PPC_EXC_MIN_FPSCR_OFFSET 92
- #define CPU_INTERRUPT_FRAME_SIZE 224
- #else
- #define PPC_EXC_FRAME_SIZE 176
- #define CPU_INTERRUPT_FRAME_SIZE 96
- #endif
-#else
- #define PPC_EXC_SPEFSCR_OFFSET 36
- #define PPC_EXC_ACC_OFFSET 40
- #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 48)
- #define PPC_EXC_VECTOR_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(4) + 4)
- #define CPU_INTERRUPT_FRAME_SIZE 160
- #define PPC_EXC_FRAME_SIZE 320
-#endif
-
#define CPU_PER_CPU_CONTROL_SIZE 0
#ifdef RTEMS_SMP
@@ -124,15 +136,24 @@ extern "C" {
#endif
typedef struct {
- uint32_t FRAME_SP;
- uint32_t FRAME_LR;
- uint32_t EXC_SRR0;
- uint32_t EXC_SRR1;
- uint32_t unused;
+ uintptr_t FRAME_SP;
+ #ifdef __powerpc64__
+ uint32_t FRAME_CR;
+ uint32_t FRAME_RESERVED;
+ #endif
+ uintptr_t FRAME_LR;
+ #ifdef __powerpc64__
+ uintptr_t FRAME_TOC;
+ #endif
+ uintptr_t EXC_SRR0;
+ uintptr_t EXC_SRR1;
+ uint32_t RESERVED_FOR_ALIGNMENT_0;
+ uint32_t EXC_INTERRUPT_ENTRY_INSTANT;
uint32_t EXC_CR;
- uint32_t EXC_CTR;
uint32_t EXC_XER;
- uint32_t EXC_LR;
+ uintptr_t EXC_CTR;
+ uintptr_t EXC_LR;
+ uintptr_t EXC_INTERRUPT_FRAME;
#ifdef __SPE__
uint32_t EXC_SPEFSCR;
uint64_t EXC_ACC;
@@ -150,12 +171,12 @@ typedef struct {
PPC_GPR_TYPE GPR10;
PPC_GPR_TYPE GPR11;
PPC_GPR_TYPE GPR12;
- uint32_t EARLY_INSTANT;
#ifdef PPC_MULTILIB_ALTIVEC
/* This field must take stvewx/lvewx requirements into account */
+ uint32_t RESERVED_FOR_ALIGNMENT_3[3];
uint32_t VSCR;
- uint8_t V0[16] RTEMS_ALIGNED(16);
+ uint8_t V0[16];
uint8_t V1[16];
uint8_t V2[16];
uint8_t V3[16];
@@ -192,9 +213,10 @@ typedef struct {
double F12;
double F13;
uint64_t FPSCR;
+ uint64_t RESERVED_FOR_ALIGNMENT_4;
#endif
- #if !defined(PPC_MULTILIB_ALTIVEC) && !defined(PPC_MULTILIB_FPU)
- uint32_t RESERVED_FOR_STACK_ALIGNMENT;
+ #if PPC_STACK_RED_ZONE_SIZE > 0
+ uint8_t RED_ZONE[ PPC_STACK_RED_ZONE_SIZE ];
#endif
} CPU_Interrupt_frame;
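
Evaluating the new symbolic offsets gives the following exception-frame header layout (FRAME_LINK_SPACE is 8 on 32-bit and 32 on 64-bit; the numbers are my arithmetic from the macros above, not from the patch):

    Offset                                    32-bit   64-bit
    SRR0_FRAME_OFFSET                             8       32
    SRR1_FRAME_OFFSET                            12       40
    EXCEPTION_NUMBER_OFFSET                      16       48
    PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET       20       52
    EXC_CR_OFFSET                                24       56
    EXC_XER_OFFSET                               28       60
    EXC_CTR_OFFSET                               32       64
    EXC_LR_OFFSET                                36       72
    PPC_EXC_INTERRUPT_FRAME_OFFSET               40       80

Even on 32-bit the header grows by one word for the entry instant and the CTR/XER slots swap places, which is why the save/restore order changed in ppc_exc_async_normal.S and ppc_exc_fatal.S.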
diff --git a/cpukit/score/cpu/powerpc/rtems/score/powerpc.h b/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
index 29469bc584..88ee0020e6 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
@@ -140,6 +140,12 @@ extern "C" {
#define PPC_ALIGNMENT 8
#endif
+#ifdef __powerpc64__
+#define PPC_STACK_RED_ZONE_SIZE 512
+#else
+#define PPC_STACK_RED_ZONE_SIZE 0
+#endif
+
/*
* Unless specified above, If the model has FP support, it is assumed to
* support doubles (8-byte floating point numbers).