From a6f84b275318dbd89ba0bfd12ff6df631a8ac4bc Mon Sep 17 00:00:00 2001
From: Sebastian Huber
Date: Tue, 1 Aug 2017 10:57:46 +0200
Subject: powerpc: Add 64-bit context/interrupt support

Update #3082.
---
 cpukit/score/cpu/powerpc/cpu.c                     |   9 +-
 cpukit/score/cpu/powerpc/ppc-context-validate.S    | 165 ++++++++++++---------
 .../cpu/powerpc/ppc-context-volatile-clobber.S     |  43 +++---
 cpukit/score/cpu/powerpc/rtems/score/cpu.h         |  75 ++++++----
 cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h     | 152 +++++++++++--------
 cpukit/score/cpu/powerpc/rtems/score/powerpc.h     |   6 +
 6 files changed, 263 insertions(+), 187 deletions(-)

(limited to 'cpukit/score')
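
Note on the layout change (illustration only, not part of the commit): register-sized
members of the thread context become uintptr_t and the assembly goes through the new
width-neutral PPC_REG_LOAD/PPC_REG_STORE/PPC_REG_STORE_UPDATE/PPC_REG_CMP macros, while
fields that stay 32 bits wide (MSR, CR, XER, VSCR, FPSCR) keep uint32_t and lwz/stw.
The small C program below is a hypothetical sketch that merely replays the new
PPC_CONTEXT_OFFSET_* arithmetic from rtems/score/cpu.h for both register widths; the
32-byte cache line value and the show_layout() helper are assumptions made for the
example.

  /* Sketch only: replays the PPC_CONTEXT_OFFSET_* arithmetic added by this
   * patch.  CACHE_LINE stands in for PPC_DEFAULT_CACHE_LINE_SIZE and is
   * assumed to be 32 here; show_layout() is a hypothetical helper. */
  #include <stdio.h>

  #define CACHE_LINE 32u

  static void show_layout(unsigned reg_size, unsigned gpr_size)
  {
    unsigned msr   = CACHE_LINE;                    /* PPC_CONTEXT_OFFSET_MSR */
    unsigned cr    = CACHE_LINE + 4;                /* PPC_CONTEXT_OFFSET_CR */
    unsigned gpr1  = CACHE_LINE + 8;                /* PPC_CONTEXT_OFFSET_GPR1 */
    unsigned lr    = CACHE_LINE + reg_size + 8;     /* PPC_CONTEXT_OFFSET_LR */
    unsigned gpr14 = CACHE_LINE + 8 + 2 * reg_size; /* PPC_CONTEXT_GPR_OFFSET(14) */
    unsigned gpr31 = (31 - 14) * gpr_size + gpr14;  /* PPC_CONTEXT_GPR_OFFSET(31) */

    printf("reg size %u: MSR@%u CR@%u GPR1@%u LR@%u GPR14@%u GPR31@%u\n",
      reg_size, msr, cr, gpr1, lr, gpr14, gpr31);
  }

  int main(void)
  {
    show_layout(4, 4); /* 32-bit */
    show_layout(8, 8); /* 64-bit */
    return 0;
  }

For the 32-bit build the GPR save area keeps its old offset (cache line + 16); only the
four header fields MSR/CR/GPR1/LR are reordered so that the register-sized members can
grow to 8 bytes on 64-bit targets without disturbing the 32-bit fields.
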
diff --git a/cpukit/score/cpu/powerpc/cpu.c b/cpukit/score/cpu/powerpc/cpu.c
index 9d653f79ef..e089239515 100644
--- a/cpukit/score/cpu/powerpc/cpu.c
+++ b/cpukit/score/cpu/powerpc/cpu.c
@@ -5,7 +5,7 @@
  */

 /*
- * Copyright (C) 2009, 2016 embedded brains GmbH.
+ * Copyright (C) 2009, 2017 embedded brains GmbH.
  *
  * The license and distribution terms for this file may be
  * found in the file LICENSE in this distribution or at
@@ -52,7 +52,7 @@ PPC_ASSERT_OFFSET(gpr28, GPR28);
 PPC_ASSERT_OFFSET(gpr29, GPR29);
 PPC_ASSERT_OFFSET(gpr30, GPR30);
 PPC_ASSERT_OFFSET(gpr31, GPR31);
-PPC_ASSERT_OFFSET(gpr2, GPR2);
+PPC_ASSERT_OFFSET(tp, TP);
 PPC_ASSERT_OFFSET(isr_dispatch_disable, ISR_DISPATCH_DISABLE);

 #ifdef RTEMS_SMP
@@ -169,10 +169,15 @@ PPC_EXC_ASSERT_CANONIC_OFFSET(GPR31);

 PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET);
 PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET);
+PPC_EXC_MIN_ASSERT_OFFSET(
+  EXC_INTERRUPT_ENTRY_INSTANT,
+  PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET
+);
 PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CR);
 PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CTR);
 PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_XER);
 PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_LR);
+PPC_EXC_MIN_ASSERT_OFFSET(EXC_INTERRUPT_FRAME, PPC_EXC_INTERRUPT_FRAME_OFFSET);
 #ifdef __SPE__
 PPC_EXC_MIN_ASSERT_OFFSET(EXC_SPEFSCR, PPC_EXC_SPEFSCR_OFFSET);
 PPC_EXC_MIN_ASSERT_OFFSET(EXC_ACC, PPC_EXC_ACC_OFFSET);
diff --git a/cpukit/score/cpu/powerpc/ppc-context-validate.S b/cpukit/score/cpu/powerpc/ppc-context-validate.S
index b34438a361..523707b157 100644
--- a/cpukit/score/cpu/powerpc/ppc-context-validate.S
+++ b/cpukit/score/cpu/powerpc/ppc-context-validate.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2015 embedded brains GmbH.  All rights reserved.
+ * Copyright (c) 2013, 2017 embedded brains GmbH.  All rights reserved.
  *
  *  embedded brains GmbH
  *  Dornierstr. 4
@@ -20,8 +20,8 @@
 #include

 #define LR_OFFSET 8
-#define CR_OFFSET 12
-#define OFFSET(i) ((i) * PPC_GPR_SIZE + 16)
+#define CR_OFFSET 16
+#define OFFSET(i) ((i) * PPC_GPR_SIZE + 32)
 #define GPR14_OFFSET OFFSET(0)
 #define GPR15_OFFSET OFFSET(1)
 #define GPR16_OFFSET OFFSET(2)
@@ -100,29 +100,29 @@ _CPU_Context_validate:

 	/* Save */

-	stwu	r1, -FRAME_SIZE(r1)
+	PPC_REG_STORE_UPDATE	r1, -FRAME_SIZE(r1)
 	mflr	r4
-	stw	r4, LR_OFFSET(r1)
+	PPC_REG_STORE	r4, LR_OFFSET(r1)
 	mfcr	r4
 	stw	r4, CR_OFFSET(r1)
-	stw	r14, GPR14_OFFSET(r1)
-	stw	r15, GPR15_OFFSET(r1)
-	stw	r16, GPR16_OFFSET(r1)
-	stw	r17, GPR17_OFFSET(r1)
-	stw	r18, GPR18_OFFSET(r1)
-	stw	r19, GPR19_OFFSET(r1)
-	stw	r20, GPR20_OFFSET(r1)
-	stw	r21, GPR21_OFFSET(r1)
-	stw	r22, GPR22_OFFSET(r1)
-	stw	r23, GPR23_OFFSET(r1)
-	stw	r24, GPR24_OFFSET(r1)
-	stw	r25, GPR25_OFFSET(r1)
-	stw	r26, GPR26_OFFSET(r1)
-	stw	r27, GPR27_OFFSET(r1)
-	stw	r28, GPR28_OFFSET(r1)
-	stw	r29, GPR29_OFFSET(r1)
-	stw	r30, GPR30_OFFSET(r1)
-	stw	r31, GPR31_OFFSET(r1)
+	PPC_REG_STORE	r14, GPR14_OFFSET(r1)
+	PPC_REG_STORE	r15, GPR15_OFFSET(r1)
+	PPC_REG_STORE	r16, GPR16_OFFSET(r1)
+	PPC_REG_STORE	r17, GPR17_OFFSET(r1)
+	PPC_REG_STORE	r18, GPR18_OFFSET(r1)
+	PPC_REG_STORE	r19, GPR19_OFFSET(r1)
+	PPC_REG_STORE	r20, GPR20_OFFSET(r1)
+	PPC_REG_STORE	r21, GPR21_OFFSET(r1)
+	PPC_REG_STORE	r22, GPR22_OFFSET(r1)
+	PPC_REG_STORE	r23, GPR23_OFFSET(r1)
+	PPC_REG_STORE	r24, GPR24_OFFSET(r1)
+	PPC_REG_STORE	r25, GPR25_OFFSET(r1)
+	PPC_REG_STORE	r26, GPR26_OFFSET(r1)
+	PPC_REG_STORE	r27, GPR27_OFFSET(r1)
+	PPC_REG_STORE	r28, GPR28_OFFSET(r1)
+	PPC_REG_STORE	r29, GPR29_OFFSET(r1)
+	PPC_REG_STORE	r30, GPR30_OFFSET(r1)
+	PPC_REG_STORE	r31, GPR31_OFFSET(r1)

 #ifdef PPC_MULTILIB_FPU
 	stfd	f14, F14_OFFSET(r1)
@@ -218,8 +218,12 @@ _CPU_Context_validate:
 	addi	r26, r3, 21
 	addi	r27, r3, 22

-	/* GPR28 contains the GPR2 pattern */
+	/* GPR28 contains the TP pattern */
+#ifdef __powerpc64__
+	xor	r28, r13, r3
+#else
 	xor	r28, r2, r3
+#endif

 	/* GPR29 and CR are equal most of the time */
 	addi	r29, r3, 24
@@ -330,101 +334,114 @@ check:
 	cmpw	r4, r29
 	bne	restore
 	addi	r4, r3, 1
-	cmpw	r4, r5
+	PPC_REG_CMP	r4, r5
 	bne	restore
 	addi	r4, r3, 2
-	cmpw	r4, r6
+	PPC_REG_CMP	r4, r6
 	bne	restore
 	addi	r4, r3, 3
-	cmpw	r4, r7
+	PPC_REG_CMP	r4, r7
 	bne	restore
 	addi	r4, r3, 4
-	cmpw	r4, r8
+	PPC_REG_CMP	r4, r8
 	bne	restore
 	addi	r4, r3, 5
-	cmpw	r4, r9
+	PPC_REG_CMP	r4, r9
 	bne	restore
 	addi	r4, r3, 6
-	cmpw	r4, r10
+	PPC_REG_CMP	r4, r10
 	bne	restore
 	addi	r4, r3, 7
-	cmpw	r4, r11
+	PPC_REG_CMP	r4, r11
 	bne	restore
 	addi	r4, r3, 8
-	cmpw	r4, r12
-	bne	restore
+	PPC_REG_CMP	r4, r12
+	bne	restore
+#ifdef __powerpc64__
+	lis	r4, .TOC.@highest
+	ori	r4, r4, .TOC.@higher
+	rldicr	r4, r4, 32, 31
+	oris	r4, r4, .TOC.@h
+	ori	r4, r4, .TOC.@l
+	PPC_REG_CMP	r4, r2
+#else
 	lis	r4, _SDA_BASE_@h
 	ori	r4, r4, _SDA_BASE_@l
-	cmpw	r4, r13
+	PPC_REG_CMP	r4, r13
+#endif
 	bne	restore
 	addi	r4, r3, 9
-	cmpw	r4, r14
+	PPC_REG_CMP	r4, r14
 	bne	restore
 	addi	r4, r3, 10
-	cmpw	r4, r15
+	PPC_REG_CMP	r4, r15
 	bne	restore
 	addi	r4, r3, 11
-	cmpw	r4, r16
+	PPC_REG_CMP	r4, r16
 	bne	restore
 	addi	r4, r3, 12
-	cmpw	r4, r17
+	PPC_REG_CMP	r4, r17
 	bne	restore
 	addi	r4, r3, 13
-	cmpw	r4, r18
+	PPC_REG_CMP	r4, r18
 	bne	restore
 	addi	r4, r3, 14
-	cmpw	r4, r19
+	PPC_REG_CMP	r4, r19
 	bne	restore
 	addi	r4, r3, 15
-	cmpw	r4, r20
+	PPC_REG_CMP	r4, r20
 	bne	restore
 	addi	r4, r3, 16
-	cmpw	r4, r21
+	PPC_REG_CMP	r4, r21
 	bne	restore
 	addi	r4, r3, 17
-	cmpw	r4, r22
+	PPC_REG_CMP	r4, r22
 	bne	restore
 	addi	r4, r3, 18
-	cmpw	r4, r23
+	PPC_REG_CMP	r4, r23
 	bne	restore
 	addi	r4, r3, 19
-	cmpw	r4, r24
+	PPC_REG_CMP	r4, r24
 	bne	restore
 	addi	r4, r3, 20
-	cmpw	r4, r25
+	PPC_REG_CMP	r4, r25
 	bne	restore
 	addi	r4, r3, 21
-	cmpw	r4, r26
+	PPC_REG_CMP	r4, r26
 	bne	restore
 	addi	r4, r3, 22
-	cmpw	r4, r27
+	PPC_REG_CMP	r4, r27
 	bne	restore
+#ifdef __powerpc64__
+	xor	r4, r13, r3
+#else
 	xor	r4, r2, r3
-	cmpw	r4, r28
+#endif
+	PPC_REG_CMP	r4, r28
 	bne	restore
 	addi	r4, r3, 24
-	cmpw	r4, r29
+	PPC_REG_CMP	r4, r29
 	bne	restore
 	mfmsr	r4
 	xor	r4, r4, r3
-	cmpw	r4, r30
+	PPC_REG_CMP	r4, r30
 	bne	restore
 	addi	r4, r3, 25
 	mflr	r5
-	cmpw	r4, r5
+	PPC_REG_CMP	r4, r5
 	bne	restore
 	addi	r4, r3, 26
 	mfctr	r5
-	cmpw	r4, r5
+	PPC_REG_CMP	r4, r5
 	bne	restore
 	rlwinm	r4, r3, 0, 25, 2
 	mfxer	r5
 	cmpw	r4, r5
 	bne	restore
 	addi	r4, r3, 28
-	cmpw	r4, r0
+	PPC_REG_CMP	r4, r0
 	bne	restore
-	cmpw	r31, r1
+	PPC_REG_CMP	r31, r1
 	bne	restore

 #ifdef PPC_MULTILIB_FPU
@@ -614,27 +631,27 @@ restore:
 	lfd	f14, F14_OFFSET(r1)
 #endif

-	lwz	r31, GPR31_OFFSET(r1)
-	lwz	r30, GPR30_OFFSET(r1)
-	lwz	r29, GPR29_OFFSET(r1)
-	lwz	r28, GPR28_OFFSET(r1)
-	lwz	r27, GPR27_OFFSET(r1)
-	lwz	r26, GPR26_OFFSET(r1)
-	lwz	r25, GPR25_OFFSET(r1)
-	lwz	r24, GPR24_OFFSET(r1)
-	lwz	r23, GPR23_OFFSET(r1)
-	lwz	r22, GPR22_OFFSET(r1)
-	lwz	r21, GPR21_OFFSET(r1)
-	lwz	r20, GPR20_OFFSET(r1)
-	lwz	r19, GPR19_OFFSET(r1)
-	lwz	r18, GPR18_OFFSET(r1)
-	lwz	r17, GPR17_OFFSET(r1)
-	lwz	r16, GPR16_OFFSET(r1)
-	lwz	r15, GPR15_OFFSET(r1)
-	lwz	r14, GPR14_OFFSET(r1)
+	PPC_REG_LOAD	r31, GPR31_OFFSET(r1)
+	PPC_REG_LOAD	r30, GPR30_OFFSET(r1)
+	PPC_REG_LOAD	r29, GPR29_OFFSET(r1)
+	PPC_REG_LOAD	r28, GPR28_OFFSET(r1)
+	PPC_REG_LOAD	r27, GPR27_OFFSET(r1)
+	PPC_REG_LOAD	r26, GPR26_OFFSET(r1)
+	PPC_REG_LOAD	r25, GPR25_OFFSET(r1)
+	PPC_REG_LOAD	r24, GPR24_OFFSET(r1)
+	PPC_REG_LOAD	r23, GPR23_OFFSET(r1)
+	PPC_REG_LOAD	r22, GPR22_OFFSET(r1)
+	PPC_REG_LOAD	r21, GPR21_OFFSET(r1)
+	PPC_REG_LOAD	r20, GPR20_OFFSET(r1)
+	PPC_REG_LOAD	r19, GPR19_OFFSET(r1)
+	PPC_REG_LOAD	r18, GPR18_OFFSET(r1)
+	PPC_REG_LOAD	r17, GPR17_OFFSET(r1)
+	PPC_REG_LOAD	r16, GPR16_OFFSET(r1)
+	PPC_REG_LOAD	r15, GPR15_OFFSET(r1)
+	PPC_REG_LOAD	r14, GPR14_OFFSET(r1)
 	lwz	r4, CR_OFFSET(r1)
 	mtcr	r4
-	lwz	r4, LR_OFFSET(r1)
+	PPC_REG_LOAD	r4, LR_OFFSET(r1)
 	mtlr	r4
 	addi	r1, r1, FRAME_SIZE
 	blr
diff --git a/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S b/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
index d0c2159a06..e3a7a9cc14 100644
--- a/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
+++ b/cpukit/score/cpu/powerpc/ppc-context-volatile-clobber.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013 embedded brains GmbH.  All rights reserved.
+ * Copyright (c) 2013, 2017 embedded brains GmbH.  All rights reserved.
  *
  *  embedded brains GmbH
  *  Dornierstr. 4
@@ -17,6 +17,7 @@
 #endif

 #include
+#include

 .global _CPU_Context_volatile_clobber

@@ -25,24 +26,24 @@ _CPU_Context_volatile_clobber:
 #ifdef PPC_MULTILIB_FPU
 .macro CLOBBER_F i
 	addi	r4, r3, 0x100 + \i
-	stw	r4, 16(r1)
+	stw	r4, 32(r1)
 	addi	r4, r3, 0x200 + \i
-	stw	r4, 16 + 4(r1)
-	lfd	\i, 16(r1)
+	stw	r4, 32 + 4(r1)
+	lfd	\i, 32(r1)
 .endm

-	stwu	r1, -32(r1)
+	PPC_REG_STORE_UPDATE	r1, -96(r1)

 	/* Negate FPSCR[FPRF] bits */
 	mffs	f0
-	stfd	f0, 16(r1)
-	lwz	r0, 20(r1)
+	stfd	f0, 32(r1)
+	lwz	r0, 36(r1)
 	nor	r3, r0, r0
 	rlwinm	r0, r0, 0, 20, 14
 	rlwinm	r3, r3, 0, 15, 19
 	or	r0, r3, r0
-	stw	r0, 20(r1)
-	lfd	f0, 16(r1)
+	stw	r0, 36(r1)
+	lfd	f0, 32(r1)
 	mtfsf	0xff, f0

 	CLOBBER_F 0
@@ -59,36 +60,36 @@ _CPU_Context_volatile_clobber:
 	CLOBBER_F 11
 	CLOBBER_F 12
 	CLOBBER_F 13
-	addi	r1, r1, 32
+	addi	r1, r1, 96
 #endif

 #ifdef PPC_MULTILIB_ALTIVEC
 .macro CLOBBER_V i
 	addi	r4, r3, 0x300 + \i
-	stw	r4, 16(r1)
+	stw	r4, 32(r1)
 	addi	r4, r3, 0x400 + \i
-	stw	r4, 16 + 4(r1)
+	stw	r4, 32 + 4(r1)
 	addi	r4, r3, 0x500 + \i
-	stw	r4, 16 + 8(r1)
+	stw	r4, 32 + 8(r1)
 	addi	r4, r3, 0x600 + \i
-	stw	r4, 16 + 12(r1)
-	li	r4, 16
+	stw	r4, 32 + 12(r1)
+	li	r4, 32
 	lvx	\i, r1, r4
 .endm

-	stwu	r1, -32(r1)
+	PPC_REG_STORE_UPDATE	r1, -96(r1)

 	/* Negate VSCR[SAT] bit */
 	mfvscr	v0
-	li	r3, 28
+	li	r3, 44
 	stvewx	v0, r1, r3
-	lwz	r0, 28(r1)
+	lwz	r0, 44(r1)
 	nor	r3, r0, r0
 	rlwinm	r0, r0, 0, 0, 30
 	rlwinm	r3, r3, 0, 31, 31
 	or	r0, r3, r0
-	stw	r0, 28(r1)
-	li	r3, 28
+	stw	r0, 44(r1)
+	li	r3, 44
 	lvewx	v0, r1, r3
 	mtvscr	v0
@@ -112,7 +113,7 @@ _CPU_Context_volatile_clobber:
 	CLOBBER_V 17
 	CLOBBER_V 18
 	CLOBBER_V 19
-	addi	r1, r1, 32
+	addi	r1, r1, 96
 #endif

 	addi	r4, r3, 10
diff --git a/cpukit/score/cpu/powerpc/rtems/score/cpu.h b/cpukit/score/cpu/powerpc/rtems/score/cpu.h
index 72fc48318f..cacd3ea105 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/cpu.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/cpu.h
@@ -25,7 +25,7 @@
  *
  *  Copyright (c) 2001 Surrey Satellite Technology Limited (SSTL).
  *
- *  Copyright (c) 2010, 2016 embedded brains GmbH.
+ *  Copyright (c) 2010, 2017 embedded brains GmbH.
  *
  *  The license and distribution terms for this file may be
  *  found in the file LICENSE in this distribution or at
@@ -181,10 +181,16 @@ extern "C" {
  */

 #ifndef __SPE__
-  #define PPC_GPR_TYPE uint32_t
-  #define PPC_GPR_SIZE 4
-  #define PPC_GPR_LOAD lwz
-  #define PPC_GPR_STORE stw
+  #define PPC_GPR_TYPE uintptr_t
+  #if defined(__powerpc64__)
+    #define PPC_GPR_SIZE 8
+    #define PPC_GPR_LOAD ld
+    #define PPC_GPR_STORE std
+  #else
+    #define PPC_GPR_SIZE 4
+    #define PPC_GPR_LOAD lwz
+    #define PPC_GPR_STORE stw
+  #endif
 #else
   #define PPC_GPR_TYPE uint64_t
   #define PPC_GPR_SIZE 8
@@ -192,6 +198,20 @@ extern "C" {
   #define PPC_GPR_STORE evstdd
 #endif

+#if defined(__powerpc64__)
+  #define PPC_REG_SIZE 8
+  #define PPC_REG_LOAD ld
+  #define PPC_REG_STORE std
+  #define PPC_REG_STORE_UPDATE stdu
+  #define PPC_REG_CMP cmpd
+#else
+  #define PPC_REG_SIZE 4
+  #define PPC_REG_LOAD lwz
+  #define PPC_REG_STORE stw
+  #define PPC_REG_STORE_UPDATE stwu
+  #define PPC_REG_CMP cmpw
+#endif
+
 #ifndef ASM

 /*
@@ -200,10 +220,10 @@ extern "C" {
  * Linux and Embedded")
  */
 typedef struct {
-  uint32_t gpr1;
   uint32_t msr;
-  uint32_t lr;
   uint32_t cr;
+  uintptr_t gpr1;
+  uintptr_t lr;
   PPC_GPR_TYPE gpr14;
   PPC_GPR_TYPE gpr15;
   PPC_GPR_TYPE gpr16;
@@ -275,7 +295,7 @@ typedef struct {
    * the previous items to optimize the context switch.  We must not set the
    * following items to zero via the dcbz.
    */
-  uint32_t gpr2;
+  uintptr_t tp;
   #if defined(RTEMS_SMP)
     volatile uint32_t is_executing;
   #endif
@@ -322,13 +342,14 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
 #endif
 #endif /* ASM */

-#define PPC_CONTEXT_OFFSET_GPR1 (PPC_DEFAULT_CACHE_LINE_SIZE + 0)
-#define PPC_CONTEXT_OFFSET_MSR (PPC_DEFAULT_CACHE_LINE_SIZE + 4)
-#define PPC_CONTEXT_OFFSET_LR (PPC_DEFAULT_CACHE_LINE_SIZE + 8)
-#define PPC_CONTEXT_OFFSET_CR (PPC_DEFAULT_CACHE_LINE_SIZE + 12)
+#define PPC_CONTEXT_OFFSET_MSR (PPC_DEFAULT_CACHE_LINE_SIZE)
+#define PPC_CONTEXT_OFFSET_CR (PPC_DEFAULT_CACHE_LINE_SIZE + 4)
+#define PPC_CONTEXT_OFFSET_GPR1 (PPC_DEFAULT_CACHE_LINE_SIZE + 8)
+#define PPC_CONTEXT_OFFSET_LR (PPC_DEFAULT_CACHE_LINE_SIZE + PPC_REG_SIZE + 8)

 #define PPC_CONTEXT_GPR_OFFSET( gpr ) \
-  (((gpr) - 14) * PPC_GPR_SIZE + PPC_DEFAULT_CACHE_LINE_SIZE + 16)
+  (((gpr) - 14) * PPC_GPR_SIZE + \
+    PPC_DEFAULT_CACHE_LINE_SIZE + 8 + 2 * PPC_REG_SIZE)

 #define PPC_CONTEXT_OFFSET_GPR14 PPC_CONTEXT_GPR_OFFSET( 14 )
 #define PPC_CONTEXT_OFFSET_GPR15 PPC_CONTEXT_GPR_OFFSET( 15 )
@@ -352,7 +373,7 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )

 #ifdef PPC_MULTILIB_ALTIVEC
   #define PPC_CONTEXT_OFFSET_V( v ) \
-    ( ( ( v ) - 20 ) * 16 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
+    ( ( ( v ) - 20 ) * 16 + PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE + 8)
   #define PPC_CONTEXT_OFFSET_V20 PPC_CONTEXT_OFFSET_V( 20 )
   #define PPC_CONTEXT_OFFSET_V21 PPC_CONTEXT_OFFSET_V( 21 )
   #define PPC_CONTEXT_OFFSET_V22 PPC_CONTEXT_OFFSET_V( 22 )
@@ -367,10 +388,10 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
   #define PPC_CONTEXT_OFFSET_V31 PPC_CONTEXT_OFFSET_V( 31 )
   #define PPC_CONTEXT_OFFSET_VRSAVE PPC_CONTEXT_OFFSET_V( 32 )
   #define PPC_CONTEXT_OFFSET_F( f ) \
-    ( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 296 )
+    ( ( ( f ) - 14 ) * 8 + PPC_CONTEXT_OFFSET_VRSAVE + 8 )
 #else
   #define PPC_CONTEXT_OFFSET_F( f ) \
-    ( ( ( f ) - 14 ) * 8 + PPC_DEFAULT_CACHE_LINE_SIZE + 96 )
+    ( ( ( f ) - 14 ) * 8 + PPC_CONTEXT_OFFSET_ISR_DISPATCH_DISABLE + 8 )
 #endif

 #ifdef PPC_MULTILIB_FPU
@@ -406,10 +427,11 @@ static inline ppc_context *ppc_get_context( const Context_Control *context )
   #define PPC_CONTEXT_VOLATILE_SIZE (PPC_CONTEXT_GPR_OFFSET( 32 ) + 8)
 #endif

-#define PPC_CONTEXT_OFFSET_GPR2 PPC_CONTEXT_VOLATILE_SIZE
+#define PPC_CONTEXT_OFFSET_TP PPC_CONTEXT_VOLATILE_SIZE

 #ifdef RTEMS_SMP
-  #define PPC_CONTEXT_OFFSET_IS_EXECUTING (PPC_CONTEXT_VOLATILE_SIZE + 4)
+  #define PPC_CONTEXT_OFFSET_IS_EXECUTING \
+    (PPC_CONTEXT_OFFSET_TP + PPC_REG_SIZE)
 #endif

 #ifndef ASM
@@ -1056,13 +1078,15 @@ void _CPU_Context_validate( uintptr_t pattern );
 #endif

 typedef struct {
-  uint32_t EXC_SRR0;
-  uint32_t EXC_SRR1;
+  uintptr_t EXC_SRR0;
+  uintptr_t EXC_SRR1;
   uint32_t _EXC_number;
+  uint32_t RESERVED_FOR_ALIGNMENT_0;
   uint32_t EXC_CR;
-  uint32_t EXC_CTR;
   uint32_t EXC_XER;
-  uint32_t EXC_LR;
+  uintptr_t EXC_CTR;
+  uintptr_t EXC_LR;
+  uintptr_t RESERVED_FOR_ALIGNMENT_1;
 #ifdef __SPE__
   uint32_t EXC_SPEFSCR;
   uint64_t EXC_ACC;
@@ -1099,13 +1123,13 @@
   PPC_GPR_TYPE GPR29;
   PPC_GPR_TYPE GPR30;
   PPC_GPR_TYPE GPR31;
-  #if defined(PPC_MULTILIB_ALTIVEC) || defined(PPC_MULTILIB_FPU)
-    uint32_t reserved_for_alignment;
-  #endif
+  uintptr_t RESERVED_FOR_ALIGNMENT_2;
   #ifdef PPC_MULTILIB_ALTIVEC
     uint32_t VRSAVE;
+    uint32_t RESERVED_FOR_ALIGNMENT_3[3];

     /* This field must take stvewx/lvewx requirements into account */
+    uint32_t RESERVED_FOR_ALIGNMENT_4[3];
     uint32_t VSCR;

     uint8_t V0[16];
@@ -1175,6 +1199,7 @@ typedef struct {
     double F30;
     double F31;
     uint64_t FPSCR;
+    uint64_t RESERVED_FOR_ALIGNMENT_5;
   #endif
 } CPU_Exception_frame;

diff --git a/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h b/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
index 57c2db1822..c292feb6fd 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/cpuimpl.h
@@ -10,7 +10,7 @@
  *
  * Copyright (C) 2007 Till Straumann
  *
- * Copyright (c) 2009, 2016 embedded brains GmbH
+ * Copyright (c) 2009, 2017 embedded brains GmbH
  *
  * The license and distribution terms for this file may be
  * found in the file LICENSE in this distribution or at
@@ -22,13 +22,71 @@

 #include

-#define SRR0_FRAME_OFFSET 8
-#define SRR1_FRAME_OFFSET 12
-#define EXCEPTION_NUMBER_OFFSET 16
-#define EXC_CR_OFFSET 20
-#define EXC_CTR_OFFSET 24
-#define EXC_XER_OFFSET 28
-#define EXC_LR_OFFSET 32
+/* Exception stack frame -> BSP_Exception_frame */
+#ifdef __powerpc64__
+  #define FRAME_LINK_SPACE 32
+#else
+  #define FRAME_LINK_SPACE 8
+#endif
+
+#define SRR0_FRAME_OFFSET FRAME_LINK_SPACE
+#define SRR1_FRAME_OFFSET (SRR0_FRAME_OFFSET + PPC_REG_SIZE)
+#define EXCEPTION_NUMBER_OFFSET (SRR1_FRAME_OFFSET + PPC_REG_SIZE)
+#define PPC_EXC_INTERRUPT_ENTRY_INSTANT_OFFSET (EXCEPTION_NUMBER_OFFSET + 4)
+#define EXC_CR_OFFSET (EXCEPTION_NUMBER_OFFSET + 8)
+#define EXC_XER_OFFSET (EXC_CR_OFFSET + 4)
+#define EXC_CTR_OFFSET (EXC_XER_OFFSET + 4)
+#define EXC_LR_OFFSET (EXC_CTR_OFFSET + PPC_REG_SIZE)
+#define PPC_EXC_INTERRUPT_FRAME_OFFSET (EXC_LR_OFFSET + PPC_REG_SIZE)
+
+#ifndef __SPE__
+  #define PPC_EXC_GPR_OFFSET(gpr) \
+    ((gpr) * PPC_GPR_SIZE + PPC_EXC_INTERRUPT_FRAME_OFFSET + PPC_REG_SIZE)
+  #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
+  #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
+    #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
+    #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
+    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
+    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_VR_OFFSET(32))
+    #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
+    #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
+    #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
+    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
+    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_MIN_VR_OFFSET(20))
+    #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
+    #define CPU_INTERRUPT_FRAME_SIZE \
+      (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
+  #elif defined(PPC_MULTILIB_ALTIVEC)
+    #define PPC_EXC_VRSAVE_OFFSET PPC_EXC_GPR_OFFSET(33)
+    #define PPC_EXC_VSCR_OFFSET (PPC_EXC_VRSAVE_OFFSET + 28)
+    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + PPC_EXC_VSCR_OFFSET + 4)
+    #define PPC_EXC_FRAME_SIZE PPC_EXC_VR_OFFSET(32)
+    #define PPC_EXC_MIN_VSCR_OFFSET (PPC_EXC_GPR_OFFSET(13) + 12)
+    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + PPC_EXC_MIN_VSCR_OFFSET + 4)
+    #define CPU_INTERRUPT_FRAME_SIZE \
+      (PPC_EXC_MIN_VR_OFFSET(20) + PPC_STACK_RED_ZONE_SIZE)
+  #elif defined(PPC_MULTILIB_FPU)
+    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(33))
+    #define PPC_EXC_FPSCR_OFFSET PPC_EXC_FR_OFFSET(32)
+    #define PPC_EXC_FRAME_SIZE PPC_EXC_FR_OFFSET(34)
+    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + PPC_EXC_GPR_OFFSET(13))
+    #define PPC_EXC_MIN_FPSCR_OFFSET PPC_EXC_MIN_FR_OFFSET(14)
+    #define CPU_INTERRUPT_FRAME_SIZE \
+      (PPC_EXC_MIN_FR_OFFSET(16) + PPC_STACK_RED_ZONE_SIZE)
+  #else
+    #define PPC_EXC_FRAME_SIZE PPC_EXC_GPR_OFFSET(33)
+    #define CPU_INTERRUPT_FRAME_SIZE \
+      (PPC_EXC_GPR_OFFSET(13) + PPC_STACK_RED_ZONE_SIZE)
+  #endif
+#else
+  #define PPC_EXC_SPEFSCR_OFFSET 44
+  #define PPC_EXC_ACC_OFFSET 48
+  #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 56)
+  #define PPC_EXC_VECTOR_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(4) + 4)
+  #define CPU_INTERRUPT_FRAME_SIZE (160 + PPC_STACK_RED_ZONE_SIZE)
+  #define PPC_EXC_FRAME_SIZE 320
+#endif
+
 #define GPR0_OFFSET PPC_EXC_GPR_OFFSET(0)
 #define GPR1_OFFSET PPC_EXC_GPR_OFFSET(1)
 #define GPR2_OFFSET PPC_EXC_GPR_OFFSET(2)
@@ -62,52 +120,6 @@
 #define GPR30_OFFSET PPC_EXC_GPR_OFFSET(30)
 #define GPR31_OFFSET PPC_EXC_GPR_OFFSET(31)

-/* Exception stack frame -> BSP_Exception_frame */
-#define FRAME_LINK_SPACE 8
-
-#ifndef __SPE__
-  #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36)
-  #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4)
-  #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU)
-    #define PPC_EXC_VRSAVE_OFFSET 168
-    #define PPC_EXC_VSCR_OFFSET 172
-    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
-    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 688)
-    #define PPC_EXC_FPSCR_OFFSET 944
-    #define PPC_EXC_FRAME_SIZE 960
-    #define PPC_EXC_MIN_VSCR_OFFSET 92
-    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
-    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 416)
-    #define PPC_EXC_MIN_FPSCR_OFFSET 528
-    #define CPU_INTERRUPT_FRAME_SIZE 544
-  #elif defined(PPC_MULTILIB_ALTIVEC)
-    #define PPC_EXC_VRSAVE_OFFSET 168
-    #define PPC_EXC_VSCR_OFFSET 172
-    #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176)
-    #define PPC_EXC_FRAME_SIZE 688
-    #define PPC_EXC_MIN_VSCR_OFFSET 92
-    #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96)
-    #define CPU_INTERRUPT_FRAME_SIZE 416
-  #elif defined(PPC_MULTILIB_FPU)
-    #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 168)
-    #define PPC_EXC_FPSCR_OFFSET 424
-    #define PPC_EXC_FRAME_SIZE 448
-    #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 96)
-    #define PPC_EXC_MIN_FPSCR_OFFSET 92
-    #define CPU_INTERRUPT_FRAME_SIZE 224
-  #else
-    #define PPC_EXC_FRAME_SIZE 176
-    #define CPU_INTERRUPT_FRAME_SIZE 96
-  #endif
-#else
-  #define PPC_EXC_SPEFSCR_OFFSET 36
-  #define PPC_EXC_ACC_OFFSET 40
-  #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 48)
-  #define PPC_EXC_VECTOR_PROLOGUE_OFFSET (PPC_EXC_GPR_OFFSET(4) + 4)
-  #define CPU_INTERRUPT_FRAME_SIZE 160
-  #define PPC_EXC_FRAME_SIZE 320
-#endif
-
 #define CPU_PER_CPU_CONTROL_SIZE 0

 #ifdef RTEMS_SMP
@@ -124,15 +136,24 @@ extern "C" {
 #endif

 typedef struct {
-  uint32_t FRAME_SP;
-  uint32_t FRAME_LR;
-  uint32_t EXC_SRR0;
-  uint32_t EXC_SRR1;
-  uint32_t unused;
+  uintptr_t FRAME_SP;
+  #ifdef __powerpc64__
+    uint32_t FRAME_CR;
+    uint32_t FRAME_RESERVED;
+  #endif
+  uintptr_t FRAME_LR;
+  #ifdef __powerpc64__
+    uintptr_t FRAME_TOC;
+  #endif
+  uintptr_t EXC_SRR0;
+  uintptr_t EXC_SRR1;
+  uint32_t RESERVED_FOR_ALIGNMENT_0;
+  uint32_t EXC_INTERRUPT_ENTRY_INSTANT;
   uint32_t EXC_CR;
-  uint32_t EXC_CTR;
   uint32_t EXC_XER;
-  uint32_t EXC_LR;
+  uintptr_t EXC_CTR;
+  uintptr_t EXC_LR;
+  uintptr_t EXC_INTERRUPT_FRAME;
   #ifdef __SPE__
     uint32_t EXC_SPEFSCR;
     uint64_t EXC_ACC;
@@ -150,12 +171,12 @@ typedef struct {
   PPC_GPR_TYPE GPR10;
   PPC_GPR_TYPE GPR11;
   PPC_GPR_TYPE GPR12;
-  uint32_t EARLY_INSTANT;

   #ifdef PPC_MULTILIB_ALTIVEC
     /* This field must take stvewx/lvewx requirements into account */
+    uint32_t RESERVED_FOR_ALIGNMENT_3[3];
     uint32_t VSCR;
-    uint8_t V0[16] RTEMS_ALIGNED(16);
+    uint8_t V0[16];
     uint8_t V1[16];
     uint8_t V2[16];
     uint8_t V3[16];
@@ -192,9 +213,10 @@ typedef struct {
     double F12;
     double F13;
     uint64_t FPSCR;
+    uint64_t RESERVED_FOR_ALIGNMENT_4;
   #endif
-  #if !defined(PPC_MULTILIB_ALTIVEC) && !defined(PPC_MULTILIB_FPU)
-    uint32_t RESERVED_FOR_STACK_ALIGNMENT;
+  #if PPC_STACK_RED_ZONE_SIZE > 0
+    uint8_t RED_ZONE[ PPC_STACK_RED_ZONE_SIZE ];
   #endif
 } CPU_Interrupt_frame;

diff --git a/cpukit/score/cpu/powerpc/rtems/score/powerpc.h b/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
index 29469bc584..88ee0020e6 100644
--- a/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
+++ b/cpukit/score/cpu/powerpc/rtems/score/powerpc.h
@@ -140,6 +140,12 @@ extern "C" {
 #define PPC_ALIGNMENT 8
 #endif

+#ifdef __powerpc64__
+#define PPC_STACK_RED_ZONE_SIZE 512
+#else
+#define PPC_STACK_RED_ZONE_SIZE 0
+#endif
+
 /*
  * Unless specified above, If the model has FP support, it is assumed to
  * support doubles (8-byte floating point numbers).
--
cgit v1.2.3
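
A closing illustration (not part of the patch): on 64-bit targets _CPU_Context_validate
no longer checks r13 against _SDA_BASE_ but instead compares r2 against the TOC base,
which it first materializes with the lis/ori/rldicr/oris/ori sequence added above.  The
C sketch below (hypothetical build64() helper, arbitrary sample value) replays how that
five-instruction sequence composes a 64-bit constant from four 16-bit pieces; the sign
extension performed by lis is ignored here because rldicr clears the affected bits
anyway.

  #include <assert.h>
  #include <stdint.h>
  #include <stdio.h>

  /* Sketch of the .TOC. address materialization used in the new
   * __powerpc64__ path of ppc-context-validate.S. */
  static uint64_t build64(uint64_t value)
  {
    uint16_t highest = (uint16_t) (value >> 48);
    uint16_t higher  = (uint16_t) (value >> 32);
    uint16_t high    = (uint16_t) (value >> 16);
    uint16_t low     = (uint16_t) value;
    uint64_t r;

    r = (uint64_t) highest << 16;  /* lis    r4, .TOC.@highest */
    r |= higher;                   /* ori    r4, r4, .TOC.@higher */
    r <<= 32;                      /* rldicr r4, r4, 32, 31 */
    r |= (uint64_t) high << 16;    /* oris   r4, r4, .TOC.@h */
    r |= low;                      /* ori    r4, r4, .TOC.@l */
    return r;
  }

  int main(void)
  {
    uint64_t toc = UINT64_C(0x123456789abcdef0); /* arbitrary sample address */

    assert(build64(toc) == toc);
    printf("0x%016llx\n", (unsigned long long) build64(toc));
    return 0;
  }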