diff options
author | Sebastian Huber <sebastian.huber@embedded-brains.de> | 2014-12-23 14:18:06 +0100 |
---|---|---|
committer | Sebastian Huber <sebastian.huber@embedded-brains.de> | 2015-01-13 11:37:28 +0100 |
commit | 3e2647a7146d4b972c6a0290e6657bab0de18afa (patch) | |
tree | 027f8a7d676d3ae80950344b3891072e2ca0a736 /c/src/lib/libcpu/powerpc/new-exceptions | |
parent | bsps/powerpc: Use e500 exc categories for e6500 (diff) | |
download | rtems-3e2647a7146d4b972c6a0290e6657bab0de18afa.tar.bz2 |
powerpc: AltiVec and FPU context support
Add AltiVec and FPU support to the Context_Control in case we use the
e6500 multilib.
Add PPC_MULTILIB_ALTIVEC and PPC_MULTILIB_FPU multilib defines. Add
non-volatile AltiVec and FPU context to Context_Control. Add save/restore of
non-volatile AltiVec and FPU context to _CPU_Context_switch(). Add save/restore
of volatile AltiVec and FPU context to the exception code. Adjust data
cache optimizations for the new context and cache line size.
Diffstat (limited to 'c/src/lib/libcpu/powerpc/new-exceptions')
7 files changed, 821 insertions, 12 deletions
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h index 0e3bc96895..c89046619b 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_asm_macros.h @@ -426,6 +426,19 @@ wrap_no_save_frame_register_\_FLVR: /* Check exception type and remember it in non-volatile CR_TYPE */ cmpwi CR_TYPE, VECTOR_REGISTER, 0 +#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC) + /* Enable FPU and/or AltiVec */ + mfmsr SCRATCH_REGISTER_0 +#ifdef PPC_MULTILIB_FPU + ori SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_FP +#endif +#ifdef PPC_MULTILIB_ALTIVEC + oris SCRATCH_REGISTER_0, SCRATCH_REGISTER_0, MSR_VE >> 16 +#endif + mtmsr SCRATCH_REGISTER_0 + isync +#endif + /* * Depending on the exception type we do now save the non-volatile * registers or disable thread dispatching and switch to the ISR stack. 
@@ -545,7 +558,7 @@ wrap_change_msr_done_\_FLVR: #endif /* PPC_EXC_CONFIG_BOOKE_ONLY */ -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) LA SCRATCH_REGISTER_0, _CPU_save_altivec_volatile mtctr SCRATCH_REGISTER_0 addi r3, FRAME_REGISTER, EXC_VEC_OFFSET @@ -566,6 +579,71 @@ wrap_change_msr_done_\_FLVR: lwz VECTOR_REGISTER, EXCEPTION_NUMBER_OFFSET(FRAME_REGISTER) #endif +#ifdef PPC_MULTILIB_ALTIVEC + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0) + stvx v0, FRAME_REGISTER, SCRATCH_REGISTER_0 + mfvscr v0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1) + stvx v1, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2) + stvx v2, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3) + stvx v3, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4) + stvx v4, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5) + stvx v5, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6) + stvx v6, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7) + stvx v7, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8) + stvx v8, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9) + stvx v9, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10) + stvx v10, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11) + stvx v11, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12) + stvx v12, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13) + stvx v13, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14) + stvx v14, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15) + stvx v15, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16) + stvx v16, FRAME_REGISTER, 
SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17) + stvx v17, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18) + stvx v18, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19) + stvx v19, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET + stvewx v0, FRAME_REGISTER, SCRATCH_REGISTER_0 +#endif + +#ifdef PPC_MULTILIB_FPU + stfd f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER) + mffs f0 + stfd f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER) + stfd f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER) + stfd f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER) + stfd f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER) + stfd f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER) + stfd f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER) + stfd f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER) + stfd f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER) + stfd f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER) + stfd f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER) + stfd f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER) + stfd f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER) + stfd f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER) + stfd f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER) +#endif + /* * Call high level exception handler */ @@ -666,13 +744,78 @@ wrap_handler_done_\_FLVR: wrap_thread_dispatching_done_\_FLVR: -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) LA SCRATCH_REGISTER_0, _CPU_load_altivec_volatile mtctr SCRATCH_REGISTER_0 addi r3, FRAME_REGISTER, EXC_VEC_OFFSET bctrl #endif +#ifdef PPC_MULTILIB_ALTIVEC + li SCRATCH_REGISTER_0, PPC_EXC_VSCR_OFFSET + lvewx v0, FRAME_REGISTER, SCRATCH_REGISTER_0 + mtvscr v0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(0) + lvx v0, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(1) + lvx v1, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(2) + lvx v2, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(3) + lvx v3, FRAME_REGISTER, SCRATCH_REGISTER_0 + li 
SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(4) + lvx v4, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(5) + lvx v5, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(6) + lvx v6, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(7) + lvx v7, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(8) + lvx v8, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(9) + lvx v9, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(10) + lvx v10, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(11) + lvx v11, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(12) + lvx v12, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(13) + lvx v13, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(14) + lvx v14, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(15) + lvx v15, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(16) + lvx v16, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(17) + lvx v17, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(18) + lvx v18, FRAME_REGISTER, SCRATCH_REGISTER_0 + li SCRATCH_REGISTER_0, PPC_EXC_VR_OFFSET(19) + lvx v19, FRAME_REGISTER, SCRATCH_REGISTER_0 +#endif + +#ifdef PPC_MULTILIB_FPU + lfd f0, PPC_EXC_FPSCR_OFFSET(FRAME_REGISTER) + mtfsf 0xff, f0 + lfd f0, PPC_EXC_FR_OFFSET(0)(FRAME_REGISTER) + lfd f1, PPC_EXC_FR_OFFSET(1)(FRAME_REGISTER) + lfd f2, PPC_EXC_FR_OFFSET(2)(FRAME_REGISTER) + lfd f3, PPC_EXC_FR_OFFSET(3)(FRAME_REGISTER) + lfd f4, PPC_EXC_FR_OFFSET(4)(FRAME_REGISTER) + lfd f5, PPC_EXC_FR_OFFSET(5)(FRAME_REGISTER) + lfd f6, PPC_EXC_FR_OFFSET(6)(FRAME_REGISTER) + lfd f7, PPC_EXC_FR_OFFSET(7)(FRAME_REGISTER) + lfd f8, PPC_EXC_FR_OFFSET(8)(FRAME_REGISTER) + lfd 
f9, PPC_EXC_FR_OFFSET(9)(FRAME_REGISTER) + lfd f10, PPC_EXC_FR_OFFSET(10)(FRAME_REGISTER) + lfd f11, PPC_EXC_FR_OFFSET(11)(FRAME_REGISTER) + lfd f12, PPC_EXC_FR_OFFSET(12)(FRAME_REGISTER) + lfd f13, PPC_EXC_FR_OFFSET(13)(FRAME_REGISTER) +#endif + #ifndef PPC_EXC_CONFIG_BOOKE_ONLY /* Restore MSR? */ @@ -801,6 +944,56 @@ wrap_save_non_volatile_regs_\_FLVR: stw r31, GPR31_OFFSET(FRAME_REGISTER) #endif +#ifdef PPC_MULTILIB_ALTIVEC + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20) + stvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21) + stvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22) + stvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23) + stvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24) + stvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25) + stvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26) + stvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27) + stvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28) + stvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29) + stvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30) + stvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31) + stvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1 + mfvrsave SCRATCH_REGISTER_1 + stw SCRATCH_REGISTER_1, PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER) +#endif + +#ifdef PPC_MULTILIB_FPU + stfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER) + stfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER) + stfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER) + stfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER) + stfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER) + stfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER) + stfd f20, 
PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER) + stfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER) + stfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER) + stfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER) + stfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER) + stfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER) + stfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER) + stfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER) + stfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER) + stfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER) + stfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER) + stfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER) +#endif + b wrap_disable_thread_dispatching_done_\_FLVR wrap_restore_non_volatile_regs_\_FLVR: @@ -839,6 +1032,56 @@ wrap_restore_non_volatile_regs_\_FLVR: /* Restore stack pointer */ stw SCRATCH_REGISTER_0, 0(r1) +#ifdef PPC_MULTILIB_ALTIVEC + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(20) + lvx v20, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(21) + lvx v21, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(22) + lvx v22, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(23) + lvx v23, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(24) + lvx v24, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(25) + lvx v25, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(26) + lvx v26, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(27) + lvx v27, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(28) + lvx v28, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(29) + lvx v29, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(30) + lvx v30, FRAME_REGISTER, SCRATCH_REGISTER_1 + li SCRATCH_REGISTER_1, PPC_EXC_VR_OFFSET(31) + lvx v31, FRAME_REGISTER, SCRATCH_REGISTER_1 + lwz SCRATCH_REGISTER_1, 
PPC_EXC_VRSAVE_OFFSET(FRAME_REGISTER) + mtvrsave SCRATCH_REGISTER_1 +#endif + +#ifdef PPC_MULTILIB_FPU + lfd f14, PPC_EXC_FR_OFFSET(14)(FRAME_REGISTER) + lfd f15, PPC_EXC_FR_OFFSET(15)(FRAME_REGISTER) + lfd f16, PPC_EXC_FR_OFFSET(16)(FRAME_REGISTER) + lfd f17, PPC_EXC_FR_OFFSET(17)(FRAME_REGISTER) + lfd f18, PPC_EXC_FR_OFFSET(18)(FRAME_REGISTER) + lfd f19, PPC_EXC_FR_OFFSET(19)(FRAME_REGISTER) + lfd f20, PPC_EXC_FR_OFFSET(20)(FRAME_REGISTER) + lfd f21, PPC_EXC_FR_OFFSET(21)(FRAME_REGISTER) + lfd f22, PPC_EXC_FR_OFFSET(22)(FRAME_REGISTER) + lfd f23, PPC_EXC_FR_OFFSET(23)(FRAME_REGISTER) + lfd f24, PPC_EXC_FR_OFFSET(24)(FRAME_REGISTER) + lfd f25, PPC_EXC_FR_OFFSET(25)(FRAME_REGISTER) + lfd f26, PPC_EXC_FR_OFFSET(26)(FRAME_REGISTER) + lfd f27, PPC_EXC_FR_OFFSET(27)(FRAME_REGISTER) + lfd f28, PPC_EXC_FR_OFFSET(28)(FRAME_REGISTER) + lfd f29, PPC_EXC_FR_OFFSET(29)(FRAME_REGISTER) + lfd f30, PPC_EXC_FR_OFFSET(30)(FRAME_REGISTER) + lfd f31, PPC_EXC_FR_OFFSET(31)(FRAME_REGISTER) +#endif + b wrap_thread_dispatching_done_\_FLVR wrap_call_global_handler_\_FLVR: diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S index a1ae8931d7..ae575c5825 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011-2014 embedded brains GmbH. All rights reserved. + * Copyright (c) 2011-2015 embedded brains GmbH. All rights reserved. * * embedded brains GmbH * Dornierstr. 
4 @@ -105,6 +105,19 @@ ppc_exc_wrap_async_normal: isync #endif +#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC) + /* Enable FPU and/or AltiVec */ + mfmsr FRAME_REGISTER +#ifdef PPC_MULTILIB_FPU + ori FRAME_REGISTER, FRAME_REGISTER, MSR_FP +#endif +#ifdef PPC_MULTILIB_ALTIVEC + oris FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16 +#endif + mtmsr FRAME_REGISTER + isync +#endif + /* Move frame pointer to non-volatile FRAME_REGISTER */ mr FRAME_REGISTER, r1 @@ -176,6 +189,73 @@ ppc_exc_wrap_async_normal: evstdd SCRATCH_1_REGISTER, PPC_EXC_ACC_OFFSET(r1) #endif +#ifdef PPC_MULTILIB_ALTIVEC + /* Save volatile AltiVec context */ + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0) + stvx v0, r1, SCRATCH_0_REGISTER + mfvscr v0 + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1) + stvx v1, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2) + stvx v2, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3) + stvx v3, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4) + stvx v4, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5) + stvx v5, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6) + stvx v6, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7) + stvx v7, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8) + stvx v8, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9) + stvx v9, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10) + stvx v10, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11) + stvx v11, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12) + stvx v12, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13) + stvx v13, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14) + stvx v14, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15) + stvx v15, r1, 
SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16) + stvx v16, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17) + stvx v17, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18) + stvx v18, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19) + stvx v19, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET + stvewx v0, r1, SCRATCH_0_REGISTER +#endif + +#ifdef PPC_MULTILIB_FPU + /* Save volatile FPU context */ + stfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1) + mffs f0 + stfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1) + stfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1) + stfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1) + stfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1) + stfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1) + stfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1) + stfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1) + stfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1) + stfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1) + stfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1) + stfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1) + stfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1) + stfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1) + stfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1) +#endif + /* Increment ISR nest level and thread dispatch disable level */ cmpwi ISR_NEST_REGISTER, 0 addi ISR_NEST_REGISTER, ISR_NEST_REGISTER, 1 @@ -246,6 +326,73 @@ profiling_done: bl _Thread_Dispatch thread_dispatching_done: +#ifdef PPC_MULTILIB_ALTIVEC + /* Restore volatile AltiVec context */ + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET + lvewx v0, r1, SCRATCH_0_REGISTER + mtvscr v0 + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0) + lvx v0, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1) + lvx v1, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2) + lvx v2, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3) + lvx v3, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4) + lvx v4, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5) + 
lvx v5, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6) + lvx v6, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7) + lvx v7, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8) + lvx v8, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9) + lvx v9, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10) + lvx v10, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11) + lvx v11, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12) + lvx v12, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13) + lvx v13, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14) + lvx v14, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15) + lvx v15, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16) + lvx v16, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17) + lvx v17, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18) + lvx v18, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19) + lvx v19, r1, SCRATCH_0_REGISTER +#endif + +#ifdef PPC_MULTILIB_FPU + /* Restore volatile FPU context */ + lfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1) + mtfsf 0xff, f0 + lfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1) + lfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1) + lfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1) + lfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1) + lfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1) + lfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1) + lfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1) + lfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1) + lfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1) + lfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1) + lfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1) + lfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1) + lfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1) + lfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1) +#endif + #ifdef __SPE__ /* Load SPEFSCR and ACC */ lwz DISPATCH_LEVEL_REGISTER, 
PPC_EXC_SPEFSCR_OFFSET(r1) diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_initialize.c b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_initialize.c index ef978f8d57..54451afe03 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_initialize.c +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_initialize.c @@ -36,6 +36,15 @@ #define PPC_EXC_ASSERT_CANONIC_OFFSET(field) \ PPC_EXC_ASSERT_OFFSET(field, field ## _OFFSET) +#define PPC_EXC_MIN_ASSERT_OFFSET(field, off) \ + RTEMS_STATIC_ASSERT( \ + offsetof(ppc_exc_min_frame, field) + FRAME_LINK_SPACE == off, \ + ppc_exc_min_frame_offset_ ## field \ + ) + +#define PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(field) \ + PPC_EXC_MIN_ASSERT_OFFSET(field, field ## _OFFSET) + PPC_EXC_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET); PPC_EXC_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET); PPC_EXC_ASSERT_OFFSET(_EXC_number, EXCEPTION_NUMBER_OFFSET); @@ -80,6 +89,145 @@ PPC_EXC_ASSERT_CANONIC_OFFSET(GPR29); PPC_EXC_ASSERT_CANONIC_OFFSET(GPR30); PPC_EXC_ASSERT_CANONIC_OFFSET(GPR31); +PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR0, SRR0_FRAME_OFFSET); +PPC_EXC_MIN_ASSERT_OFFSET(EXC_SRR1, SRR1_FRAME_OFFSET); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CR); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_CTR); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_XER); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(EXC_LR); +#ifdef __SPE__ + PPC_EXC_MIN_ASSERT_OFFSET(EXC_SPEFSCR, PPC_EXC_SPEFSCR_OFFSET); + PPC_EXC_MIN_ASSERT_OFFSET(EXC_ACC, PPC_EXC_ACC_OFFSET); +#endif +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR0); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR1); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR2); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR3); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR4); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR5); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR6); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR7); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR8); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR9); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR10); 
+PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR11); +PPC_EXC_MIN_ASSERT_CANONIC_OFFSET(GPR12); + +#ifdef PPC_MULTILIB_ALTIVEC +PPC_EXC_ASSERT_OFFSET(VSCR, PPC_EXC_VSCR_OFFSET); +PPC_EXC_ASSERT_OFFSET(VRSAVE, PPC_EXC_VRSAVE_OFFSET); +RTEMS_STATIC_ASSERT(PPC_EXC_VR_OFFSET(0) % 16 == 0, PPC_EXC_VR_OFFSET); +PPC_EXC_ASSERT_OFFSET(V0, PPC_EXC_VR_OFFSET(0)); +PPC_EXC_ASSERT_OFFSET(V1, PPC_EXC_VR_OFFSET(1)); +PPC_EXC_ASSERT_OFFSET(V2, PPC_EXC_VR_OFFSET(2)); +PPC_EXC_ASSERT_OFFSET(V3, PPC_EXC_VR_OFFSET(3)); +PPC_EXC_ASSERT_OFFSET(V4, PPC_EXC_VR_OFFSET(4)); +PPC_EXC_ASSERT_OFFSET(V5, PPC_EXC_VR_OFFSET(5)); +PPC_EXC_ASSERT_OFFSET(V6, PPC_EXC_VR_OFFSET(6)); +PPC_EXC_ASSERT_OFFSET(V7, PPC_EXC_VR_OFFSET(7)); +PPC_EXC_ASSERT_OFFSET(V8, PPC_EXC_VR_OFFSET(8)); +PPC_EXC_ASSERT_OFFSET(V9, PPC_EXC_VR_OFFSET(9)); +PPC_EXC_ASSERT_OFFSET(V10, PPC_EXC_VR_OFFSET(10)); +PPC_EXC_ASSERT_OFFSET(V11, PPC_EXC_VR_OFFSET(11)); +PPC_EXC_ASSERT_OFFSET(V12, PPC_EXC_VR_OFFSET(12)); +PPC_EXC_ASSERT_OFFSET(V13, PPC_EXC_VR_OFFSET(13)); +PPC_EXC_ASSERT_OFFSET(V14, PPC_EXC_VR_OFFSET(14)); +PPC_EXC_ASSERT_OFFSET(V15, PPC_EXC_VR_OFFSET(15)); +PPC_EXC_ASSERT_OFFSET(V16, PPC_EXC_VR_OFFSET(16)); +PPC_EXC_ASSERT_OFFSET(V17, PPC_EXC_VR_OFFSET(17)); +PPC_EXC_ASSERT_OFFSET(V18, PPC_EXC_VR_OFFSET(18)); +PPC_EXC_ASSERT_OFFSET(V19, PPC_EXC_VR_OFFSET(19)); +PPC_EXC_ASSERT_OFFSET(V20, PPC_EXC_VR_OFFSET(20)); +PPC_EXC_ASSERT_OFFSET(V21, PPC_EXC_VR_OFFSET(21)); +PPC_EXC_ASSERT_OFFSET(V22, PPC_EXC_VR_OFFSET(22)); +PPC_EXC_ASSERT_OFFSET(V23, PPC_EXC_VR_OFFSET(23)); +PPC_EXC_ASSERT_OFFSET(V24, PPC_EXC_VR_OFFSET(24)); +PPC_EXC_ASSERT_OFFSET(V25, PPC_EXC_VR_OFFSET(25)); +PPC_EXC_ASSERT_OFFSET(V26, PPC_EXC_VR_OFFSET(26)); +PPC_EXC_ASSERT_OFFSET(V27, PPC_EXC_VR_OFFSET(27)); +PPC_EXC_ASSERT_OFFSET(V28, PPC_EXC_VR_OFFSET(28)); +PPC_EXC_ASSERT_OFFSET(V29, PPC_EXC_VR_OFFSET(29)); +PPC_EXC_ASSERT_OFFSET(V30, PPC_EXC_VR_OFFSET(30)); +PPC_EXC_ASSERT_OFFSET(V31, PPC_EXC_VR_OFFSET(31)); + +PPC_EXC_MIN_ASSERT_OFFSET(VSCR, 
PPC_EXC_MIN_VSCR_OFFSET); +RTEMS_STATIC_ASSERT(PPC_EXC_MIN_VR_OFFSET(0) % 16 == 0, PPC_EXC_MIN_VR_OFFSET); +PPC_EXC_MIN_ASSERT_OFFSET(V0, PPC_EXC_MIN_VR_OFFSET(0)); +PPC_EXC_MIN_ASSERT_OFFSET(V1, PPC_EXC_MIN_VR_OFFSET(1)); +PPC_EXC_MIN_ASSERT_OFFSET(V2, PPC_EXC_MIN_VR_OFFSET(2)); +PPC_EXC_MIN_ASSERT_OFFSET(V3, PPC_EXC_MIN_VR_OFFSET(3)); +PPC_EXC_MIN_ASSERT_OFFSET(V4, PPC_EXC_MIN_VR_OFFSET(4)); +PPC_EXC_MIN_ASSERT_OFFSET(V5, PPC_EXC_MIN_VR_OFFSET(5)); +PPC_EXC_MIN_ASSERT_OFFSET(V6, PPC_EXC_MIN_VR_OFFSET(6)); +PPC_EXC_MIN_ASSERT_OFFSET(V7, PPC_EXC_MIN_VR_OFFSET(7)); +PPC_EXC_MIN_ASSERT_OFFSET(V8, PPC_EXC_MIN_VR_OFFSET(8)); +PPC_EXC_MIN_ASSERT_OFFSET(V9, PPC_EXC_MIN_VR_OFFSET(9)); +PPC_EXC_MIN_ASSERT_OFFSET(V10, PPC_EXC_MIN_VR_OFFSET(10)); +PPC_EXC_MIN_ASSERT_OFFSET(V11, PPC_EXC_MIN_VR_OFFSET(11)); +PPC_EXC_MIN_ASSERT_OFFSET(V12, PPC_EXC_MIN_VR_OFFSET(12)); +PPC_EXC_MIN_ASSERT_OFFSET(V13, PPC_EXC_MIN_VR_OFFSET(13)); +PPC_EXC_MIN_ASSERT_OFFSET(V14, PPC_EXC_MIN_VR_OFFSET(14)); +PPC_EXC_MIN_ASSERT_OFFSET(V15, PPC_EXC_MIN_VR_OFFSET(15)); +PPC_EXC_MIN_ASSERT_OFFSET(V16, PPC_EXC_MIN_VR_OFFSET(16)); +PPC_EXC_MIN_ASSERT_OFFSET(V17, PPC_EXC_MIN_VR_OFFSET(17)); +PPC_EXC_MIN_ASSERT_OFFSET(V18, PPC_EXC_MIN_VR_OFFSET(18)); +PPC_EXC_MIN_ASSERT_OFFSET(V19, PPC_EXC_MIN_VR_OFFSET(19)); +#endif + +#ifdef PPC_MULTILIB_FPU +RTEMS_STATIC_ASSERT(PPC_EXC_FR_OFFSET(0) % 8 == 0, PPC_EXC_FR_OFFSET); +PPC_EXC_ASSERT_OFFSET(F0, PPC_EXC_FR_OFFSET(0)); +PPC_EXC_ASSERT_OFFSET(F1, PPC_EXC_FR_OFFSET(1)); +PPC_EXC_ASSERT_OFFSET(F2, PPC_EXC_FR_OFFSET(2)); +PPC_EXC_ASSERT_OFFSET(F3, PPC_EXC_FR_OFFSET(3)); +PPC_EXC_ASSERT_OFFSET(F4, PPC_EXC_FR_OFFSET(4)); +PPC_EXC_ASSERT_OFFSET(F5, PPC_EXC_FR_OFFSET(5)); +PPC_EXC_ASSERT_OFFSET(F6, PPC_EXC_FR_OFFSET(6)); +PPC_EXC_ASSERT_OFFSET(F7, PPC_EXC_FR_OFFSET(7)); +PPC_EXC_ASSERT_OFFSET(F8, PPC_EXC_FR_OFFSET(8)); +PPC_EXC_ASSERT_OFFSET(F9, PPC_EXC_FR_OFFSET(9)); +PPC_EXC_ASSERT_OFFSET(F10, PPC_EXC_FR_OFFSET(10)); +PPC_EXC_ASSERT_OFFSET(F11, PPC_EXC_FR_OFFSET(11)); 
+PPC_EXC_ASSERT_OFFSET(F12, PPC_EXC_FR_OFFSET(12)); +PPC_EXC_ASSERT_OFFSET(F13, PPC_EXC_FR_OFFSET(13)); +PPC_EXC_ASSERT_OFFSET(F14, PPC_EXC_FR_OFFSET(14)); +PPC_EXC_ASSERT_OFFSET(F15, PPC_EXC_FR_OFFSET(15)); +PPC_EXC_ASSERT_OFFSET(F16, PPC_EXC_FR_OFFSET(16)); +PPC_EXC_ASSERT_OFFSET(F17, PPC_EXC_FR_OFFSET(17)); +PPC_EXC_ASSERT_OFFSET(F18, PPC_EXC_FR_OFFSET(18)); +PPC_EXC_ASSERT_OFFSET(F19, PPC_EXC_FR_OFFSET(19)); +PPC_EXC_ASSERT_OFFSET(F20, PPC_EXC_FR_OFFSET(20)); +PPC_EXC_ASSERT_OFFSET(F21, PPC_EXC_FR_OFFSET(21)); +PPC_EXC_ASSERT_OFFSET(F22, PPC_EXC_FR_OFFSET(22)); +PPC_EXC_ASSERT_OFFSET(F23, PPC_EXC_FR_OFFSET(23)); +PPC_EXC_ASSERT_OFFSET(F24, PPC_EXC_FR_OFFSET(24)); +PPC_EXC_ASSERT_OFFSET(F25, PPC_EXC_FR_OFFSET(25)); +PPC_EXC_ASSERT_OFFSET(F26, PPC_EXC_FR_OFFSET(26)); +PPC_EXC_ASSERT_OFFSET(F27, PPC_EXC_FR_OFFSET(27)); +PPC_EXC_ASSERT_OFFSET(F28, PPC_EXC_FR_OFFSET(28)); +PPC_EXC_ASSERT_OFFSET(F29, PPC_EXC_FR_OFFSET(29)); +PPC_EXC_ASSERT_OFFSET(F30, PPC_EXC_FR_OFFSET(30)); +PPC_EXC_ASSERT_OFFSET(F31, PPC_EXC_FR_OFFSET(31)); +PPC_EXC_ASSERT_OFFSET(FPSCR, PPC_EXC_FPSCR_OFFSET); + +RTEMS_STATIC_ASSERT(PPC_EXC_MIN_FR_OFFSET(0) % 8 == 0, PPC_EXC_MIN_FR_OFFSET); +PPC_EXC_MIN_ASSERT_OFFSET(F0, PPC_EXC_MIN_FR_OFFSET(0)); +PPC_EXC_MIN_ASSERT_OFFSET(F1, PPC_EXC_MIN_FR_OFFSET(1)); +PPC_EXC_MIN_ASSERT_OFFSET(F2, PPC_EXC_MIN_FR_OFFSET(2)); +PPC_EXC_MIN_ASSERT_OFFSET(F3, PPC_EXC_MIN_FR_OFFSET(3)); +PPC_EXC_MIN_ASSERT_OFFSET(F4, PPC_EXC_MIN_FR_OFFSET(4)); +PPC_EXC_MIN_ASSERT_OFFSET(F5, PPC_EXC_MIN_FR_OFFSET(5)); +PPC_EXC_MIN_ASSERT_OFFSET(F6, PPC_EXC_MIN_FR_OFFSET(6)); +PPC_EXC_MIN_ASSERT_OFFSET(F7, PPC_EXC_MIN_FR_OFFSET(7)); +PPC_EXC_MIN_ASSERT_OFFSET(F8, PPC_EXC_MIN_FR_OFFSET(8)); +PPC_EXC_MIN_ASSERT_OFFSET(F9, PPC_EXC_MIN_FR_OFFSET(9)); +PPC_EXC_MIN_ASSERT_OFFSET(F10, PPC_EXC_MIN_FR_OFFSET(10)); +PPC_EXC_MIN_ASSERT_OFFSET(F11, PPC_EXC_MIN_FR_OFFSET(11)); +PPC_EXC_MIN_ASSERT_OFFSET(F12, PPC_EXC_MIN_FR_OFFSET(12)); +PPC_EXC_MIN_ASSERT_OFFSET(F13, PPC_EXC_MIN_FR_OFFSET(13)); 
+PPC_EXC_MIN_ASSERT_OFFSET(FPSCR, PPC_EXC_MIN_FPSCR_OFFSET); +#endif + RTEMS_STATIC_ASSERT( PPC_EXC_MINIMAL_FRAME_SIZE % CPU_STACK_ALIGNMENT == 0, PPC_EXC_MINIMAL_FRAME_SIZE diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c index cb95aba006..27b76a1934 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_print.c @@ -179,6 +179,42 @@ void _CPU_Exception_frame_print(const CPU_Exception_frame *excPtr) printk(" MCSR = 0x%08x\n", mcsr); } +#ifdef PPC_MULTILIB_ALTIVEC + { + unsigned char *v = (unsigned char *) &excPtr->V0; + int i; + int j; + + printk(" VSCR = 0x%08x\n", excPtr->VSCR); + printk("VRSAVE = 0x%08x\n", excPtr->VRSAVE); + + for (i = 0; i < 32; ++i) { + printk(" V%02i = 0x", i); + + for (j = 0; j < 16; ++j) { + printk("%02x", v[j]); + } + + printk("\n"); + + v += 16; + } + } +#endif + +#ifdef PPC_MULTILIB_FPU + { + unsigned long long *f = (unsigned long long *) &excPtr->F0; + int i; + + printk("FPSCR = 0x%08llx\n", excPtr->FPSCR); + + for (i = 0; i < 32; ++i) { + printk(" F%02i = 0x%016llx\n", i, f[i]); + } + } +#endif + if (executing != NULL) { const char *name = (const char *) &executing->Object.name; diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h index bf13c757f1..86bd0f11f2 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/vectors.h @@ -146,8 +146,37 @@ extern "C" { #ifndef __SPE__ #define PPC_EXC_GPR_OFFSET(gpr) ((gpr) * PPC_GPR_SIZE + 36) #define PPC_EXC_VECTOR_PROLOGUE_OFFSET PPC_EXC_GPR_OFFSET(4) - #define PPC_EXC_MINIMAL_FRAME_SIZE 96 - #define PPC_EXC_FRAME_SIZE 176 + #if defined(PPC_MULTILIB_ALTIVEC) && defined(PPC_MULTILIB_FPU) + #define PPC_EXC_VSCR_OFFSET 168 + #define PPC_EXC_VRSAVE_OFFSET 
172 + #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176) + #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 688) + #define PPC_EXC_FPSCR_OFFSET 944 + #define PPC_EXC_FRAME_SIZE 960 + #define PPC_EXC_MIN_VSCR_OFFSET 92 + #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96) + #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 416) + #define PPC_EXC_MIN_FPSCR_OFFSET 528 + #define PPC_EXC_MINIMAL_FRAME_SIZE 544 + #elif defined(PPC_MULTILIB_ALTIVEC) + #define PPC_EXC_VSCR_OFFSET 168 + #define PPC_EXC_VRSAVE_OFFSET 172 + #define PPC_EXC_VR_OFFSET(v) ((v) * 16 + 176) + #define PPC_EXC_FRAME_SIZE 688 + #define PPC_EXC_MIN_VSCR_OFFSET 92 + #define PPC_EXC_MIN_VR_OFFSET(v) ((v) * 16 + 96) + #define PPC_EXC_MINIMAL_FRAME_SIZE 416 + #elif defined(PPC_MULTILIB_FPU) + #define PPC_EXC_FR_OFFSET(f) ((f) * 8 + 168) + #define PPC_EXC_FPSCR_OFFSET 424 + #define PPC_EXC_FRAME_SIZE 448 + #define PPC_EXC_MIN_FR_OFFSET(f) ((f) * 8 + 96) + #define PPC_EXC_MIN_FPSCR_OFFSET 92 + #define PPC_EXC_MINIMAL_FRAME_SIZE 224 + #else + #define PPC_EXC_FRAME_SIZE 176 + #define PPC_EXC_MINIMAL_FRAME_SIZE 96 + #endif #else #define PPC_EXC_SPEFSCR_OFFSET 36 #define PPC_EXC_ACC_OFFSET 40 @@ -214,7 +243,7 @@ extern "C" { #define EXC_GENERIC_SIZE PPC_EXC_FRAME_SIZE -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) #define EXC_VEC_OFFSET EXC_GENERIC_SIZE #ifndef PPC_CACHE_ALIGNMENT #error "Missing include file!" 
@@ -248,6 +277,77 @@ extern "C" { * @{ */ +typedef struct { + uint32_t EXC_SRR0; + uint32_t EXC_SRR1; + uint32_t unused; + uint32_t EXC_CR; + uint32_t EXC_CTR; + uint32_t EXC_XER; + uint32_t EXC_LR; + #ifdef __SPE__ + uint32_t EXC_SPEFSCR; + uint64_t EXC_ACC; + #endif + PPC_GPR_TYPE GPR0; + PPC_GPR_TYPE GPR1; + PPC_GPR_TYPE GPR2; + PPC_GPR_TYPE GPR3; + PPC_GPR_TYPE GPR4; + PPC_GPR_TYPE GPR5; + PPC_GPR_TYPE GPR6; + PPC_GPR_TYPE GPR7; + PPC_GPR_TYPE GPR8; + PPC_GPR_TYPE GPR9; + PPC_GPR_TYPE GPR10; + PPC_GPR_TYPE GPR11; + PPC_GPR_TYPE GPR12; + uint32_t EARLY_INSTANT; + #ifdef PPC_MULTILIB_ALTIVEC + uint32_t VSCR; + uint8_t V0[16]; + uint8_t V1[16]; + uint8_t V2[16]; + uint8_t V3[16]; + uint8_t V4[16]; + uint8_t V5[16]; + uint8_t V6[16]; + uint8_t V7[16]; + uint8_t V8[16]; + uint8_t V9[16]; + uint8_t V10[16]; + uint8_t V11[16]; + uint8_t V12[16]; + uint8_t V13[16]; + uint8_t V14[16]; + uint8_t V15[16]; + uint8_t V16[16]; + uint8_t V17[16]; + uint8_t V18[16]; + uint8_t V19[16]; + #endif + #ifdef PPC_MULTILIB_FPU + #ifndef PPC_MULTILIB_ALTIVEC + uint32_t reserved_for_alignment; + #endif + double F0; + double F1; + double F2; + double F3; + double F4; + double F5; + double F6; + double F7; + double F8; + double F9; + double F10; + double F11; + double F12; + double F13; + uint64_t FPSCR; + #endif +} ppc_exc_min_frame; + typedef CPU_Exception_frame BSP_Exception_frame; /** @} */ diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c b/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c index 6aa1301baf..0b0527ec4d 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c +++ b/c/src/lib/libcpu/powerpc/new-exceptions/cpu.c @@ -45,7 +45,7 @@ */ void _CPU_Initialize(void) { -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) _CPU_Initialize_altivec(); #endif } @@ -75,6 +75,8 @@ void _CPU_Context_Initialize( _CPU_MSR_GET( msr_value ); + the_ppc_context = ppc_get_context( the_context ); + /* * Setting the interrupt mask here is not strictly necessary 
* since the IRQ level will be established from _Thread_Handler() @@ -95,6 +97,9 @@ void _CPU_Context_Initialize( msr_value &= ~ppc_interrupt_get_disable_mask(); } +#ifdef PPC_MULTILIB_FPU + msr_value |= MSR_FP; +#else /* * The FP bit of the MSR should only be enabled if this is a floating * point task. Unfortunately, the vfprintf_r routine in newlib @@ -118,13 +123,19 @@ void _CPU_Context_Initialize( msr_value |= PPC_MSR_FP; else msr_value &= ~PPC_MSR_FP; +#endif + +#ifdef PPC_MULTILIB_ALTIVEC + msr_value |= MSR_VE; + + the_ppc_context->vrsave = 0; +#endif - the_ppc_context = ppc_get_context( the_context ); the_ppc_context->gpr1 = sp; the_ppc_context->msr = msr_value; the_ppc_context->lr = (uint32_t) entry_point; -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) _CPU_Context_initialize_altivec( the_ppc_context ); #endif diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S index 26ef58d7b9..5d8c70d290 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S +++ b/c/src/lib/libcpu/powerpc/new-exceptions/cpu_asm.S @@ -23,7 +23,7 @@ * COPYRIGHT (c) 1989-1997. * On-Line Applications Research Corporation (OAR). 
* - * Copyright (c) 2011-2014 embedded brains GmbH + * Copyright (c) 2011-2015 embedded brains GmbH * * The license and distribution terms for this file may in * the file LICENSE in this distribution or at @@ -55,6 +55,7 @@ #define PPC_CONTEXT_CACHE_LINE_2 (3 * PPC_DEFAULT_CACHE_LINE_SIZE) #define PPC_CONTEXT_CACHE_LINE_3 (4 * PPC_DEFAULT_CACHE_LINE_SIZE) #define PPC_CONTEXT_CACHE_LINE_4 (5 * PPC_DEFAULT_CACHE_LINE_SIZE) +#define PPC_CONTEXT_CACHE_LINE_5 (6 * PPC_DEFAULT_CACHE_LINE_SIZE) BEGIN_CODE @@ -257,7 +258,10 @@ PROC (_CPU_Context_switch): clrrwi r5, r4, PPC_DEFAULT_CACHE_LINE_POWER DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_0) + +#if PPC_CONTEXT_CACHE_LINE_2 <= PPC_CONTEXT_VOLATILE_SIZE DATA_CACHE_ZERO_AND_TOUCH(r11, PPC_CONTEXT_CACHE_LINE_1) +#endif /* Save context to r3 */ @@ -317,6 +321,11 @@ PROC (_CPU_Context_switch): PPC_GPR_STORE r24, PPC_CONTEXT_OFFSET_GPR24(r3) PPC_GPR_STORE r25, PPC_CONTEXT_OFFSET_GPR25(r3) + +#if PPC_CONTEXT_OFFSET_V22 == PPC_CONTEXT_CACHE_LINE_2 + DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_2) +#endif + PPC_GPR_STORE r26, PPC_CONTEXT_OFFSET_GPR26(r3) PPC_GPR_STORE r27, PPC_CONTEXT_OFFSET_GPR27(r3) @@ -327,6 +336,71 @@ PROC (_CPU_Context_switch): stw r2, PPC_CONTEXT_OFFSET_GPR2(r3) +#ifdef PPC_MULTILIB_ALTIVEC + li r9, PPC_CONTEXT_OFFSET_V20 + stvx v20, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V21 + stvx v21, r3, r9 + +#if PPC_CONTEXT_OFFSET_V26 == PPC_CONTEXT_CACHE_LINE_3 + DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_3) +#endif + + li r9, PPC_CONTEXT_OFFSET_V22 + stvx v22, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V23 + stvx v23, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V24 + stvx v24, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V25 + stvx v25, r3, r9 + +#if PPC_CONTEXT_OFFSET_V30 == PPC_CONTEXT_CACHE_LINE_4 + DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_4) +#endif + + li r9, PPC_CONTEXT_OFFSET_V26 + stvx v26, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V27 + stvx v27, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V28 + stvx v28, r3, r9 
+ li r9, PPC_CONTEXT_OFFSET_V29 + stvx v29, r3, r9 + +#if PPC_CONTEXT_OFFSET_F17 == PPC_CONTEXT_CACHE_LINE_5 + DATA_CACHE_ZERO_AND_TOUCH(r10, PPC_CONTEXT_CACHE_LINE_5) +#endif + + li r9, PPC_CONTEXT_OFFSET_V30 + stvx v30, r3, r9 + li r9, PPC_CONTEXT_OFFSET_V31 + stvx v31, r3, r9 + mfvrsave r9 + stw r9, PPC_CONTEXT_OFFSET_VRSAVE(r3) +#endif + +#ifdef PPC_MULTILIB_FPU + stfd f14, PPC_CONTEXT_OFFSET_F14(r3) + stfd f15, PPC_CONTEXT_OFFSET_F15(r3) + stfd f16, PPC_CONTEXT_OFFSET_F16(r3) + stfd f17, PPC_CONTEXT_OFFSET_F17(r3) + stfd f18, PPC_CONTEXT_OFFSET_F18(r3) + stfd f19, PPC_CONTEXT_OFFSET_F19(r3) + stfd f20, PPC_CONTEXT_OFFSET_F20(r3) + stfd f21, PPC_CONTEXT_OFFSET_F21(r3) + stfd f22, PPC_CONTEXT_OFFSET_F22(r3) + stfd f23, PPC_CONTEXT_OFFSET_F23(r3) + stfd f24, PPC_CONTEXT_OFFSET_F24(r3) + stfd f25, PPC_CONTEXT_OFFSET_F25(r3) + stfd f26, PPC_CONTEXT_OFFSET_F26(r3) + stfd f27, PPC_CONTEXT_OFFSET_F27(r3) + stfd f28, PPC_CONTEXT_OFFSET_F28(r3) + stfd f29, PPC_CONTEXT_OFFSET_F29(r3) + stfd f30, PPC_CONTEXT_OFFSET_F30(r3) + stfd f31, PPC_CONTEXT_OFFSET_F31(r3) +#endif + #ifdef RTEMS_SMP /* The executing context no longer executes on this processor */ msync @@ -351,7 +425,7 @@ check_is_executing: /* Restore context from r5 */ restore_context: -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) mr r14, r5 .extern _CPU_Context_switch_altivec bl _CPU_Context_switch_altivec @@ -390,6 +464,56 @@ restore_context: lwz r2, PPC_CONTEXT_OFFSET_GPR2(r5) +#ifdef PPC_MULTILIB_ALTIVEC + li r9, PPC_CONTEXT_OFFSET_V20 + lvx v20, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V21 + lvx v21, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V22 + lvx v22, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V23 + lvx v23, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V24 + lvx v24, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V25 + lvx v25, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V26 + lvx v26, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V27 + lvx v27, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V28 + lvx v28, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V29 
+ lvx v29, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V30 + lvx v30, r5, r9 + li r9, PPC_CONTEXT_OFFSET_V31 + lvx v31, r5, r9 + lwz r9, PPC_CONTEXT_OFFSET_VRSAVE(r5) + mtvrsave r9 +#endif + +#ifdef PPC_MULTILIB_FPU + lfd f14, PPC_CONTEXT_OFFSET_F14(r5) + lfd f15, PPC_CONTEXT_OFFSET_F15(r5) + lfd f16, PPC_CONTEXT_OFFSET_F16(r5) + lfd f17, PPC_CONTEXT_OFFSET_F17(r5) + lfd f18, PPC_CONTEXT_OFFSET_F18(r5) + lfd f19, PPC_CONTEXT_OFFSET_F19(r5) + lfd f20, PPC_CONTEXT_OFFSET_F20(r5) + lfd f21, PPC_CONTEXT_OFFSET_F21(r5) + lfd f22, PPC_CONTEXT_OFFSET_F22(r5) + lfd f23, PPC_CONTEXT_OFFSET_F23(r5) + lfd f24, PPC_CONTEXT_OFFSET_F24(r5) + lfd f25, PPC_CONTEXT_OFFSET_F25(r5) + lfd f26, PPC_CONTEXT_OFFSET_F26(r5) + lfd f27, PPC_CONTEXT_OFFSET_F27(r5) + lfd f28, PPC_CONTEXT_OFFSET_F28(r5) + lfd f29, PPC_CONTEXT_OFFSET_F29(r5) + lfd f30, PPC_CONTEXT_OFFSET_F30(r5) + lfd f31, PPC_CONTEXT_OFFSET_F31(r5) +#endif + mtcr r8 mtlr r7 mtmsr r6 @@ -405,7 +529,7 @@ PROC (_CPU_Context_restore): /* Align to a cache line */ clrrwi r5, r3, PPC_DEFAULT_CACHE_LINE_POWER -#ifdef __ALTIVEC__ +#if defined(__ALTIVEC__) && !defined(PPC_MULTILIB_ALTIVEC) li r3, 0 #endif |