diff options
author | Sebastian Huber <sebastian.huber@embedded-brains.de> | 2014-12-23 14:18:06 +0100 |
---|---|---|
committer | Sebastian Huber <sebastian.huber@embedded-brains.de> | 2015-01-13 11:37:28 +0100 |
commit | 3e2647a7146d4b972c6a0290e6657bab0de18afa (patch) | |
tree | 027f8a7d676d3ae80950344b3891072e2ca0a736 /c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S | |
parent | bsps/powerpc: Use e500 exc categories for e6500 (diff) | |
download | rtems-3e2647a7146d4b972c6a0290e6657bab0de18afa.tar.bz2 |
powerpc: AltiVec and FPU context support
Add AltiVec and FPU support to the Context_Control in case we use the
e6500 multilib.
Add PPC_MULTILIB_ALTIVEC and PPC_MULTILIB_FPU multilib defines. Add
non-volatile AltiVec and FPU context to Context_Control. Add save/restore of
non-volatile AltiVec and FPU to _CPU_Context_switch(). Add save/restore
of volatile AltiVec and FPU context to the exception code. Adjust data
cache optimizations for the new context and cache line size.
Diffstat (limited to 'c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S')
-rw-r--r-- | c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S | 149 |
1 file changed, 148 insertions(+), 1 deletion(-)
diff --git a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S index a1ae8931d7..ae575c5825 100644 --- a/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S +++ b/c/src/lib/libcpu/powerpc/new-exceptions/bspsupport/ppc_exc_async_normal.S @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011-2014 embedded brains GmbH. All rights reserved. + * Copyright (c) 2011-2015 embedded brains GmbH. All rights reserved. * * embedded brains GmbH * Dornierstr. 4 @@ -105,6 +105,19 @@ ppc_exc_wrap_async_normal: isync #endif +#if defined(PPC_MULTILIB_FPU) || defined(PPC_MULTILIB_ALTIVEC) + /* Enable FPU and/or AltiVec */ + mfmsr FRAME_REGISTER +#ifdef PPC_MULTILIB_FPU + ori FRAME_REGISTER, FRAME_REGISTER, MSR_FP +#endif +#ifdef PPC_MULTILIB_ALTIVEC + oris FRAME_REGISTER, FRAME_REGISTER, MSR_VE >> 16 +#endif + mtmsr FRAME_REGISTER + isync +#endif + /* Move frame pointer to non-volatile FRAME_REGISTER */ mr FRAME_REGISTER, r1 @@ -176,6 +189,73 @@ ppc_exc_wrap_async_normal: evstdd SCRATCH_1_REGISTER, PPC_EXC_ACC_OFFSET(r1) #endif +#ifdef PPC_MULTILIB_ALTIVEC + /* Save volatile AltiVec context */ + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0) + stvx v0, r1, SCRATCH_0_REGISTER + mfvscr v0 + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1) + stvx v1, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2) + stvx v2, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3) + stvx v3, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4) + stvx v4, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5) + stvx v5, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6) + stvx v6, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7) + stvx v7, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8) + stvx v8, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, 
PPC_EXC_MIN_VR_OFFSET(9) + stvx v9, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10) + stvx v10, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11) + stvx v11, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12) + stvx v12, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13) + stvx v13, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14) + stvx v14, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15) + stvx v15, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16) + stvx v16, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17) + stvx v17, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18) + stvx v18, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19) + stvx v19, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET + stvewx v0, r1, SCRATCH_0_REGISTER +#endif + +#ifdef PPC_MULTILIB_FPU + /* Save volatile FPU context */ + stfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1) + mffs f0 + stfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1) + stfd f2, PPC_EXC_MIN_FR_OFFSET(2)(r1) + stfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1) + stfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1) + stfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1) + stfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1) + stfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1) + stfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1) + stfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1) + stfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1) + stfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1) + stfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1) + stfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1) + stfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1) +#endif + /* Increment ISR nest level and thread dispatch disable level */ cmpwi ISR_NEST_REGISTER, 0 addi ISR_NEST_REGISTER, ISR_NEST_REGISTER, 1 @@ -246,6 +326,73 @@ profiling_done: bl _Thread_Dispatch thread_dispatching_done: +#ifdef PPC_MULTILIB_ALTIVEC + /* Restore volatile AltiVec context */ + li 
SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET + lvewx v0, r1, SCRATCH_0_REGISTER + mtvscr v0 + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0) + lvx v0, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1) + lvx v1, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2) + lvx v2, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3) + lvx v3, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4) + lvx v4, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5) + lvx v5, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6) + lvx v6, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7) + lvx v7, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8) + lvx v8, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9) + lvx v9, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10) + lvx v10, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11) + lvx v11, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12) + lvx v12, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13) + lvx v13, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14) + lvx v14, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15) + lvx v15, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16) + lvx v16, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17) + lvx v17, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18) + lvx v18, r1, SCRATCH_0_REGISTER + li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19) + lvx v19, r1, SCRATCH_0_REGISTER +#endif + +#ifdef PPC_MULTILIB_FPU + /* Restore volatile FPU context */ + lfd f0, PPC_EXC_MIN_FPSCR_OFFSET(r1) + mtfsf 0xff, f0 + lfd f0, PPC_EXC_MIN_FR_OFFSET(0)(r1) + lfd f1, PPC_EXC_MIN_FR_OFFSET(1)(r1) + lfd f2, 
PPC_EXC_MIN_FR_OFFSET(2)(r1) + lfd f3, PPC_EXC_MIN_FR_OFFSET(3)(r1) + lfd f4, PPC_EXC_MIN_FR_OFFSET(4)(r1) + lfd f5, PPC_EXC_MIN_FR_OFFSET(5)(r1) + lfd f6, PPC_EXC_MIN_FR_OFFSET(6)(r1) + lfd f7, PPC_EXC_MIN_FR_OFFSET(7)(r1) + lfd f8, PPC_EXC_MIN_FR_OFFSET(8)(r1) + lfd f9, PPC_EXC_MIN_FR_OFFSET(9)(r1) + lfd f10, PPC_EXC_MIN_FR_OFFSET(10)(r1) + lfd f11, PPC_EXC_MIN_FR_OFFSET(11)(r1) + lfd f12, PPC_EXC_MIN_FR_OFFSET(12)(r1) + lfd f13, PPC_EXC_MIN_FR_OFFSET(13)(r1) +#endif + #ifdef __SPE__ /* Load SPEFSCR and ACC */ lwz DISPATCH_LEVEL_REGISTER, PPC_EXC_SPEFSCR_OFFSET(r1) |