Diffstat (limited to 'bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S')
-rw-r--r--  bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S | 214
1 file changed, 206 insertions(+), 8 deletions(-)
diff --git a/bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S b/bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S
index 87d7d2e028..701fc20bbb 100644
--- a/bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S
+++ b/bsps/powerpc/shared/exceptions/ppc_exc_async_normal.S
@@ -1,9 +1,28 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+
/*
- * Copyright (c) 2011, 2017 embedded brains GmbH. All rights reserved.
+ * Copyright (C) 2011, 2020 embedded brains GmbH & Co. KG
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
*
- * The license and distribution terms for this file may be
- * found in the file LICENSE in this distribution or at
- * http://www.rtems.org/license/LICENSE.
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
*/
#include <bspopts.h>
@@ -154,8 +173,15 @@ ppc_exc_interrupt:
evstdd SCRATCH_5_REGISTER, PPC_EXC_ACC_OFFSET(r1)
#endif
-#ifdef PPC_MULTILIB_ALTIVEC
/* Save volatile AltiVec context */
+#ifdef PPC_MULTILIB_ALTIVEC
+#ifdef __PPC_VRSAVE__
+ mfvrsave SCRATCH_0_REGISTER
+ cmpwi SCRATCH_0_REGISTER, 0
+ bne .Laltivec_save
+
+.Laltivec_save_continue:
+#else /* __PPC_VRSAVE__ */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
stvx v0, r1, SCRATCH_0_REGISTER
mfvscr v0
@@ -199,7 +225,8 @@ ppc_exc_interrupt:
stvx v19, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
stvewx v0, r1, SCRATCH_0_REGISTER
-#endif
+#endif /* __PPC_VRSAVE__ */
+#endif /* PPC_MULTILIB_ALTIVEC */
#ifdef PPC_MULTILIB_FPU
/* Save volatile FPU context */
@@ -315,8 +342,15 @@ ppc_exc_interrupt:
.Lthread_dispatch_done:
-#ifdef PPC_MULTILIB_ALTIVEC
/* Restore volatile AltiVec context */
+#ifdef PPC_MULTILIB_ALTIVEC
+#ifdef __PPC_VRSAVE__
+ mfvrsave SCRATCH_0_REGISTER
+ cmpwi SCRATCH_0_REGISTER, 0
+ bne .Laltivec_restore
+
+.Laltivec_restore_continue:
+#else /* __PPC_VRSAVE__ */
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
lvewx v0, r1, SCRATCH_0_REGISTER
mtvscr v0
@@ -360,7 +394,8 @@ ppc_exc_interrupt:
lvx v18, r1, SCRATCH_0_REGISTER
li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
lvx v19, r1, SCRATCH_0_REGISTER
-#endif
+#endif /* __PPC_VRSAVE__ */
+#endif /* PPC_MULTILIB_ALTIVEC */
#ifdef PPC_MULTILIB_FPU
/* Restore volatile FPU context */
@@ -459,6 +494,169 @@ ppc_exc_interrupt:
/* Return */
rfi
+#if defined(PPC_MULTILIB_ALTIVEC) && defined(__PPC_VRSAVE__)
+.Laltivec_save:
+
+ /*
+ * Let X be VRSAVE and calculate:
+ *
+ * Y = 0x77777777
+ * Z = X & Y
+ * Z = Z + Y
+ * X = X | Z
+ *
+ * Afterwards, X contains for each group of four VR registers:
+ *
+ * 0111b, if all four VRSAVE bits of the group are 0
+ * 1XXXb, if at least one VRSAVE bit of the group is 1
+ */
+ lis SCRATCH_5_REGISTER, 0x7777
+ ori SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, 0x7777
+ and SCRATCH_6_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
+ add SCRATCH_6_REGISTER, SCRATCH_5_REGISTER, SCRATCH_6_REGISTER
+ or SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_6_REGISTER
+ mtcr SCRATCH_0_REGISTER
+
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
+ stvx v0, r1, SCRATCH_0_REGISTER
+
+ /* Move VSCR to V0 */
+ mfvscr v0
+
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
+ stvx v1, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
+ stvx v2, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
+ stvx v3, r1, SCRATCH_0_REGISTER
+
+ /* Save VSCR using V0 */
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
+ stvewx v0, r1, SCRATCH_0_REGISTER
+
+ bf 4, .Laltivec_save_v8
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
+ stvx v4, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
+ stvx v5, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
+ stvx v6, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
+ stvx v7, r1, SCRATCH_0_REGISTER
+
+.Laltivec_save_v8:
+
+ bf 8, .Laltivec_save_v12
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
+ stvx v8, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
+ stvx v9, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
+ stvx v10, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
+ stvx v11, r1, SCRATCH_0_REGISTER
+
+.Laltivec_save_v12:
+
+ bf 12, .Laltivec_save_v16
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
+ stvx v12, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
+ stvx v13, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
+ stvx v14, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
+ stvx v15, r1, SCRATCH_0_REGISTER
+
+.Laltivec_save_v16:
+
+ bf 16, .Laltivec_save_continue
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
+ stvx v16, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
+ stvx v17, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
+ stvx v18, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
+ stvx v19, r1, SCRATCH_0_REGISTER
+
+ b .Laltivec_save_continue
+
+.Laltivec_restore:
+
+ /* Load VSCR using V0 */
+ li SCRATCH_5_REGISTER, PPC_EXC_MIN_VSCR_OFFSET
+ lvewx v0, r1, SCRATCH_5_REGISTER
+
+ /* See comment at .Laltivec_save */
+ lis SCRATCH_5_REGISTER, 0x7777
+ ori SCRATCH_5_REGISTER, SCRATCH_5_REGISTER, 0x7777
+ and SCRATCH_6_REGISTER, SCRATCH_0_REGISTER, SCRATCH_5_REGISTER
+ add SCRATCH_6_REGISTER, SCRATCH_5_REGISTER, SCRATCH_6_REGISTER
+ or SCRATCH_0_REGISTER, SCRATCH_0_REGISTER, SCRATCH_6_REGISTER
+ mtcr SCRATCH_0_REGISTER
+
+ /* Restore VSCR using V0 */
+ mtvscr v0
+
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(0)
+ lvx v0, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(1)
+ lvx v1, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(2)
+ lvx v2, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(3)
+ lvx v3, r1, SCRATCH_0_REGISTER
+
+ bf 4, .Laltivec_restore_v8
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(4)
+ lvx v4, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(5)
+ lvx v5, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(6)
+ lvx v6, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(7)
+ lvx v7, r1, SCRATCH_0_REGISTER
+
+.Laltivec_restore_v8:
+
+ bf 8, .Laltivec_restore_v12
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(8)
+ lvx v8, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(9)
+ lvx v9, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(10)
+ lvx v10, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(11)
+ lvx v11, r1, SCRATCH_0_REGISTER
+
+.Laltivec_restore_v12:
+
+ bf 12, .Laltivec_restore_v16
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(12)
+ lvx v12, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(13)
+ lvx v13, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(14)
+ lvx v14, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(15)
+ lvx v15, r1, SCRATCH_0_REGISTER
+
+.Laltivec_restore_v16:
+
+ bf 16, .Laltivec_restore_continue
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(16)
+ lvx v16, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(17)
+ lvx v17, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(18)
+ lvx v18, r1, SCRATCH_0_REGISTER
+ li SCRATCH_0_REGISTER, PPC_EXC_MIN_VR_OFFSET(19)
+ lvx v19, r1, SCRATCH_0_REGISTER
+
+ b .Laltivec_restore_continue
+#endif /* PPC_MULTILIB_ALTIVEC && __PPC_VRSAVE__ */
+
/* Symbol provided for debugging and tracing */
ppc_exc_interrupt_end:
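
For reference, below is a minimal C sketch (illustration only, not part of the patch) of the VRSAVE folding trick introduced at .Laltivec_save: the 0x77777777 mask/add/or sequence sets the leading bit of each nibble exactly when any of the four vector registers of that group is live, which is what the later "bf 4/8/12/16" tests consume once the value has been moved to CR with mtcr. The fold_vrsave helper name and the example VRSAVE value are made up for the sketch; the bit numbering assumes the PowerPC convention that VRSAVE bit 0 (VR0) is the most significant bit of the word.

#include <stdio.h>
#include <stdint.h>

/*
 * Fold a VRSAVE mask so that the leading bit of each nibble (one nibble
 * per group of four vector registers) is 1 exactly when at least one
 * register of that group is marked live.  Mirrors the lis/ori/and/add/or
 * sequence at .Laltivec_save.
 */
static uint32_t fold_vrsave(uint32_t x)
{
  uint32_t y = 0x77777777u;
  uint32_t z = x & y; /* clear the leading bit of every nibble */
  z = z + y;          /* leading bit becomes 1 if any low bit of the nibble was set */
  return x | z;       /* keep nibbles whose only live bit was the leading one */
}

int main(void)
{
  /* Hypothetical VRSAVE with only VR5 and VR19 live (PPC bit 0 = VR0 = MSB). */
  uint32_t vrsave = (1u << (31 - 5)) | (1u << (31 - 19));
  uint32_t folded = fold_vrsave(vrsave);

  /* Expect the group flags for v4-v7 (PPC bit 4) and v16-v19 (PPC bit 16) to be set. */
  printf("folded = 0x%08x\n", (unsigned int)folded);
  return 0;
}

In the assembly, the folded value is moved to CR, so each group flag lands in a fixed CR bit and the save or restore of an untouched group of four registers can be skipped with a single bf instruction.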