Diffstat (limited to 'target/arm/tcg/sve_ldst_internal.h')
-rw-r--r--  target/arm/tcg/sve_ldst_internal.h | 91
1 file changed, 90 insertions(+), 1 deletion(-)
diff --git a/target/arm/tcg/sve_ldst_internal.h b/target/arm/tcg/sve_ldst_internal.h
index 4f159ec..c67cda9 100644
--- a/target/arm/tcg/sve_ldst_internal.h
+++ b/target/arm/tcg/sve_ldst_internal.h
@@ -20,7 +20,7 @@
#ifndef TARGET_ARM_SVE_LDST_INTERNAL_H
#define TARGET_ARM_SVE_LDST_INTERNAL_H
-#include "exec/cpu_ldst.h"
+#include "accel/tcg/cpu-ldst.h"
/*
* Load one element into @vd + @reg_off from @host.
@@ -116,6 +116,94 @@ DO_ST_PRIM_2(sd, H1_8, uint64_t, uint32_t, stl)
DO_LD_PRIM_2(dd, H1_8, uint64_t, uint64_t, ldq)
DO_ST_PRIM_2(dd, H1_8, uint64_t, uint64_t, stq)
+#define DO_LD_PRIM_3(NAME, FUNC) \
+ static inline void sve_##NAME##_host(void *vd, \
+ intptr_t reg_off, void *host) \
+ { sve_##FUNC##_host(vd, reg_off, host); \
+ *(uint64_t *)(vd + reg_off + 8) = 0; } \
+ static inline void sve_##NAME##_tlb(CPUARMState *env, void *vd, \
+ intptr_t reg_off, target_ulong addr, uintptr_t ra) \
+ { sve_##FUNC##_tlb(env, vd, reg_off, addr, ra); \
+ *(uint64_t *)(vd + reg_off + 8) = 0; }
+
+DO_LD_PRIM_3(ld1squ_be, ld1sdu_be)
+DO_LD_PRIM_3(ld1squ_le, ld1sdu_le)
+DO_LD_PRIM_3(ld1dqu_be, ld1dd_be)
+DO_LD_PRIM_3(ld1dqu_le, ld1dd_le)
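
[Editorial note, not part of the patch: for clarity, the first little-endian instance above expands roughly as follows. The 64-bit-container load generated earlier by DO_LD_PRIM_2 (here sve_ld1sdu_le_host/_tlb) fills the low half of the 128-bit element, and the second statement clears the high half.]

static inline void sve_ld1squ_le_host(void *vd, intptr_t reg_off, void *host)
{
    /* Low 64 bits: the existing 32-in-64 load at vd + reg_off. */
    sve_ld1sdu_le_host(vd, reg_off, host);
    /* High 64 bits of the 128-bit element: explicitly zeroed. */
    *(uint64_t *)(vd + reg_off + 8) = 0;
}

static inline void sve_ld1squ_le_tlb(CPUARMState *env, void *vd,
                                     intptr_t reg_off, target_ulong addr,
                                     uintptr_t ra)
{
    sve_ld1sdu_le_tlb(env, vd, reg_off, addr, ra);
    *(uint64_t *)(vd + reg_off + 8) = 0;
}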
+
+#define sve_st1sq_be_host sve_st1sd_be_host
+#define sve_st1sq_le_host sve_st1sd_le_host
+#define sve_st1sq_be_tlb sve_st1sd_be_tlb
+#define sve_st1sq_le_tlb sve_st1sd_le_tlb
+
+#define sve_st1dq_be_host sve_st1dd_be_host
+#define sve_st1dq_le_host sve_st1dd_le_host
+#define sve_st1dq_be_tlb sve_st1dd_be_tlb
+#define sve_st1dq_le_tlb sve_st1dd_le_tlb
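
[Editorial note, not part of the patch: the truncating quadword stores need no new code. With the element layout described in the comment just below, bits [63:0] of a 128-bit element sit at vd + reg_off, so storing the low 32 or 64 bits reads exactly the same bytes as the existing doubleword-container stores. A rough sketch of the effective behaviour of sve_st1sq_le_host, assuming the DO_ST_PRIM_2 pattern above and ignoring the H1_8 host-index adjustment; the name _sketch is hypothetical.]

static inline void sve_st1sq_le_host_sketch(void *vd, intptr_t reg_off,
                                            void *host)
{
    /* Store the low 32 bits of the element; the upper 96 bits of the
     * 128-bit element are never read, so aliasing sve_st1sd_le_host
     * is sufficient. */
    stl_le_p(host, (uint32_t)*(uint64_t *)(vd + reg_off));
}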
+
+/*
+ * The ARMVectorReg elements are stored in host-endian 64-bit units.
+ * For 128-bit quantities, the sequence defined by the Elem[] pseudocode
+ * corresponds to storing the two 64-bit pieces in little-endian order.
+ */
+/* FIXME: Nothing in this file makes any effort at atomicity. */
+
+static inline void sve_ld1qq_be_host(void *vd, intptr_t reg_off, void *host)
+{
+ sve_ld1dd_be_host(vd, reg_off + 8, host);
+ sve_ld1dd_be_host(vd, reg_off, host + 8);
+}
+
+static inline void sve_ld1qq_le_host(void *vd, intptr_t reg_off, void *host)
+{
+ sve_ld1dd_le_host(vd, reg_off, host);
+ sve_ld1dd_le_host(vd, reg_off + 8, host + 8);
+}
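
[Editorial note, not part of the patch: a standalone illustration of the layout comment above, assuming a little-endian host and the GCC/Clang __builtin_bswap64 builtin. On such a host the ldq-style accessors behind these helpers reduce to a plain copy (LE) or a byte-swapped copy (BE), so the two variants place the halves as follows.]

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    /* 16 bytes of "guest memory"; byte i holds the value i. */
    uint8_t mem[16];
    for (int i = 0; i < 16; i++) {
        mem[i] = i;
    }

    /* Stand-in for the two 64-bit units at vd + reg_off and reg_off + 8. */
    uint64_t elem[2];

    /* LE access (cf. sve_ld1qq_le_host): the low 8 bytes of memory form
     * bits [63:0] and land at reg_off; the next 8 form bits [127:64]. */
    memcpy(&elem[0], mem, 8);
    memcpy(&elem[1], mem + 8, 8);
    printf("le: [63:0]=%016" PRIx64 " [127:64]=%016" PRIx64 "\n",
           elem[0], elem[1]);

    /* BE access (cf. sve_ld1qq_be_host): the first 8 bytes are the
     * most-significant half, so they are byte-swapped and stored at
     * reg_off + 8, while the last 8 bytes end up at reg_off. */
    uint64_t first, second;
    memcpy(&first, mem, 8);
    memcpy(&second, mem + 8, 8);
    elem[1] = __builtin_bswap64(first);
    elem[0] = __builtin_bswap64(second);
    printf("be: [63:0]=%016" PRIx64 " [127:64]=%016" PRIx64 "\n",
           elem[0], elem[1]);
    return 0;
}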
+
+static inline void
+sve_ld1qq_be_tlb(CPUARMState *env, void *vd, intptr_t reg_off,
+ target_ulong addr, uintptr_t ra)
+{
+ sve_ld1dd_be_tlb(env, vd, reg_off + 8, addr, ra);
+ sve_ld1dd_be_tlb(env, vd, reg_off, addr + 8, ra);
+}
+
+static inline void
+sve_ld1qq_le_tlb(CPUARMState *env, void *vd, intptr_t reg_off,
+ target_ulong addr, uintptr_t ra)
+{
+ sve_ld1dd_le_tlb(env, vd, reg_off, addr, ra);
+ sve_ld1dd_le_tlb(env, vd, reg_off + 8, addr + 8, ra);
+}
+
+static inline void sve_st1qq_be_host(void *vd, intptr_t reg_off, void *host)
+{
+ sve_st1dd_be_host(vd, reg_off + 8, host);
+ sve_st1dd_be_host(vd, reg_off, host + 8);
+}
+
+static inline void sve_st1qq_le_host(void *vd, intptr_t reg_off, void *host)
+{
+ sve_st1dd_le_host(vd, reg_off, host);
+ sve_st1dd_le_host(vd, reg_off + 8, host + 8);
+}
+
+static inline void
+sve_st1qq_be_tlb(CPUARMState *env, void *vd, intptr_t reg_off,
+ target_ulong addr, uintptr_t ra)
+{
+ sve_st1dd_be_tlb(env, vd, reg_off + 8, addr, ra);
+ sve_st1dd_be_tlb(env, vd, reg_off, addr + 8, ra);
+}
+
+static inline void
+sve_st1qq_le_tlb(CPUARMState *env, void *vd, intptr_t reg_off,
+ target_ulong addr, uintptr_t ra)
+{
+ sve_st1dd_le_tlb(env, vd, reg_off, addr, ra);
+ sve_st1dd_le_tlb(env, vd, reg_off + 8, addr + 8, ra);
+}
+
#undef DO_LD_TLB
#undef DO_ST_TLB
#undef DO_LD_HOST
@@ -123,6 +211,7 @@ DO_ST_PRIM_2(dd, H1_8, uint64_t, uint64_t, stq)
#undef DO_ST_PRIM_1
#undef DO_LD_PRIM_2
#undef DO_ST_PRIM_2
+#undef DO_LD_PRIM_3
/*
* Resolve the guest virtual address to info->host and info->flags.