author     LIU Zhiwei <zhiwei_liu@c-sky.com>            2022-01-20 20:20:43 +0800
committer  Alistair Francis <alistair.francis@wdc.com>  2022-01-21 15:52:57 +1000
commit     31961cfe505e11cc4ec4cfde52c851957e1bf605
tree       8f891b80ae6a9d3330ca2150bc601a3fb734e792 /target/riscv
parent     d96a271a8daedb83247baf89349ead0cb8f0c449
target/riscv: Adjust vsetvl according to XLEN
Signed-off-by: LIU Zhiwei <zhiwei_liu@c-sky.com>
Reviewed-by: Richard Henderson <richard.henderson@linaro.org>
Reviewed-by: Alistair Francis <alistair.francis@wdc.com>
Message-id: 20220120122050.41546-17-zhiwei_liu@c-sky.com
Signed-off-by: Alistair Francis <alistair.francis@wdc.com>
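
The change below stops reading vill and the reserved bits of vtype through fixed 64-bit field macros and instead derives their positions from the current XLEN: vill is the most significant bit of vtype (bit XLEN-1), and the reserved field covers the bits from R_VTYPE_RESERVED_SHIFT up to bit XLEN-2. A rough, stand-alone sketch of that extraction is shown here; it is not part of the patch, MAKE_64BIT_MASK is re-declared locally with QEMU's definition, and the hypothetical reserved_shift parameter stands in for the generated R_VTYPE_RESERVED_SHIFT constant used in the hunk below.

/*
 * Minimal sketch (not part of the patch): extracting vill and the
 * reserved bits of vtype at positions that follow XLEN.
 */
#include <stdbool.h>
#include <stdint.h>

#define MAKE_64BIT_MASK(shift, length) \
    (((~0ULL) >> (64 - (length))) << (shift))

/* vill is the most significant bit of vtype, i.e. bit XLEN-1. */
static bool vtype_vill(uint64_t vtype, int xlen)
{
    return (vtype >> (xlen - 1)) & 0x1;
}

/* Reserved bits run from reserved_shift up to bit XLEN-2. */
static uint64_t vtype_reserved(uint64_t vtype, int xlen, int reserved_shift)
{
    return vtype & MAKE_64BIT_MASK(reserved_shift,
                                   xlen - 1 - reserved_shift);
}
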
Diffstat (limited to 'target/riscv')
-rw-r--r--  target/riscv/cpu.h           | 5 +++++
-rw-r--r--  target/riscv/vector_helper.c | 7 +++++--
2 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/target/riscv/cpu.h b/target/riscv/cpu.h
index 6c740b9..fe58cca 100644
--- a/target/riscv/cpu.h
+++ b/target/riscv/cpu.h
@@ -491,6 +491,11 @@ static inline RISCVMXL cpu_recompute_xl(CPURISCVState *env)
 }
 #endif
 
+static inline int riscv_cpu_xlen(CPURISCVState *env)
+{
+    return 16 << env->xl;
+}
+
 /*
  * Encode LMUL to lmul as follows:
  *   LMUL vlmul lmul
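
The helper added above turns env->xl into the effective XLEN: with the MXL encoding used by QEMU's RISCVMXL enum (and by misa.MXL in the privileged spec), 16 << xl yields 32, 64 and 128 for RV32, RV64 and RV128. A purely illustrative check of that arithmetic, with the enum values written out, might look like:

/* Illustrative only: MXL encoding and the 16 << xl mapping used by
 * riscv_cpu_xlen() above. */
#include <assert.h>

enum { MXL_RV32 = 1, MXL_RV64 = 2, MXL_RV128 = 3 };

static int xlen_from_mxl(int xl)
{
    return 16 << xl;   /* 1 -> 32, 2 -> 64, 3 -> 128 */
}

int main(void)
{
    assert(xlen_from_mxl(MXL_RV32) == 32);
    assert(xlen_from_mxl(MXL_RV64) == 64);
    assert(xlen_from_mxl(MXL_RV128) == 128);
    return 0;
}
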
diff --git a/target/riscv/vector_helper.c b/target/riscv/vector_helper.c
index a9484c2..8b7c9ec 100644
--- a/target/riscv/vector_helper.c
+++ b/target/riscv/vector_helper.c
@@ -36,8 +36,11 @@ target_ulong HELPER(vsetvl)(CPURISCVState *env, target_ulong s1,
     uint64_t lmul = FIELD_EX64(s2, VTYPE, VLMUL);
     uint16_t sew = 8 << FIELD_EX64(s2, VTYPE, VSEW);
     uint8_t ediv = FIELD_EX64(s2, VTYPE, VEDIV);
-    bool vill = FIELD_EX64(s2, VTYPE, VILL);
-    target_ulong reserved = FIELD_EX64(s2, VTYPE, RESERVED);
+    int xlen = riscv_cpu_xlen(env);
+    bool vill = (s2 >> (xlen - 1)) & 0x1;
+    target_ulong reserved = s2 &
+                            MAKE_64BIT_MASK(R_VTYPE_RESERVED_SHIFT,
+                                            xlen - 1 - R_VTYPE_RESERVED_SHIFT);
 
     if (lmul & 4) {
         /* Fractional LMUL. */