| author | Richard Henderson <richard.henderson@linaro.org> | 2022-07-13 10:28:48 +0530 |
|---|---|---|
| committer | Peter Maydell <peter.maydell@linaro.org> | 2022-07-18 13:20:13 +0100 |
| commit | 6a775fd6e0423e76d3e3cb751b4b468d68f19ca5 (patch) | |
| tree | 1eb264028298ce3e6cf145788151a63a0f8c0a21 | |
| parent | 6215113355a508c5e6d0a4337c79184d85eeed61 (diff) | |
target/arm: Fix aarch64_sve_change_el for SME
We were only checking whether SVE was disabled, and not taking
PSTATE.SM into account to check whether SME was disabled instead,
which resulted in vectors being incorrectly truncated.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-id: 20220713045848.217364-3-richard.henderson@linaro.org
Reviewed-by: Peter Maydell <peter.maydell@linaro.org>
Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
| -rw-r--r-- | target/arm/helper.c | 31 |
1 file changed, 25 insertions, 6 deletions
```diff
diff --git a/target/arm/helper.c b/target/arm/helper.c
index 6fff7fc..24c45a9 100644
--- a/target/arm/helper.c
+++ b/target/arm/helper.c
@@ -11228,6 +11228,21 @@ void aarch64_sve_narrow_vq(CPUARMState *env, unsigned vq)
     }
 }
 
+static uint32_t sve_vqm1_for_el_sm_ena(CPUARMState *env, int el, bool sm)
+{
+    int exc_el;
+
+    if (sm) {
+        exc_el = sme_exception_el(env, el);
+    } else {
+        exc_el = sve_exception_el(env, el);
+    }
+    if (exc_el) {
+        return 0; /* disabled */
+    }
+    return sve_vqm1_for_el_sm(env, el, sm);
+}
+
 /*
  * Notice a change in SVE vector size when changing EL.
  */
@@ -11236,7 +11251,7 @@ void aarch64_sve_change_el(CPUARMState *env, int old_el,
 {
     ARMCPU *cpu = env_archcpu(env);
     int old_len, new_len;
-    bool old_a64, new_a64;
+    bool old_a64, new_a64, sm;
 
     /* Nothing to do if no SVE. */
     if (!cpu_isar_feature(aa64_sve, cpu)) {
@@ -11256,7 +11271,8 @@ void aarch64_sve_change_el(CPUARMState *env, int old_el,
      * invoke ResetSVEState when taking an exception from, or
      * returning to, AArch32 state when PSTATE.SM is enabled.
      */
-    if (old_a64 != new_a64 && FIELD_EX64(env->svcr, SVCR, SM)) {
+    sm = FIELD_EX64(env->svcr, SVCR, SM);
+    if (old_a64 != new_a64 && sm) {
         arm_reset_sve_state(env);
         return;
     }
@@ -11273,10 +11289,13 @@ void aarch64_sve_change_el(CPUARMState *env, int old_el,
      * we already have the correct register contents when encountering the
      * vq0->vq0 transition between EL0->EL1.
      */
-    old_len = (old_a64 && !sve_exception_el(env, old_el)
-               ? sve_vqm1_for_el(env, old_el) : 0);
-    new_len = (new_a64 && !sve_exception_el(env, new_el)
-               ? sve_vqm1_for_el(env, new_el) : 0);
+    old_len = new_len = 0;
+    if (old_a64) {
+        old_len = sve_vqm1_for_el_sm_ena(env, old_el, sm);
+    }
+    if (new_a64) {
+        new_len = sve_vqm1_for_el_sm_ena(env, new_el, sm);
+    }
 
     /* When changing vector length, clear inaccessible state. */
     if (new_len < old_len) {
```
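As a rough standalone illustration of what the fix changes (this is not QEMU code: `ToyState`, the `toy_*` helpers, and the sample values below are simplified, hypothetical stand-ins for CPUARMState, sme_exception_el(), sve_exception_el() and sve_vqm1_for_el_sm()), the sketch contrasts the old SVE-only check with the new streaming-mode-aware one:

```c
/* Hypothetical, self-contained model of the change; not the QEMU sources. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the relevant CPU state. */
typedef struct {
    bool sm;            /* PSTATE.SM: streaming SVE mode active      */
    int  sve_exc_el;    /* non-zero means SVE is trapped at this EL  */
    int  sme_exc_el;    /* non-zero means SME is trapped at this EL  */
    uint32_t sve_vqm1;  /* non-streaming vector length (VQ - 1)      */
    uint32_t sme_vqm1;  /* streaming vector length (VQ - 1)          */
} ToyState;

/* Stand-ins for sve_exception_el() / sme_exception_el(). */
static int toy_sve_exception_el(const ToyState *s) { return s->sve_exc_el; }
static int toy_sme_exception_el(const ToyState *s) { return s->sme_exc_el; }

/* Stand-in for sve_vqm1_for_el_sm(): pick the length for the given mode. */
static uint32_t toy_vqm1_for_sm(const ToyState *s, bool sm)
{
    return sm ? s->sme_vqm1 : s->sve_vqm1;
}

/* Old behaviour: only the SVE trap check decides whether vectors exist. */
static uint32_t effective_vqm1_old(const ToyState *s)
{
    return toy_sve_exception_el(s) ? 0 : s->sve_vqm1;
}

/* New behaviour (mirrors sve_vqm1_for_el_sm_ena): when PSTATE.SM is set,
 * consult the SME trap check and the streaming vector length instead. */
static uint32_t effective_vqm1_new(const ToyState *s)
{
    int exc_el = s->sm ? toy_sme_exception_el(s) : toy_sve_exception_el(s);
    if (exc_el) {
        return 0; /* disabled */
    }
    return toy_vqm1_for_sm(s, s->sm);
}

int main(void)
{
    /* Streaming mode on, SME enabled, but plain SVE trapped: the old
     * logic wrongly reports length 0 and would truncate vector state. */
    ToyState s = { .sm = true, .sve_exc_el = 1, .sme_exc_el = 0,
                   .sve_vqm1 = 3, .sme_vqm1 = 1 };
    printf("old: %u  new: %u\n",
           (unsigned)effective_vqm1_old(&s),
           (unsigned)effective_vqm1_new(&s));
    return 0;
}
```

In aarch64_sve_change_el the smaller of the old and new effective lengths determines how much high vector state is cleared ("When changing vector length, clear inaccessible state"), so computing the streaming-mode length correctly is what avoids the spurious truncation described in the commit message.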