about | summary | refs | log | tree | commit | diff
path: root/tcg/arm
diff options
context:
space:
mode:
authorRichard Henderson <richard.henderson@linaro.org>2023-04-02 20:48:47 -0700
committerRichard Henderson <richard.henderson@linaro.org>2023-05-16 15:21:39 -0700
commitb6ee2453f63bf1b9fe5d3e20e00b128820b4902e (patch)
tree665b3be4fc0b52a51744fd0ff8597346bb3ca73d /tcg/arm
parent7212812263402605abb147e5b2468f523a1471ab (diff)
downloadqemu-b6ee2453f63bf1b9fe5d3e20e00b128820b4902e.zip
qemu-b6ee2453f63bf1b9fe5d3e20e00b128820b4902e.tar.gz
qemu-b6ee2453f63bf1b9fe5d3e20e00b128820b4902e.tar.bz2
tcg/arm: Use full load/store helpers in user-only mode
Instead of using helper_unaligned_{ld,st}, use the full load/store helpers. This will allow the fast path to increase alignment to implement atomicity while not immediately raising an alignment exception. Reviewed-by: Peter Maydell <peter.maydell@linaro.org> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Diffstat (limited to 'tcg/arm')
-rw-r--r--tcg/arm/tcg-target.c.inc45
1 file changed, 0 insertions, 45 deletions
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index a02804d..eb0542f 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -1325,7 +1325,6 @@ typedef struct {
bool index_scratch;
} HostAddress;
-#ifdef CONFIG_SOFTMMU
static TCGReg ldst_ra_gen(TCGContext *s, const TCGLabelQemuLdst *l, int arg)
{
/* We arrive at the slow path via "BLNE", so R14 contains l->raddr. */
@@ -1368,50 +1367,6 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
tcg_out_goto(s, COND_AL, qemu_st_helpers[opc & MO_SIZE]);
return true;
}
-#else
-static bool tcg_out_fail_alignment(TCGContext *s, TCGLabelQemuLdst *l)
-{
- if (!reloc_pc24(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
- return false;
- }
-
- if (TARGET_LONG_BITS == 64) {
- /* 64-bit target address is aligned into R2:R3. */
- TCGMovExtend ext[2] = {
- { .dst = TCG_REG_R2, .dst_type = TCG_TYPE_I32,
- .src = l->addrlo_reg,
- .src_type = TCG_TYPE_I32, .src_ext = MO_UL },
- { .dst = TCG_REG_R3, .dst_type = TCG_TYPE_I32,
- .src = l->addrhi_reg,
- .src_type = TCG_TYPE_I32, .src_ext = MO_UL },
- };
- tcg_out_movext2(s, &ext[0], &ext[1], TCG_REG_TMP);
- } else {
- tcg_out_mov(s, TCG_TYPE_I32, TCG_REG_R1, l->addrlo_reg);
- }
- tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_R0, TCG_AREG0);
-
- /*
- * Tail call to the helper, with the return address back inline,
- * just for the clarity of the debugging traceback -- the helper
- * cannot return. We have used BLNE to arrive here, so LR is
- * already set.
- */
- tcg_out_goto(s, COND_AL, (const void *)
- (l->is_ld ? helper_unaligned_ld : helper_unaligned_st));
- return true;
-}
-
-static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
-{
- return tcg_out_fail_alignment(s, l);
-}
-
-static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
-{
- return tcg_out_fail_alignment(s, l);
-}
-#endif /* SOFTMMU */
static TCGLabelQemuLdst *prepare_host_addr(TCGContext *s, HostAddress *h,
TCGReg addrlo, TCGReg addrhi,