path: root/tcg/riscv/tcg-target.c.inc
author     Peter Maydell <peter.maydell@linaro.org>    2022-02-14 15:24:26 +0000
committer  Peter Maydell <peter.maydell@linaro.org>    2022-02-14 15:24:26 +0000
commit     50a75ff680ec8999baa0bffc49af8c6ad5c0035a (patch)
tree       5c08bff139272919eed6fb4789f1931326cd8c3b /tcg/riscv/tcg-target.c.inc
parent     cc5ce8b8b6be83e5fe3b668dbd061ad97c534e3f (diff)
parent     5c1a101ef6b85537a4ade93c39ea81cadd5c246e (diff)
Merge remote-tracking branch 'remotes/rth-gitlab/tags/pull-tcg-20220211' into staging
Fix safe_syscall_base for sparc64.
Fix host signal handling for sparc64-linux.
Speedups for jump cache and work list probing.
Fix for exception replays.
Raise guest SIGBUS for user-only misaligned accesses.

# gpg: Signature made Fri 11 Feb 2022 01:27:16 GMT
# gpg: using RSA key 7A481E78868B4DB6A85A05C064DF38E8AF7E215F
# gpg: issuer "richard.henderson@linaro.org"
# gpg: Good signature from "Richard Henderson <richard.henderson@linaro.org>" [full]
# Primary key fingerprint: 7A48 1E78 868B 4DB6 A85A 05C0 64DF 38E8 AF7E 215F

* remotes/rth-gitlab/tags/pull-tcg-20220211: (34 commits)
  tests/tcg/multiarch: Add sigbus.c
  tcg/sparc: Support unaligned access for user-only
  tcg/sparc: Add tcg_out_jmpl_const for better tail calls
  tcg/sparc: Use the constant pool for 64-bit constants
  tcg/sparc: Convert patch_reloc to return bool
  tcg/sparc: Improve code gen for shifted 32-bit constants
  tcg/sparc: Add scratch argument to tcg_out_movi_int
  tcg/sparc: Split out tcg_out_movi_imm32
  tcg/sparc: Use tcg_out_movi_imm13 in tcg_out_addsub2_i64
  tcg/mips: Support unaligned access for softmmu
  tcg/mips: Support unaligned access for user-only
  tcg/arm: Support raising sigbus for user-only
  tcg/arm: Reserve a register for guest_base
  tcg/arm: Support unaligned access for softmmu
  tcg/arm: Check alignment for ldrd and strd
  tcg/arm: Remove use_armv6_instructions
  tcg/arm: Remove use_armv5t_instructions
  tcg/arm: Drop support for armv4 and armv5 hosts
  tcg/loongarch64: Support raising sigbus for user-only
  tcg/tci: Support raising sigbus for user-only
  ...

Signed-off-by: Peter Maydell <peter.maydell@linaro.org>
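As context for the riscv hunks below (not part of the patch itself): the user-only alignment check this series adds boils down to masking the guest address against the alignment required by the MemOp and branching to a slow path when any low bits are set. A minimal C sketch of that check follows; addr_is_aligned() is an illustrative name, not a QEMU function, and a 64-bit guest address is assumed.

    /* Illustrative only -- not part of the patch. Shows what the emitted
     * ANDI/BNE pair in the diff below tests at run time. */
    #include <stdbool.h>
    #include <stdint.h>

    static inline bool addr_is_aligned(uint64_t addr, unsigned a_bits)
    {
        uint64_t a_mask = ((uint64_t)1 << a_bits) - 1;  /* e.g. a_bits = 2 -> mask 0x3 */
        return (addr & a_mask) == 0;                    /* non-zero low bits -> slow path */
    }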
Diffstat (limited to 'tcg/riscv/tcg-target.c.inc')
-rw-r--r--  tcg/riscv/tcg-target.c.inc  63
1 file changed, 61 insertions, 2 deletions
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index e9488f7..6409d9c 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -27,6 +27,7 @@
* THE SOFTWARE.
*/
+#include "../tcg-ldst.c.inc"
#include "../tcg-pool.c.inc"
#ifdef CONFIG_DEBUG_TCG
@@ -847,8 +848,6 @@ static void tcg_out_mb(TCGContext *s, TCGArg a0)
*/
#if defined(CONFIG_SOFTMMU)
-#include "../tcg-ldst.c.inc"
-
/* helper signature: helper_ret_ld_mmu(CPUState *env, target_ulong addr,
* MemOpIdx oi, uintptr_t ra)
*/
@@ -1053,6 +1052,54 @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
tcg_out_goto(s, l->raddr);
return true;
}
+#else
+
+static void tcg_out_test_alignment(TCGContext *s, bool is_ld, TCGReg addr_reg,
+ unsigned a_bits)
+{
+ unsigned a_mask = (1 << a_bits) - 1;
+ TCGLabelQemuLdst *l = new_ldst_label(s);
+
+ l->is_ld = is_ld;
+ l->addrlo_reg = addr_reg;
+
+ /* We are expecting a_bits to max out at 7, so we can always use andi. */
+ tcg_debug_assert(a_bits < 12);
+ tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_TMP1, addr_reg, a_mask);
+
+ l->label_ptr[0] = s->code_ptr;
+ tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP1, TCG_REG_ZERO, 0);
+
+ l->raddr = tcg_splitwx_to_rx(s->code_ptr);
+}
+
+static bool tcg_out_fail_alignment(TCGContext *s, TCGLabelQemuLdst *l)
+{
+ /* resolve label address */
+ if (!reloc_sbimm12(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
+ return false;
+ }
+
+ tcg_out_mov(s, TCG_TYPE_TL, TCG_REG_A1, l->addrlo_reg);
+ tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_A0, TCG_AREG0);
+
+ /* tail call, with the return address back inline. */
+ tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RA, (uintptr_t)l->raddr);
+ tcg_out_call_int(s, (const void *)(l->is_ld ? helper_unaligned_ld
+ : helper_unaligned_st), true);
+ return true;
+}
+
+static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
+{
+ return tcg_out_fail_alignment(s, l);
+}
+
+static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
+{
+ return tcg_out_fail_alignment(s, l);
+}
+
#endif /* CONFIG_SOFTMMU */
static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
@@ -1108,6 +1155,8 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
MemOp opc;
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1];
+#else
+ unsigned a_bits;
#endif
TCGReg base = TCG_REG_TMP0;
@@ -1130,6 +1179,10 @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
tcg_out_ext32u(s, base, addr_regl);
addr_regl = base;
}
+ a_bits = get_alignment_bits(opc);
+ if (a_bits) {
+ tcg_out_test_alignment(s, true, addr_regl, a_bits);
+ }
if (guest_base != 0) {
tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
}
@@ -1174,6 +1227,8 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
MemOp opc;
#if defined(CONFIG_SOFTMMU)
tcg_insn_unit *label_ptr[1];
+#else
+ unsigned a_bits;
#endif
TCGReg base = TCG_REG_TMP0;
@@ -1196,6 +1251,10 @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
tcg_out_ext32u(s, base, addr_regl);
addr_regl = base;
}
+ a_bits = get_alignment_bits(opc);
+ if (a_bits) {
+ tcg_out_test_alignment(s, false, addr_regl, a_bits);
+ }
if (guest_base != 0) {
tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
}
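As a rough C-level picture of the user-only slow path added above (a sketch under assumptions, not the real QEMU implementation: the stub type and function bodies below are hypothetical, and only the helper names come from the diff): the backend moves the CPU state into A0 and the faulting guest address into A1, points RA back at the fast path, and tail-calls helper_unaligned_ld or helper_unaligned_st, which raises SIGBUS for the guest rather than returning normally.

    #include <signal.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct CPUArchStateStub CPUArchStateStub;  /* stand-in for the real env type */

    /* Dummy bodies standing in for helper_unaligned_ld/st as named in the
     * diff: for the guest, an unaligned user-only access becomes SIGBUS. */
    static void helper_unaligned_ld_stub(CPUArchStateStub *env, uint64_t addr)
    {
        (void)env; (void)addr;
        raise(SIGBUS);
    }

    static void helper_unaligned_st_stub(CPUArchStateStub *env, uint64_t addr)
    {
        (void)env; (void)addr;
        raise(SIGBUS);
    }

    /* Roughly what the code emitted by tcg_out_fail_alignment() does at run
     * time: env lands in A0, the faulting address in A1, RA is pointed back
     * at the fast path, and the helper is tail-called. */
    static void unaligned_slow_path(CPUArchStateStub *env, uint64_t addr, bool is_ld)
    {
        if (is_ld) {
            helper_unaligned_ld_stub(env, addr);
        } else {
            helper_unaligned_st_stub(env, addr);
        }
    }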