author     Richard Henderson <rth@twiddle.net>    2015-07-23 13:32:35 -0700
committer  Richard Henderson <rth@twiddle.net>    2015-08-24 11:10:54 -0700
commit     a5e39810b9088b5d20fac8e0293f281e1c8b608f (patch)
tree       5583b3245184fd6a1abe06b6be368339effa26f3 /tcg
parent     68d45bb61c5bbfb3999486f78cf026c1e79eb301 (diff)
tcg/s390: Use softmmu fast path for unaligned accesses
Signed-off-by: Richard Henderson <rth@twiddle.net>
Diffstat (limited to 'tcg')
-rw-r--r--  tcg/s390/tcg-target.c | 26 +++++++++++++++++++++++-----
 1 file changed, 21 insertions(+), 5 deletions(-)
diff --git a/tcg/s390/tcg-target.c b/tcg/s390/tcg-target.c
index 96c3d65..be51c8b 100644
--- a/tcg/s390/tcg-target.c
+++ b/tcg/s390/tcg-target.c
@@ -1504,20 +1504,36 @@ QEMU_BUILD_BUG_ON(offsetof(CPUArchState, tlb_table[NB_MMU_MODES - 1][1])
 static TCGReg tcg_out_tlb_read(TCGContext* s, TCGReg addr_reg, TCGMemOp opc,
                                int mem_index, bool is_ld)
 {
-    TCGMemOp s_bits = opc & MO_SIZE;
-    uint64_t tlb_mask = TARGET_PAGE_MASK | ((1 << s_bits) - 1);
-    int ofs;
+    int s_mask = (1 << (opc & MO_SIZE)) - 1;
+    int ofs, a_off;
+    uint64_t tlb_mask;
+
+    /* For aligned accesses, we check the first byte and include the alignment
+       bits within the address. For unaligned access, we check that we don't
+       cross pages using the address of the last byte of the access. */
+    if ((opc & MO_AMASK) == MO_ALIGN || s_mask == 0) {
+        a_off = 0;
+        tlb_mask = TARGET_PAGE_MASK | s_mask;
+    } else {
+        a_off = s_mask;
+        tlb_mask = TARGET_PAGE_MASK;
+    }
 
     if (facilities & FACILITY_GEN_INST_EXT) {
         tcg_out_risbg(s, TCG_REG_R2, addr_reg,
                       64 - CPU_TLB_BITS - CPU_TLB_ENTRY_BITS,
                       63 - CPU_TLB_ENTRY_BITS,
                       64 + CPU_TLB_ENTRY_BITS - TARGET_PAGE_BITS, 1);
-        tgen_andi_risbg(s, TCG_REG_R3, addr_reg, tlb_mask);
+        if (a_off) {
+            tcg_out_insn(s, RX, LA, TCG_REG_R3, addr_reg, TCG_REG_NONE, a_off);
+            tgen_andi(s, TCG_TYPE_TL, TCG_REG_R3, tlb_mask);
+        } else {
+            tgen_andi_risbg(s, TCG_REG_R3, addr_reg, tlb_mask);
+        }
     } else {
         tcg_out_sh64(s, RSY_SRLG, TCG_REG_R2, addr_reg, TCG_REG_NONE,
                      TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
-        tcg_out_movi(s, TCG_TYPE_TL, TCG_REG_R3, addr_reg);
+        tcg_out_insn(s, RX, LA, TCG_REG_R3, addr_reg, TCG_REG_NONE, a_off);
         tgen_andi(s, TCG_TYPE_I64, TCG_REG_R2,
                   (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
         tgen_andi(s, TCG_TYPE_TL, TCG_REG_R3, tlb_mask);
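The decision the patch compiles into the TLB check can be sketched in isolation. The standalone C program below is a minimal illustration only: GUEST_PAGE_BITS, GUEST_PAGE_MASK and tlb_check_params() are hypothetical stand-ins for QEMU's TARGET_PAGE_MASK, the TCGMemOp size/alignment encoding and tcg_out_tlb_read(); it reproduces the aligned/unaligned mask selection, not the s390 code the backend emits.

/* Minimal sketch of the mask/offset selection added by this patch.
   GUEST_PAGE_BITS and GUEST_PAGE_MASK are simplified stand-ins for the
   guest page geometry; they are not QEMU's definitions. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define GUEST_PAGE_BITS 12   /* assume 4 KiB guest pages */
#define GUEST_PAGE_MASK (~(((uint64_t)1 << GUEST_PAGE_BITS) - 1))

/* Given the access size in bytes and whether the guest requires alignment,
   compute the offset added to the address before the TLB compare (a_off)
   and the mask applied to the result (tlb_mask). */
static void tlb_check_params(int size, bool require_align,
                             int *a_off, uint64_t *tlb_mask)
{
    int s_mask = size - 1;

    if (require_align || s_mask == 0) {
        /* Aligned (or single-byte) access: compare the first byte and keep
           the low alignment bits in the mask, so a misaligned address can
           never match a page-aligned TLB tag and falls back to the slow
           path. */
        *a_off = 0;
        *tlb_mask = GUEST_PAGE_MASK | s_mask;
    } else {
        /* Unaligned access: compare the address of the last byte instead,
           so the fast path is taken whenever the access stays within one
           page. */
        *a_off = s_mask;
        *tlb_mask = GUEST_PAGE_MASK;
    }
}

int main(void)
{
    uint64_t addr = 0x1ffe;   /* 4-byte access that straddles a 4 KiB page */
    int a_off;
    uint64_t mask;

    /* Unaligned case: (0x1ffe + 3) & mask == 0x2000, the next page, which
       cannot match the tag of the page containing 0x1ffe -> slow path.
       The same access at 0x1ff0 would yield 0x1000 and hit the fast path. */
    tlb_check_params(4, false, &a_off, &mask);
    printf("unaligned: compare %#llx against the TLB tag\n",
           (unsigned long long)((addr + a_off) & mask));

    /* Alignment-required case: 0x1ffe & mask == 0x1002, which never matches
       a page-aligned tag, so the misaligned access always traps out. */
    tlb_check_params(4, true, &a_off, &mask);
    printf("aligned:   compare %#llx against the TLB tag\n",
           (unsigned long long)((addr + a_off) & mask));
    return 0;
}

Checking the last byte is what lets the softmmu fast path serve unaligned accesses: only an access that actually crosses a page boundary produces a different page tag and is diverted to the slow-path helper, whereas before this patch every misaligned access missed the compare.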