diff options
author | Richard Henderson <richard.henderson@linaro.org> | 2023-05-02 15:36:47 +0100 |
---|---|---|
committer | Richard Henderson <richard.henderson@linaro.org> | 2023-05-05 17:05:58 +0100 |
commit | 33948b68a7ca99f05f88f4aac5d5019a11853662 (patch) | |
tree | 7a7685e790f4f3104215669e1aa2e202619dcf24 | |
parent | 6ffaac9ca01094341ce64526411b8065df9ac39f (diff) | |
download | qemu-33948b68a7ca99f05f88f4aac5d5019a11853662.zip qemu-33948b68a7ca99f05f88f4aac5d5019a11853662.tar.gz qemu-33948b68a7ca99f05f88f4aac5d5019a11853662.tar.bz2 |
target/alpha: Use MO_ALIGN where required
Mark all memory operations that are not already marked with UNALIGN.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
-rw-r--r-- | target/alpha/translate.c | 36 |
1 file changed, 20 insertions, 16 deletions
diff --git a/target/alpha/translate.c b/target/alpha/translate.c
index ffbac1c..be8adb2 100644
--- a/target/alpha/translate.c
+++ b/target/alpha/translate.c
@@ -2399,21 +2399,21 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
             switch ((insn >> 12) & 0xF) {
             case 0x0:
                 /* Longword physical access (hw_ldl/p) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
                 break;
             case 0x1:
                 /* Quadword physical access (hw_ldq/p) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
                 break;
             case 0x2:
                 /* Longword physical access with lock (hw_ldl_l/p) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
                 tcg_gen_mov_i64(cpu_lock_addr, addr);
                 tcg_gen_mov_i64(cpu_lock_value, va);
                 break;
             case 0x3:
                 /* Quadword physical access with lock (hw_ldq_l/p) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
                 tcg_gen_mov_i64(cpu_lock_addr, addr);
                 tcg_gen_mov_i64(cpu_lock_value, va);
                 break;
@@ -2438,11 +2438,13 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
                 goto invalid_opc;
             case 0xA:
                 /* Longword virtual access with protection check (hw_ldl/w) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX,
+                                    MO_LESL | MO_ALIGN);
                 break;
             case 0xB:
                 /* Quadword virtual access with protection check (hw_ldq/w) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEUQ);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX,
+                                    MO_LEUQ | MO_ALIGN);
                 break;
             case 0xC:
                 /* Longword virtual access with alt access mode (hw_ldl/a)*/
@@ -2453,12 +2455,14 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
             case 0xE:
                 /* Longword virtual access with alternate access mode and
                    protection checks (hw_ldl/wa) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX,
+                                    MO_LESL | MO_ALIGN);
                 break;
             case 0xF:
                 /* Quadword virtual access with alternate access mode and
                    protection checks (hw_ldq/wa) */
-                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEUQ);
+                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX,
+                                    MO_LEUQ | MO_ALIGN);
                 break;
             }
             break;
@@ -2659,7 +2663,7 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
                 vb = load_gpr(ctx, rb);
                 tmp = tcg_temp_new();
                 tcg_gen_addi_i64(tmp, vb, disp12);
-                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
+                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
                 break;
             case 0x1:
                 /* Quadword physical access */
@@ -2667,17 +2671,17 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
                 vb = load_gpr(ctx, rb);
                 tmp = tcg_temp_new();
                 tcg_gen_addi_i64(tmp, vb, disp12);
-                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEUQ);
+                tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
                 break;
             case 0x2:
                 /* Longword physical access with lock */
                 ret = gen_store_conditional(ctx, ra, rb, disp12,
-                                            MMU_PHYS_IDX, MO_LESL);
+                                            MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
                 break;
             case 0x3:
                 /* Quadword physical access with lock */
                 ret = gen_store_conditional(ctx, ra, rb, disp12,
-                                            MMU_PHYS_IDX, MO_LEUQ);
+                                            MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
                 break;
             case 0x4:
                 /* Longword virtual access */
@@ -2771,11 +2775,11 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
         break;
     case 0x2A:
         /* LDL_L */
-        gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 1);
+        gen_load_int(ctx, ra, rb, disp16, MO_LESL | MO_ALIGN, 0, 1);
         break;
     case 0x2B:
         /* LDQ_L */
-        gen_load_int(ctx, ra, rb, disp16, MO_LEUQ, 0, 1);
+        gen_load_int(ctx, ra, rb, disp16, MO_LEUQ | MO_ALIGN, 0, 1);
         break;
     case 0x2C:
         /* STL */
@@ -2788,12 +2792,12 @@ static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
     case 0x2E:
         /* STL_C */
         ret = gen_store_conditional(ctx, ra, rb, disp16,
-                                    ctx->mem_idx, MO_LESL);
+                                    ctx->mem_idx, MO_LESL | MO_ALIGN);
         break;
     case 0x2F:
         /* STQ_C */
         ret = gen_store_conditional(ctx, ra, rb, disp16,
-                                    ctx->mem_idx, MO_LEUQ);
+                                    ctx->mem_idx, MO_LEUQ | MO_ALIGN);
         break;
     case 0x30:
         /* BR */