author     Richard Henderson <richard.henderson@linaro.org>  2023-10-16 14:45:12 -0700
committer  Richard Henderson <richard.henderson@linaro.org>  2023-10-22 16:32:28 -0700
commit     bfefdbea9ee491842d4e61e54f97508c040913a8 (patch)
tree       134a023d0af035df17cfb5a778d01335d7695692
parent     40f40fc8f09edd8db620786a16a927e711257ff2 (diff)
tcg: Use constant zero when expanding with divu2
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
-rw-r--r--  tcg/tcg-op.c  16
1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/tcg/tcg-op.c b/tcg/tcg-op.c
index 393dbcd..c29355b 100644
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -342,8 +342,8 @@ void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
         tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
     } else if (TCG_TARGET_HAS_div2_i32) {
         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
-        tcg_gen_movi_i32(t0, 0);
-        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
+        TCGv_i32 zero = tcg_constant_i32(0);
+        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
         tcg_temp_free_i32(t0);
     } else {
         gen_helper_divu_i32(ret, arg1, arg2);
@@ -362,8 +362,8 @@ void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
         tcg_temp_free_i32(t0);
     } else if (TCG_TARGET_HAS_div2_i32) {
         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
-        tcg_gen_movi_i32(t0, 0);
-        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
+        TCGv_i32 zero = tcg_constant_i32(0);
+        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
         tcg_temp_free_i32(t0);
     } else {
         gen_helper_remu_i32(ret, arg1, arg2);
@@ -1674,8 +1674,8 @@ void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
         tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
     } else if (TCG_TARGET_HAS_div2_i64) {
         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
-        tcg_gen_movi_i64(t0, 0);
-        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
+        TCGv_i64 zero = tcg_constant_i64(0);
+        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
         tcg_temp_free_i64(t0);
     } else {
         gen_helper_divu_i64(ret, arg1, arg2);
@@ -1694,8 +1694,8 @@ void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
         tcg_temp_free_i64(t0);
     } else if (TCG_TARGET_HAS_div2_i64) {
         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
-        tcg_gen_movi_i64(t0, 0);
-        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
+        TCGv_i64 zero = tcg_constant_i64(0);
+        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
         tcg_temp_free_i64(t0);
     } else {
         gen_helper_remu_i64(ret, arg1, arg2);
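
For context: the div2-style opcodes divide a double-width dividend, supplied as a (low, high) pair, by a single-width divisor and produce quotient and remainder. When expanding a plain single-width divu/remu, the high half of the dividend is always zero, so the patch passes the shared constant returned by tcg_constant_i32/i64 instead of first emitting a movi into a scratch temporary. The standalone sketch below is a host-side model of that semantics, not QEMU code; the helper name divu2_i32 is made up for illustration.

/* Minimal host-side model of the divu2 expansion (assumed names, not QEMU API). */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Models a backend's div2 op: divide the 64-bit value (hi:lo) by 'div',
 * producing a 32-bit quotient and remainder. */
static void divu2_i32(uint32_t *q, uint32_t *r,
                      uint32_t lo, uint32_t hi, uint32_t div)
{
    uint64_t n = ((uint64_t)hi << 32) | lo;
    *q = (uint32_t)(n / div);
    *r = (uint32_t)(n % div);
}

int main(void)
{
    uint32_t arg1 = 123456789, arg2 = 1000;
    uint32_t q, r;

    /* Expanding a single-width divu/remu: the high word of the dividend is
     * always zero, which is why the expansion can feed a constant 0 directly
     * rather than moving 0 into a temporary first. */
    divu2_i32(&q, &r, arg1, 0, arg2);

    assert(q == arg1 / arg2);
    assert(r == arg1 % arg2);
    printf("q=%u r=%u\n", q, r);
    return 0;
}

In the generated TCG ops, this replaces a movi plus a reused temporary with a reference to the shared constant pool entry, so the zero no longer occupies a writable temp in the expansion.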