Diffstat (limited to 'tcg/tcg-op.c')
-rw-r--r--  tcg/tcg-op.c | 1244
1 file changed, 578 insertions(+), 666 deletions(-)
diff --git a/tcg/tcg-op.c b/tcg/tcg-op.c
index fec6d67..dfa5c38 100644
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -249,24 +249,6 @@ static void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
tcgv_i64_arg(a3), a4, a5);
}
-static void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
- TCGv_i32 a3, TCGv_i32 a4,
- TCGv_i32 a5, TCGv_i32 a6)
-{
- tcg_gen_op6(opc, TCG_TYPE_I32, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
- tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
- tcgv_i32_arg(a6));
-}
-
-static void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
- TCGv_i64 a3, TCGv_i64 a4,
- TCGv_i64 a5, TCGv_i64 a6)
-{
- tcg_gen_op6(opc, TCG_TYPE_I64, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
- tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
- tcgv_i64_arg(a6));
-}
-
static void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
TCGv_i32 a3, TCGv_i32 a4,
TCGv_i32 a5, TCGArg a6)
@@ -351,7 +333,7 @@ void tcg_gen_discard_i32(TCGv_i32 arg)
void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
if (ret != arg) {
- tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_mov, ret, arg);
}
}
@@ -362,7 +344,7 @@ void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_add, ret, arg1, arg2);
}
void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -377,7 +359,7 @@ void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_sub, ret, arg1, arg2);
}
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
@@ -396,12 +378,12 @@ void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_neg, ret, arg);
}
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_and, ret, arg1, arg2);
}
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -414,17 +396,19 @@ void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
case -1:
tcg_gen_mov_i32(ret, arg1);
return;
- case 0xff:
- /* Don't recurse with tcg_gen_ext8u_i32. */
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
- return;
- }
- break;
- case 0xffff:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
- return;
+ default:
+ /*
+ * Canonicalize on extract, if valid. This aids x86 with its
+ * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
+ * which does not require matching operands. Other backends can
+ * trivially expand the extract to AND during code generation.
+ */
+ if (!(arg2 & (arg2 + 1))) {
+ unsigned len = ctz32(~arg2);
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_extract_i32(ret, arg1, 0, len);
+ return;
+ }
}
break;
}
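
The mask test above relies on a standard bit trick: arg2 is a contiguous run of ones starting at bit 0 exactly when arg2 + 1 is a power of two, i.e. (arg2 & (arg2 + 1)) == 0. A minimal standalone sketch (plain C, with __builtin_ctz standing in for QEMU's ctz32; the 0 and -1 masks never reach this path because the switch returns for them earlier):

    #include <stdbool.h>
    #include <stdint.h>

    /* True iff mask == (1u << len) - 1 for some 1 <= len <= 31. */
    static bool is_low_mask(uint32_t mask)
    {
        return mask != 0 && mask != UINT32_MAX && (mask & (mask + 1)) == 0;
    }

    /* Field width of such a mask: trailing zeros of its complement. */
    static unsigned low_mask_len(uint32_t mask)
    {
        return __builtin_ctz(~mask);   /* 0xff -> 8, 0xffff -> 16 */
    }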
@@ -434,7 +418,7 @@ void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_or, ret, arg1, arg2);
}
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -451,7 +435,7 @@ void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_xor, ret, arg1, arg2);
}
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -459,9 +443,10 @@ void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
/* Some cases can be optimized here. */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
- } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
+ } else if (arg2 == -1 &&
+ tcg_op_supported(INDEX_op_not, TCG_TYPE_I32, 0)) {
/* Don't recurse with tcg_gen_not_i32. */
- tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
+ tcg_gen_op2_i32(INDEX_op_not, ret, arg1);
} else {
tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
}
@@ -469,8 +454,8 @@ void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_not_i32) {
- tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
+ if (tcg_op_supported(INDEX_op_not, TCG_TYPE_I32, 0)) {
+ tcg_gen_op2_i32(INDEX_op_not, ret, arg);
} else {
tcg_gen_xori_i32(ret, arg, -1);
}
@@ -478,7 +463,7 @@ void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_shl, ret, arg1, arg2);
}
void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -493,7 +478,7 @@ void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_shr, ret, arg1, arg2);
}
void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -508,7 +493,7 @@ void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_sar, ret, arg1, arg2);
}
void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -526,7 +511,7 @@ void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
if (cond == TCG_COND_ALWAYS) {
tcg_gen_br(l);
} else if (cond != TCG_COND_NEVER) {
- TCGOp *op = tcg_gen_op4ii_i32(INDEX_op_brcond_i32,
+ TCGOp *op = tcg_gen_op4ii_i32(INDEX_op_brcond,
arg1, arg2, cond, label_arg(l));
add_as_label_use(l, op);
}
@@ -549,7 +534,7 @@ void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
} else if (cond == TCG_COND_NEVER) {
tcg_gen_movi_i32(ret, 0);
} else {
- tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
+ tcg_gen_op4i_i32(INDEX_op_setcond, ret, arg1, arg2, cond);
}
}
@@ -566,11 +551,8 @@ void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
tcg_gen_movi_i32(ret, -1);
} else if (cond == TCG_COND_NEVER) {
tcg_gen_movi_i32(ret, 0);
- } else if (TCG_TARGET_HAS_negsetcond_i32) {
- tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
} else {
- tcg_gen_setcond_i32(cond, ret, arg1, arg2);
- tcg_gen_neg_i32(ret, ret);
+ tcg_gen_op4i_i32(INDEX_op_negsetcond, ret, arg1, arg2, cond);
}
}
@@ -582,7 +564,7 @@ void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_mul, ret, arg1, arg2);
}
void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -598,12 +580,12 @@ void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_div_i32) {
- tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div2_i32) {
+ if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_divs, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
+ tcg_gen_op5_i32(INDEX_op_divs2, ret, t0, arg1, t0, arg2);
tcg_temp_free_i32(t0);
} else {
gen_helper_div_i32(ret, arg1, arg2);
@@ -612,18 +594,18 @@ void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_rem_i32) {
- tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div_i32) {
+ if (tcg_op_supported(INDEX_op_rems, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_rems, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
- tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_divs, t0, arg1, arg2);
tcg_gen_mul_i32(t0, t0, arg2);
tcg_gen_sub_i32(ret, arg1, t0);
tcg_temp_free_i32(t0);
- } else if (TCG_TARGET_HAS_div2_i32) {
+ } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
tcg_gen_sari_i32(t0, arg1, 31);
- tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
+ tcg_gen_op5_i32(INDEX_op_divs2, t0, ret, arg1, t0, arg2);
tcg_temp_free_i32(t0);
} else {
gen_helper_rem_i32(ret, arg1, arg2);
@@ -632,12 +614,12 @@ void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_div_i32) {
- tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div2_i32) {
+ if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_divu, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
TCGv_i32 zero = tcg_constant_i32(0);
- tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
+ tcg_gen_op5_i32(INDEX_op_divu2, ret, t0, arg1, zero, arg2);
tcg_temp_free_i32(t0);
} else {
gen_helper_divu_i32(ret, arg1, arg2);
@@ -646,18 +628,18 @@ void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_rem_i32) {
- tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div_i32) {
+ if (tcg_op_supported(INDEX_op_remu, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_remu, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
- tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_divu, t0, arg1, arg2);
tcg_gen_mul_i32(t0, t0, arg2);
tcg_gen_sub_i32(ret, arg1, t0);
tcg_temp_free_i32(t0);
- } else if (TCG_TARGET_HAS_div2_i32) {
+ } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I32, 0)) {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
TCGv_i32 zero = tcg_constant_i32(0);
- tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
+ tcg_gen_op5_i32(INDEX_op_divu2, t0, ret, arg1, zero, arg2);
tcg_temp_free_i32(t0);
} else {
gen_helper_remu_i32(ret, arg1, arg2);
@@ -666,8 +648,8 @@ void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_andc_i32) {
- tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_andc, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_andc, ret, arg1, arg2);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
tcg_gen_not_i32(t0, arg2);
@@ -678,8 +660,8 @@ void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_eqv_i32) {
- tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_eqv, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_eqv, ret, arg1, arg2);
} else {
tcg_gen_xor_i32(ret, arg1, arg2);
tcg_gen_not_i32(ret, ret);
@@ -688,8 +670,8 @@ void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_nand_i32) {
- tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_nand, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_nand, ret, arg1, arg2);
} else {
tcg_gen_and_i32(ret, arg1, arg2);
tcg_gen_not_i32(ret, ret);
@@ -698,8 +680,8 @@ void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_nor_i32) {
- tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_nor, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_nor, ret, arg1, arg2);
} else {
tcg_gen_or_i32(ret, arg1, arg2);
tcg_gen_not_i32(ret, ret);
@@ -708,8 +690,8 @@ void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_orc_i32) {
- tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_orc, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_orc, ret, arg1, arg2);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
tcg_gen_not_i32(t0, arg2);
@@ -720,9 +702,9 @@ void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_clz_i32) {
- tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_clz_i64) {
+ if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_clz, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
TCGv_i64 t2 = tcg_temp_ebb_new_i64();
tcg_gen_extu_i32_i64(t1, arg1);
@@ -745,9 +727,13 @@ void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_ctz_i32) {
- tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_ctz_i64) {
+ TCGv_i32 z, t;
+
+ if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_ctz, ret, arg1, arg2);
+ return;
+ }
+ if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)) {
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
TCGv_i64 t2 = tcg_temp_ebb_new_i64();
tcg_gen_extu_i32_i64(t1, arg1);
@@ -756,34 +742,34 @@ void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
tcg_gen_extrl_i64_i32(ret, t1);
tcg_temp_free_i64(t1);
tcg_temp_free_i64(t2);
- } else if (TCG_TARGET_HAS_ctpop_i32
- || TCG_TARGET_HAS_ctpop_i64
- || TCG_TARGET_HAS_clz_i32
- || TCG_TARGET_HAS_clz_i64) {
- TCGv_i32 z, t = tcg_temp_ebb_new_i32();
-
- if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
- tcg_gen_subi_i32(t, arg1, 1);
- tcg_gen_andc_i32(t, t, arg1);
- tcg_gen_ctpop_i32(t, t);
- } else {
- /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
- tcg_gen_neg_i32(t, arg1);
- tcg_gen_and_i32(t, t, arg1);
- tcg_gen_clzi_i32(t, t, 32);
- tcg_gen_xori_i32(t, t, 31);
- }
- z = tcg_constant_i32(0);
- tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
- tcg_temp_free_i32(t);
+ return;
+ }
+ if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_REG, 0)) {
+ t = tcg_temp_ebb_new_i32();
+ tcg_gen_subi_i32(t, arg1, 1);
+ tcg_gen_andc_i32(t, t, arg1);
+ tcg_gen_ctpop_i32(t, t);
+ } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_REG, 0)) {
+ t = tcg_temp_ebb_new_i32();
+ tcg_gen_neg_i32(t, arg1);
+ tcg_gen_and_i32(t, t, arg1);
+ tcg_gen_clzi_i32(t, t, 32);
+ tcg_gen_xori_i32(t, t, 31);
} else {
gen_helper_ctz_i32(ret, arg1, arg2);
+ return;
}
+
+ z = tcg_constant_i32(0);
+ tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
+ tcg_temp_free_i32(t);
}
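
Both fallbacks above compute ctz without a native opcode. With ctpop, (arg1 - 1) & ~arg1 turns the trailing zeros into a block of ones whose population count is the answer; notably this yields 32 for arg1 == 0 with no fixup. With clz, arg1 & -arg1 isolates the lowest set bit, and for a single-bit value ctz == 31 - clz == clz ^ 31; the zero-input case is then repaired by the movcond on arg1 == 0. A plain-C sketch of the two identities (illustrative, not part of the patch):

    #include <stdint.h>

    static unsigned ctz32_via_ctpop(uint32_t x)
    {
        /* Bits strictly below the lowest set bit; all 32 bits when x == 0. */
        return __builtin_popcount((x - 1) & ~x);
    }

    static unsigned ctz32_via_clz(uint32_t x)   /* requires x != 0 */
    {
        return __builtin_clz(x & -x) ^ 31;
    }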
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
- if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
+ if (arg2 == 32
+ && !tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)
+ && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_REG, 0)) {
/* This equivalence has the advantage of not requiring a fixup. */
TCGv_i32 t = tcg_temp_ebb_new_i32();
tcg_gen_subi_i32(t, arg1, 1);
@@ -797,7 +783,7 @@ void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_clz_i32) {
+ if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_REG, 0)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
tcg_gen_sari_i32(t, arg, 31);
tcg_gen_xor_i32(t, t, arg);
@@ -811,9 +797,9 @@ void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
- if (TCG_TARGET_HAS_ctpop_i32) {
- tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
- } else if (TCG_TARGET_HAS_ctpop_i64) {
+ if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I32, 0)) {
+ tcg_gen_op2_i32(INDEX_op_ctpop, ret, arg1);
+ } else if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
TCGv_i64 t = tcg_temp_ebb_new_i64();
tcg_gen_extu_i32_i64(t, arg1);
tcg_gen_ctpop_i64(t, t);
@@ -826,15 +812,18 @@ void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_rot_i32) {
- tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ tcg_gen_neg_i32(t0, arg2);
+ tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, t0);
+ tcg_temp_free_i32(t0);
} else {
- TCGv_i32 t0, t1;
-
- t0 = tcg_temp_ebb_new_i32();
- t1 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
tcg_gen_shl_i32(t0, arg1, arg2);
- tcg_gen_subfi_i32(t1, 32, arg2);
+ tcg_gen_neg_i32(t1, arg2);
tcg_gen_shr_i32(t1, arg1, t1);
tcg_gen_or_i32(ret, t0, t1);
tcg_temp_free_i32(t0);
@@ -848,12 +837,15 @@ void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i32(ret, arg1);
- } else if (TCG_TARGET_HAS_rot_i32) {
- tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
+ } else if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_constant_i32(arg2);
+ tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, t0);
+ } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_constant_i32(32 - arg2);
+ tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, t0);
} else {
- TCGv_i32 t0, t1;
- t0 = tcg_temp_ebb_new_i32();
- t1 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
tcg_gen_shli_i32(t0, arg1, arg2);
tcg_gen_shri_i32(t1, arg1, 32 - arg2);
tcg_gen_or_i32(ret, t0, t1);
@@ -864,15 +856,18 @@ void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_rot_i32) {
- tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3_i32(INDEX_op_rotr, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ tcg_gen_neg_i32(t0, arg2);
+ tcg_gen_op3_i32(INDEX_op_rotl, ret, arg1, t0);
+ tcg_temp_free_i32(t0);
} else {
- TCGv_i32 t0, t1;
-
- t0 = tcg_temp_ebb_new_i32();
- t1 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
tcg_gen_shr_i32(t0, arg1, arg2);
- tcg_gen_subfi_i32(t1, 32, arg2);
+ tcg_gen_neg_i32(t1, arg2);
tcg_gen_shl_i32(t1, arg1, t1);
tcg_gen_or_i32(ret, t0, t1);
tcg_temp_free_i32(t0);
@@ -883,12 +878,7 @@ void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
tcg_debug_assert(arg2 >= 0 && arg2 < 32);
- /* some cases can be optimized here */
- if (arg2 == 0) {
- tcg_gen_mov_i32(ret, arg1);
- } else {
- tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
- }
+ tcg_gen_rotli_i32(ret, arg1, -arg2 & 31);
}
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
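
The rewrite of tcg_gen_rotri_i32 above folds the old arg2 == 0 special case into the negation: -arg2 & 31 equals (32 - arg2) mod 32, which maps 0 to 0 rather than 32. A quick standalone check in plain C (illustrative only):

    #include <stdint.h>

    /* Rotate right by n, 0 <= n < 32, expressed as a left rotation. */
    static uint32_t rotr32(uint32_t x, unsigned n)
    {
        unsigned left = -n & 31;               /* (32 - n) % 32 */
        return (x << left) | (x >> (-left & 31));
    }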
@@ -907,13 +897,13 @@ void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
return;
}
if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
- tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
+ tcg_gen_op5ii_i32(INDEX_op_deposit, ret, arg1, arg2, ofs, len);
return;
}
t1 = tcg_temp_ebb_new_i32();
- if (TCG_TARGET_HAS_extract2_i32) {
+ if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
if (ofs + len == 32) {
tcg_gen_shli_i32(t1, arg1, len);
tcg_gen_extract2_i32(ret, t1, arg2, len);
@@ -953,42 +943,22 @@ void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
} else if (TCG_TARGET_deposit_valid(TCG_TYPE_I32, ofs, len)) {
TCGv_i32 zero = tcg_constant_i32(0);
- tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
- } else {
- /* To help two-operand hosts we prefer to zero-extend first,
- which allows ARG to stay live. */
- switch (len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_ext16u_i32(ret, arg);
- tcg_gen_shli_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_ext8u_i32(ret, arg);
- tcg_gen_shli_i32(ret, ret, ofs);
- return;
- }
- break;
+ tcg_gen_op5ii_i32(INDEX_op_deposit, ret, zero, arg, ofs, len);
+ } else {
+ /*
+ * To help two-operand hosts we prefer to zero-extend first,
+ * which allows ARG to stay live.
+ */
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_extract_i32(ret, arg, 0, len);
+ tcg_gen_shli_i32(ret, ret, ofs);
+ return;
}
/* Otherwise prefer zero-extension over AND for code size. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_shli_i32(ret, arg, ofs);
- tcg_gen_ext16u_i32(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_shli_i32(ret, arg, ofs);
- tcg_gen_ext8u_i32(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_shli_i32(ret, arg, ofs);
+ tcg_gen_extract_i32(ret, ret, 0, ofs + len);
+ return;
}
tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
tcg_gen_shli_i32(ret, ret, ofs);
@@ -1008,32 +978,21 @@ void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
tcg_gen_shri_i32(ret, arg, 32 - len);
return;
}
- if (ofs == 0) {
- tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
- return;
- }
if (TCG_TARGET_extract_valid(TCG_TYPE_I32, ofs, len)) {
- tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
+ tcg_gen_op4ii_i32(INDEX_op_extract, ret, arg, ofs, len);
+ return;
+ }
+ if (ofs == 0) {
+ tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
return;
}
/* Assume that zero-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_ext16u_i32(ret, arg);
- tcg_gen_shri_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_ext8u_i32(ret, arg);
- tcg_gen_shri_i32(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_op4ii_i32(INDEX_op_extract, ret, arg, 0, ofs + len);
+ tcg_gen_shri_i32(ret, ret, ofs);
+ return;
}
/* ??? Ideally we'd know what values are available for immediate AND.
@@ -1064,54 +1023,22 @@ void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
tcg_gen_sari_i32(ret, arg, 32 - len);
return;
}
- if (ofs == 0) {
- switch (len) {
- case 16:
- tcg_gen_ext16s_i32(ret, arg);
- return;
- case 8:
- tcg_gen_ext8s_i32(ret, arg);
- return;
- }
- }
if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, ofs, len)) {
- tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
+ tcg_gen_op4ii_i32(INDEX_op_sextract, ret, arg, ofs, len);
return;
}
/* Assume that sign-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 16:
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_ext16s_i32(ret, arg);
- tcg_gen_sari_i32(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_ext8s_i32(ret, arg);
- tcg_gen_sari_i32(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, ofs + len)) {
+ tcg_gen_op4ii_i32(INDEX_op_sextract, ret, arg, 0, ofs + len);
+ tcg_gen_sari_i32(ret, ret, ofs);
+ return;
}
- switch (len) {
- case 16:
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_shri_i32(ret, arg, ofs);
- tcg_gen_ext16s_i32(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_shri_i32(ret, arg, ofs);
- tcg_gen_ext8s_i32(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I32, 0, len)) {
+ tcg_gen_shri_i32(ret, arg, ofs);
+ tcg_gen_op4ii_i32(INDEX_op_sextract, ret, ret, 0, len);
+ return;
}
tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
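
This final fallback left-justifies the field and shifts it back down with the arithmetic right shift that follows in the unchanged context. As plain C, assuming the usual arithmetic behavior of >> on signed values (true on all supported hosts, though implementation-defined in ISO C):

    #include <stdint.h>

    /* Sign-extract len bits at ofs; requires 0 < len && ofs + len <= 32. */
    static int32_t sextract32_shifts(uint32_t x, unsigned ofs, unsigned len)
    {
        return (int32_t)(x << (32 - len - ofs)) >> (32 - len);
    }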
@@ -1132,8 +1059,8 @@ void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
tcg_gen_mov_i32(ret, ah);
} else if (al == ah) {
tcg_gen_rotri_i32(ret, al, ofs);
- } else if (TCG_TARGET_HAS_extract2_i32) {
- tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
+ } else if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
+ tcg_gen_op4i_i32(INDEX_op_extract2, ret, al, ah, ofs);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
tcg_gen_shri_i32(t0, al, ofs);
@@ -1150,52 +1077,89 @@ void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
} else if (cond == TCG_COND_NEVER) {
tcg_gen_mov_i32(ret, v2);
} else {
- tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
+ tcg_gen_op6i_i32(INDEX_op_movcond, ret, c1, c2, v1, v2, cond);
}
}
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
- if (TCG_TARGET_HAS_add2_i32) {
- tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
+ if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ tcg_gen_op3_i32(INDEX_op_addco, t0, al, bl);
+ tcg_gen_op3_i32(INDEX_op_addci, rh, ah, bh);
+ tcg_gen_mov_i32(rl, t0);
+ tcg_temp_free_i32(t0);
} else {
- TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- TCGv_i64 t1 = tcg_temp_ebb_new_i64();
- tcg_gen_concat_i32_i64(t0, al, ah);
- tcg_gen_concat_i32_i64(t1, bl, bh);
- tcg_gen_add_i64(t0, t0, t1);
- tcg_gen_extr_i64_i32(rl, rh, t0);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
+ tcg_gen_add_i32(t0, al, bl);
+ tcg_gen_setcond_i32(TCG_COND_LTU, t1, t0, al);
+ tcg_gen_add_i32(rh, ah, bh);
+ tcg_gen_add_i32(rh, rh, t1);
+ tcg_gen_mov_i32(rl, t0);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
+ }
+}
+
+void tcg_gen_addcio_i32(TCGv_i32 r, TCGv_i32 co,
+ TCGv_i32 a, TCGv_i32 b, TCGv_i32 ci)
+{
+ if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 zero = tcg_constant_i32(0);
+ TCGv_i32 mone = tcg_constant_i32(-1);
+
+ tcg_gen_op3_i32(INDEX_op_addco, t0, ci, mone);
+ tcg_gen_op3_i32(INDEX_op_addcio, r, a, b);
+ tcg_gen_op3_i32(INDEX_op_addci, co, zero, zero);
+ tcg_temp_free_i32(t0);
+ } else {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
+
+ tcg_gen_add_i32(t0, a, b);
+ tcg_gen_setcond_i32(TCG_COND_LTU, t1, t0, a);
+ tcg_gen_add_i32(r, t0, ci);
+ tcg_gen_setcond_i32(TCG_COND_LTU, t0, r, t0);
+ tcg_gen_or_i32(co, t0, t1);
+
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
}
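
In the generic paths above, the carry out of the low-part addition is recovered with TCG_COND_LTU: an unsigned sum wrapped around exactly when it is less than either addend. For addcio, the OR of the two partial carries is exact, since with ci in {0, 1} at most one of the two additions can overflow. A standalone sketch of the double-word form (plain C, not part of the patch):

    #include <stdint.h>

    /* rl:rh = al:ah + bl:bh, as in the fallback above. */
    static void add2_32(uint32_t *rl, uint32_t *rh,
                        uint32_t al, uint32_t ah,
                        uint32_t bl, uint32_t bh)
    {
        uint32_t lo = al + bl;
        uint32_t carry = lo < al;   /* TCG_COND_LTU */
        *rh = ah + bh + carry;
        *rl = lo;
    }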
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
- if (TCG_TARGET_HAS_sub2_i32) {
- tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
+ if (tcg_op_supported(INDEX_op_subbi, TCG_TYPE_I32, 0)) {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ tcg_gen_op3_i32(INDEX_op_subbo, t0, al, bl);
+ tcg_gen_op3_i32(INDEX_op_subbi, rh, ah, bh);
+ tcg_gen_mov_i32(rl, t0);
+ tcg_temp_free_i32(t0);
} else {
- TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- TCGv_i64 t1 = tcg_temp_ebb_new_i64();
- tcg_gen_concat_i32_i64(t0, al, ah);
- tcg_gen_concat_i32_i64(t1, bl, bh);
- tcg_gen_sub_i64(t0, t0, t1);
- tcg_gen_extr_i64_i32(rl, rh, t0);
- tcg_temp_free_i64(t0);
- tcg_temp_free_i64(t1);
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 t1 = tcg_temp_ebb_new_i32();
+ tcg_gen_sub_i32(t0, al, bl);
+ tcg_gen_setcond_i32(TCG_COND_LTU, t1, al, bl);
+ tcg_gen_sub_i32(rh, ah, bh);
+ tcg_gen_sub_i32(rh, rh, t1);
+ tcg_gen_mov_i32(rl, t0);
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(t1);
}
}
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_mulu2_i32) {
- tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_muluh_i32) {
+ if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I32, 0)) {
+ tcg_gen_op4_i32(INDEX_op_mulu2, rl, rh, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I32, 0)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
- tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
- tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_mul, t, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_muluh, rh, arg1, arg2);
tcg_gen_mov_i32(rl, t);
tcg_temp_free_i32(t);
} else if (TCG_TARGET_REG_BITS == 64) {
@@ -1208,18 +1172,18 @@ void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
tcg_temp_free_i64(t0);
tcg_temp_free_i64(t1);
} else {
- qemu_build_not_reached();
+ g_assert_not_reached();
}
}
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
- if (TCG_TARGET_HAS_muls2_i32) {
- tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_mulsh_i32) {
+ if (tcg_op_supported(INDEX_op_muls2, TCG_TYPE_I32, 0)) {
+ tcg_gen_op4_i32(INDEX_op_muls2, rl, rh, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_mulsh, TCG_TYPE_I32, 0)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
- tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
- tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_mul, t, arg1, arg2);
+ tcg_gen_op3_i32(INDEX_op_mulsh, rh, arg1, arg2);
tcg_gen_mov_i32(rl, t);
tcg_temp_free_i32(t);
} else if (TCG_TARGET_REG_BITS == 32) {
@@ -1281,40 +1245,22 @@ void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext8s_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
- } else {
- tcg_gen_shli_i32(ret, arg, 24);
- tcg_gen_sari_i32(ret, ret, 24);
- }
+ tcg_gen_sextract_i32(ret, arg, 0, 8);
}
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext16s_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
- } else {
- tcg_gen_shli_i32(ret, arg, 16);
- tcg_gen_sari_i32(ret, ret, 16);
- }
+ tcg_gen_sextract_i32(ret, arg, 0, 16);
}
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext8u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
- } else {
- tcg_gen_andi_i32(ret, arg, 0xffu);
- }
+ tcg_gen_extract_i32(ret, arg, 0, 8);
}
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_ext16u_i32) {
- tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
- } else {
- tcg_gen_andi_i32(ret, arg, 0xffffu);
- }
+ tcg_gen_extract_i32(ret, arg, 0, 16);
}
/*
@@ -1330,8 +1276,8 @@ void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
/* Only one extension flag may be present. */
tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
- if (TCG_TARGET_HAS_bswap16_i32) {
- tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
+ if (tcg_op_supported(INDEX_op_bswap16, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3i_i32(INDEX_op_bswap16, ret, arg, flags);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
TCGv_i32 t1 = tcg_temp_ebb_new_i32();
@@ -1367,8 +1313,8 @@ void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
*/
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
- if (TCG_TARGET_HAS_bswap32_i32) {
- tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
+ if (tcg_op_supported(INDEX_op_bswap32, TCG_TYPE_I32, 0)) {
+ tcg_gen_op3i_i32(INDEX_op_bswap32, ret, arg, 0);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
TCGv_i32 t1 = tcg_temp_ebb_new_i32();
@@ -1433,42 +1379,42 @@ void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld8u, ret, arg2, offset);
}
void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld8s, ret, arg2, offset);
}
void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld16u, ret, arg2, offset);
}
void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld16s, ret, arg2, offset);
}
void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_ld, ret, arg2, offset);
}
void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st8, arg1, arg2, offset);
}
void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st16, arg1, arg2, offset);
}
void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
+ tcg_gen_ldst_op_i32(INDEX_op_st, arg1, arg2, offset);
}
@@ -1490,7 +1436,7 @@ void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
return;
}
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_mov, ret, arg);
} else {
TCGTemp *ts = tcgv_i64_temp(arg);
@@ -1517,7 +1463,7 @@ void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld8u, ret, arg2, offset);
} else {
tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
@@ -1527,7 +1473,7 @@ void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld8s, ret, arg2, offset);
} else {
tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
@@ -1537,7 +1483,7 @@ void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld16u, ret, arg2, offset);
} else {
tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
@@ -1547,7 +1493,7 @@ void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld16s, ret, arg2, offset);
} else {
tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
@@ -1557,7 +1503,7 @@ void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld32u, ret, arg2, offset);
} else {
tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
@@ -1567,7 +1513,7 @@ void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld32s, ret, arg2, offset);
} else {
tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
@@ -1581,7 +1527,7 @@ void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
* they cannot be the same temporary -- no chance of overlap.
*/
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_ld, ret, arg2, offset);
} else if (HOST_BIG_ENDIAN) {
tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
@@ -1594,7 +1540,7 @@ void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st8, arg1, arg2, offset);
} else {
tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}
@@ -1603,7 +1549,7 @@ void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st16, arg1, arg2, offset);
} else {
tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}
@@ -1612,7 +1558,7 @@ void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st32, arg1, arg2, offset);
} else {
tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}
@@ -1621,7 +1567,7 @@ void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
+ tcg_gen_ldst_op_i64(INDEX_op_st, arg1, arg2, offset);
} else if (HOST_BIG_ENDIAN) {
tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
@@ -1634,7 +1580,7 @@ void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_add, ret, arg1, arg2);
} else {
tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
@@ -1644,7 +1590,7 @@ void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_sub, ret, arg1, arg2);
} else {
tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
@@ -1654,7 +1600,7 @@ void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_and, ret, arg1, arg2);
} else {
tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
@@ -1664,7 +1610,7 @@ void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_or, ret, arg1, arg2);
} else {
tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
@@ -1674,7 +1620,7 @@ void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_xor, ret, arg1, arg2);
} else {
tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
@@ -1684,7 +1630,7 @@ void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_shl, ret, arg1, arg2);
} else {
gen_helper_shl_i64(ret, arg1, arg2);
}
@@ -1693,7 +1639,7 @@ void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_shr, ret, arg1, arg2);
} else {
gen_helper_shr_i64(ret, arg1, arg2);
}
@@ -1702,7 +1648,7 @@ void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_sar, ret, arg1, arg2);
} else {
gen_helper_sar_i64(ret, arg1, arg2);
}
@@ -1714,7 +1660,7 @@ void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
TCGv_i32 t1;
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_mul, ret, arg1, arg2);
return;
}
@@ -1770,7 +1716,7 @@ void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_neg, ret, arg);
} else {
TCGv_i32 zero = tcg_constant_i32(0);
tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
@@ -1794,23 +1740,19 @@ void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
case -1:
tcg_gen_mov_i64(ret, arg1);
return;
- case 0xff:
- /* Don't recurse with tcg_gen_ext8u_i64. */
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
- return;
- }
- break;
- case 0xffff:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
- return;
- }
- break;
- case 0xffffffffu:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
- return;
+ default:
+ /*
+ * Canonicalize on extract, if valid. This aids x86 with its
+ * 2 operand MOVZBL and 2 operand AND, selecting the TCGOpcode
+ * which does not require matching operands. Other backends can
+ * trivially expand the extract to AND during code generation.
+ */
+ if (!(arg2 & (arg2 + 1))) {
+ unsigned len = ctz64(~arg2);
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_extract_i64(ret, arg1, 0, len);
+ return;
+ }
}
break;
}
@@ -1845,9 +1787,10 @@ void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
/* Some cases can be optimized here. */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
- } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
+ } else if (arg2 == -1 &&
+ tcg_op_supported(INDEX_op_not, TCG_TYPE_I64, 0)) {
/* Don't recurse with tcg_gen_not_i64. */
- tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
+ tcg_gen_op2_i64(INDEX_op_not, ret, arg1);
} else {
tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
}
@@ -1875,7 +1818,7 @@ static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
tcg_gen_movi_i32(TCGV_LOW(ret), 0);
}
} else if (right) {
- if (TCG_TARGET_HAS_extract2_i32) {
+ if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
tcg_gen_extract2_i32(TCGV_LOW(ret),
TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
} else {
@@ -1889,7 +1832,7 @@ static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
}
} else {
- if (TCG_TARGET_HAS_extract2_i32) {
+ if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I32, 0)) {
tcg_gen_extract2_i32(TCGV_HIGH(ret),
TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
} else {
@@ -1950,7 +1893,7 @@ void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
TCGV_HIGH(arg1), TCGV_LOW(arg2),
TCGV_HIGH(arg2), cond, label_arg(l));
} else {
- op = tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
+ op = tcg_gen_op4ii_i64(INDEX_op_brcond, arg1, arg2, cond,
label_arg(l));
}
add_as_label_use(l, op);
@@ -1987,7 +1930,7 @@ void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
} else {
- tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
+ tcg_gen_op4i_i64(INDEX_op_setcond, ret, arg1, arg2, cond);
}
}
}
@@ -2023,17 +1966,14 @@ void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
tcg_gen_movi_i64(ret, -1);
} else if (cond == TCG_COND_NEVER) {
tcg_gen_movi_i64(ret, 0);
- } else if (TCG_TARGET_HAS_negsetcond_i64) {
- tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
- } else if (TCG_TARGET_REG_BITS == 32) {
+ } else if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op4i_i64(INDEX_op_negsetcond, ret, arg1, arg2, cond);
+ } else {
tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
TCGV_LOW(arg1), TCGV_HIGH(arg1),
TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
- } else {
- tcg_gen_setcond_i64(cond, ret, arg1, arg2);
- tcg_gen_neg_i64(ret, ret);
}
}
@@ -2050,12 +1990,12 @@ void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_div_i64) {
- tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div2_i64) {
+ if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_divs, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
+ tcg_gen_op5_i64(INDEX_op_divs2, ret, t0, arg1, t0, arg2);
tcg_temp_free_i64(t0);
} else {
gen_helper_div_i64(ret, arg1, arg2);
@@ -2064,18 +2004,18 @@ void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_rem_i64) {
- tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div_i64) {
+ if (tcg_op_supported(INDEX_op_rems, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_rems, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divs, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_divs, t0, arg1, arg2);
tcg_gen_mul_i64(t0, t0, arg2);
tcg_gen_sub_i64(ret, arg1, t0);
tcg_temp_free_i64(t0);
- } else if (TCG_TARGET_HAS_div2_i64) {
+ } else if (tcg_op_supported(INDEX_op_divs2, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
tcg_gen_sari_i64(t0, arg1, 63);
- tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
+ tcg_gen_op5_i64(INDEX_op_divs2, t0, ret, arg1, t0, arg2);
tcg_temp_free_i64(t0);
} else {
gen_helper_rem_i64(ret, arg1, arg2);
@@ -2084,12 +2024,12 @@ void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_div_i64) {
- tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div2_i64) {
+ if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_divu, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 zero = tcg_constant_i64(0);
- tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
+ tcg_gen_op5_i64(INDEX_op_divu2, ret, t0, arg1, zero, arg2);
tcg_temp_free_i64(t0);
} else {
gen_helper_divu_i64(ret, arg1, arg2);
@@ -2098,18 +2038,18 @@ void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_rem_i64) {
- tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_div_i64) {
+ if (tcg_op_supported(INDEX_op_remu, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_remu, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_divu, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_divu, t0, arg1, arg2);
tcg_gen_mul_i64(t0, t0, arg2);
tcg_gen_sub_i64(ret, arg1, t0);
tcg_temp_free_i64(t0);
- } else if (TCG_TARGET_HAS_div2_i64) {
+ } else if (tcg_op_supported(INDEX_op_divu2, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 zero = tcg_constant_i64(0);
- tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
+ tcg_gen_op5_i64(INDEX_op_divu2, t0, ret, arg1, zero, arg2);
tcg_temp_free_i64(t0);
} else {
gen_helper_remu_i64(ret, arg1, arg2);
@@ -2118,77 +2058,32 @@ void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 56);
- tcg_gen_sari_i64(ret, ret, 56);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 8);
}
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 48);
- tcg_gen_sari_i64(ret, ret, 48);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 16);
}
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
- } else if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
- } else {
- tcg_gen_shli_i64(ret, arg, 32);
- tcg_gen_sari_i64(ret, ret, 32);
- }
+ tcg_gen_sextract_i64(ret, arg, 0, 32);
}
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 8);
}
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 16);
}
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_REG_BITS == 32) {
- tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
- } else if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
- } else {
- tcg_gen_andi_i64(ret, arg, 0xffffffffu);
- }
+ tcg_gen_extract_i64(ret, arg, 0, 32);
}
/*
@@ -2211,8 +2106,8 @@ void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
} else {
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
- } else if (TCG_TARGET_HAS_bswap16_i64) {
- tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
+ } else if (tcg_op_supported(INDEX_op_bswap16, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3i_i64(INDEX_op_bswap16, ret, arg, flags);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
@@ -2261,8 +2156,8 @@ void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
} else {
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
- } else if (TCG_TARGET_HAS_bswap32_i64) {
- tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
+ } else if (tcg_op_supported(INDEX_op_bswap32, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3i_i64(INDEX_op_bswap32, ret, arg, flags);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
@@ -2308,8 +2203,8 @@ void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
tcg_temp_free_i32(t0);
tcg_temp_free_i32(t1);
- } else if (TCG_TARGET_HAS_bswap64_i64) {
- tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
+ } else if (tcg_op_supported(INDEX_op_bswap64, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3i_i64(INDEX_op_bswap64, ret, arg, 0);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
@@ -2380,8 +2275,8 @@ void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
- } else if (TCG_TARGET_HAS_not_i64) {
- tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
+ } else if (tcg_op_supported(INDEX_op_not, TCG_TYPE_I64, 0)) {
+ tcg_gen_op2_i64(INDEX_op_not, ret, arg);
} else {
tcg_gen_xori_i64(ret, arg, -1);
}
@@ -2392,8 +2287,8 @@ void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
- } else if (TCG_TARGET_HAS_andc_i64) {
- tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_andc, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_andc, ret, arg1, arg2);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
tcg_gen_not_i64(t0, arg2);
@@ -2407,8 +2302,8 @@ void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
- } else if (TCG_TARGET_HAS_eqv_i64) {
- tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_eqv, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_eqv, ret, arg1, arg2);
} else {
tcg_gen_xor_i64(ret, arg1, arg2);
tcg_gen_not_i64(ret, ret);
@@ -2420,8 +2315,8 @@ void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
- } else if (TCG_TARGET_HAS_nand_i64) {
- tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_nand, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_nand, ret, arg1, arg2);
} else {
tcg_gen_and_i64(ret, arg1, arg2);
tcg_gen_not_i64(ret, ret);
@@ -2433,8 +2328,8 @@ void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
- } else if (TCG_TARGET_HAS_nor_i64) {
- tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_nor, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_nor, ret, arg1, arg2);
} else {
tcg_gen_or_i64(ret, arg1, arg2);
tcg_gen_not_i64(ret, ret);
@@ -2446,8 +2341,8 @@ void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
- } else if (TCG_TARGET_HAS_orc_i64) {
- tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_orc, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_orc, ret, arg1, arg2);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
tcg_gen_not_i64(t0, arg2);
@@ -2458,8 +2353,8 @@ void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_clz_i64) {
- tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_clz, ret, arg1, arg2);
} else {
gen_helper_clz_i64(ret, arg1, arg2);
}
@@ -2468,8 +2363,8 @@ void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
if (TCG_TARGET_REG_BITS == 32
- && TCG_TARGET_HAS_clz_i32
- && arg2 <= 0xffffffffu) {
+ && arg2 <= 0xffffffffu
+ && tcg_op_supported(INDEX_op_clz, TCG_TYPE_I32, 0)) {
TCGv_i32 t = tcg_temp_ebb_new_i32();
tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
tcg_gen_addi_i32(t, t, 32);
@@ -2483,45 +2378,47 @@ void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_ctz_i64) {
- tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
- } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
- TCGv_i64 z, t = tcg_temp_ebb_new_i64();
+ TCGv_i64 z, t;
- if (TCG_TARGET_HAS_ctpop_i64) {
- tcg_gen_subi_i64(t, arg1, 1);
- tcg_gen_andc_i64(t, t, arg1);
- tcg_gen_ctpop_i64(t, t);
- } else {
- /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
- tcg_gen_neg_i64(t, arg1);
- tcg_gen_and_i64(t, t, arg1);
- tcg_gen_clzi_i64(t, t, 64);
- tcg_gen_xori_i64(t, t, 63);
- }
- z = tcg_constant_i64(0);
- tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
- tcg_temp_free_i64(t);
- tcg_temp_free_i64(z);
+ if (tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_ctz, ret, arg1, arg2);
+ return;
+ }
+ if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
+ t = tcg_temp_ebb_new_i64();
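+        /* (arg - 1) & ~arg sets exactly the trailing zero bits of
+         * arg; counting them with ctpop yields ctz. */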
+ tcg_gen_subi_i64(t, arg1, 1);
+ tcg_gen_andc_i64(t, t, arg1);
+ tcg_gen_ctpop_i64(t, t);
+ } else if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
+ t = tcg_temp_ebb_new_i64();
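+        /* arg & -arg isolates the least significant set bit; for a
+         * single bit at position k, clz is 63 - k, and xor with 63
+         * recovers k.  The arg == 0 case is fixed up below. */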
+ tcg_gen_neg_i64(t, arg1);
+ tcg_gen_and_i64(t, t, arg1);
+ tcg_gen_clzi_i64(t, t, 64);
+ tcg_gen_xori_i64(t, t, 63);
} else {
gen_helper_ctz_i64(ret, arg1, arg2);
+ return;
}
+
+ z = tcg_constant_i64(0);
+ tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
+ tcg_temp_free_i64(t);
}
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
if (TCG_TARGET_REG_BITS == 32
- && TCG_TARGET_HAS_ctz_i32
- && arg2 <= 0xffffffffu) {
+ && arg2 <= 0xffffffffu
+ && tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I32, 0)) {
TCGv_i32 t32 = tcg_temp_ebb_new_i32();
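+        /* ctz of the 64-bit value is ctz of the low word, or
+         * 32 + ctz of the high word when the low word is zero. */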
tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
tcg_gen_addi_i32(t32, t32, 32);
tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
tcg_temp_free_i32(t32);
- } else if (!TCG_TARGET_HAS_ctz_i64
- && TCG_TARGET_HAS_ctpop_i64
- && arg2 == 64) {
+ } else if (arg2 == 64
+ && !tcg_op_supported(INDEX_op_ctz, TCG_TYPE_I64, 0)
+ && tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
/* This equivalence has the advantage of not requiring a fixup. */
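+        /* ctz(x, 64) == ctpop((x - 1) & ~x), even for x == 0. */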
TCGv_i64 t = tcg_temp_ebb_new_i64();
tcg_gen_subi_i64(t, arg1, 1);
@@ -2535,7 +2432,7 @@ void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
+ if (tcg_op_supported(INDEX_op_clz, TCG_TYPE_I64, 0)) {
TCGv_i64 t = tcg_temp_ebb_new_i64();
tcg_gen_sari_i64(t, arg, 63);
tcg_gen_xor_i64(t, t, arg);
@@ -2549,28 +2446,37 @@ void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
- if (TCG_TARGET_HAS_ctpop_i64) {
- tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
- } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
- tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
- tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
- tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ if (TCG_TARGET_REG_BITS == 64) {
+ if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I64, 0)) {
+ tcg_gen_op2_i64(INDEX_op_ctpop, ret, arg1);
+ return;
+ }
} else {
- gen_helper_ctpop_i64(ret, arg1);
+ if (tcg_op_supported(INDEX_op_ctpop, TCG_TYPE_I32, 0)) {
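+            /* Sum the ctpop of the two 32-bit halves. */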
+ tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
+ tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
+ tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ return;
+ }
}
+ gen_helper_ctpop_i64(ret, arg1);
}
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_rot_i64) {
- tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, arg2);
+    } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
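+        /* Emulate rotate left by rotating right by the negated count. */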
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ tcg_gen_neg_i64(t0, arg2);
+ tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, t0);
+ tcg_temp_free_i64(t0);
} else {
- TCGv_i64 t0, t1;
- t0 = tcg_temp_ebb_new_i64();
- t1 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t1 = tcg_temp_ebb_new_i64();
tcg_gen_shl_i64(t0, arg1, arg2);
- tcg_gen_subfi_i64(t1, 64, arg2);
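+        /* Note that -arg2 is congruent to 64 - arg2 modulo 64. */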
+ tcg_gen_neg_i64(t1, arg2);
tcg_gen_shr_i64(t1, arg1, t1);
tcg_gen_or_i64(ret, t0, t1);
tcg_temp_free_i64(t0);
@@ -2584,12 +2490,15 @@ void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
/* some cases can be optimized here */
if (arg2 == 0) {
tcg_gen_mov_i64(ret, arg1);
- } else if (TCG_TARGET_HAS_rot_i64) {
- tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
+ } else if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
+ TCGv_i64 t0 = tcg_constant_i64(arg2);
+ tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, t0);
+ } else if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
+ TCGv_i64 t0 = tcg_constant_i64(64 - arg2);
+ tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, t0);
} else {
- TCGv_i64 t0, t1;
- t0 = tcg_temp_ebb_new_i64();
- t1 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t1 = tcg_temp_ebb_new_i64();
tcg_gen_shli_i64(t0, arg1, arg2);
tcg_gen_shri_i64(t1, arg1, 64 - arg2);
tcg_gen_or_i64(ret, t0, t1);
@@ -2600,14 +2509,18 @@ void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_rot_i64) {
- tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
+ if (tcg_op_supported(INDEX_op_rotr, TCG_TYPE_I64, 0)) {
+ tcg_gen_op3_i64(INDEX_op_rotr, ret, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_rotl, TCG_TYPE_I64, 0)) {
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ tcg_gen_neg_i64(t0, arg2);
+ tcg_gen_op3_i64(INDEX_op_rotl, ret, arg1, t0);
+ tcg_temp_free_i64(t0);
} else {
- TCGv_i64 t0, t1;
- t0 = tcg_temp_ebb_new_i64();
- t1 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t1 = tcg_temp_ebb_new_i64();
tcg_gen_shr_i64(t0, arg1, arg2);
- tcg_gen_subfi_i64(t1, 64, arg2);
+ tcg_gen_neg_i64(t1, arg2);
tcg_gen_shl_i64(t1, arg1, t1);
tcg_gen_or_i64(ret, t0, t1);
tcg_temp_free_i64(t0);
@@ -2618,12 +2531,7 @@ void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
tcg_debug_assert(arg2 >= 0 && arg2 < 64);
- /* some cases can be optimized here */
- if (arg2 == 0) {
- tcg_gen_mov_i64(ret, arg1);
- } else {
- tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
- }
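+    /* Rotate right by X equals rotate left by (64 - X) & 63;
+     * -arg2 & 63 computes this and maps arg2 == 0 to 0. */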
+ tcg_gen_rotli_i64(ret, arg1, -arg2 & 63);
}
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
@@ -2644,7 +2552,7 @@ void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
if (TCG_TARGET_REG_BITS == 64) {
if (TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
- tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
+ tcg_gen_op5ii_i64(INDEX_op_deposit, ret, arg1, arg2, ofs, len);
return;
}
} else {
@@ -2664,7 +2572,7 @@ void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
t1 = tcg_temp_ebb_new_i64();
- if (TCG_TARGET_HAS_extract2_i64) {
+ if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I64, 0)) {
if (ofs + len == 64) {
tcg_gen_shli_i64(t1, arg1, len);
tcg_gen_extract2_i64(ret, t1, arg2, len);
@@ -2705,7 +2613,7 @@ void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
} else if (TCG_TARGET_REG_BITS == 64 &&
TCG_TARGET_deposit_valid(TCG_TYPE_I64, ofs, len)) {
TCGv_i64 zero = tcg_constant_i64(0);
- tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
+ tcg_gen_op5ii_i64(INDEX_op_deposit, ret, zero, arg, ofs, len);
} else {
if (TCG_TARGET_REG_BITS == 32) {
if (ofs >= 32) {
@@ -2720,54 +2628,20 @@ void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
return;
}
}
- /* To help two-operand hosts we prefer to zero-extend first,
- which allows ARG to stay live. */
- switch (len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_ext32u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_ext16u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_ext8u_i64(ret, arg);
- tcg_gen_shli_i64(ret, ret, ofs);
- return;
- }
- break;
+ /*
+ * To help two-operand hosts we prefer to zero-extend first,
+ * which allows ARG to stay live.
+ */
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_extract_i64(ret, arg, 0, len);
+ tcg_gen_shli_i64(ret, ret, ofs);
+ return;
}
/* Otherwise prefer zero-extension over AND for code size. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext32u_i64(ret, ret);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext16u_i64(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_shli_i64(ret, arg, ofs);
- tcg_gen_ext8u_i64(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_shli_i64(ret, arg, ofs);
+ tcg_gen_extract_i64(ret, ret, 0, ofs + len);
+ return;
}
tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
tcg_gen_shli_i64(ret, ret, ofs);
@@ -2787,10 +2661,6 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_shri_i64(ret, arg, 64 - len);
return;
}
- if (ofs == 0) {
- tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
- return;
- }
if (TCG_TARGET_REG_BITS == 32) {
/* Look for a 32-bit extract within one of the two words. */
@@ -2804,39 +2674,34 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
return;
}
- /* The field is split across two words. One double-word
- shift is better than two double-word shifts. */
- goto do_shift_and;
+
+ /* The field is split across two words. */
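+    /* extract2 funnel-shifts the pair right by ofs, placing the
+     * field at bit 0 of the low word. */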
+ tcg_gen_extract2_i32(TCGV_LOW(ret), TCGV_LOW(arg),
+ TCGV_HIGH(arg), ofs);
+ if (len <= 32) {
+ tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(ret), 0, len);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ } else {
+ tcg_gen_extract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg),
+ ofs, len - 32);
+ }
+ return;
}
if (TCG_TARGET_extract_valid(TCG_TYPE_I64, ofs, len)) {
- tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
+ tcg_gen_op4ii_i64(INDEX_op_extract, ret, arg, ofs, len);
+ return;
+ }
+ if (ofs == 0) {
+ tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
return;
}
/* Assume that zero-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32u_i64) {
- tcg_gen_ext32u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16u_i64) {
- tcg_gen_ext16u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8u_i64) {
- tcg_gen_ext8u_i64(ret, arg);
- tcg_gen_shri_i64(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_extract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_op4ii_i64(INDEX_op_extract, ret, arg, 0, ofs + len);
+ tcg_gen_shri_i64(ret, ret, ofs);
+ return;
}
/* ??? Ideally we'd know what values are available for immediate AND.
@@ -2844,7 +2709,6 @@ void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
so that we get ext8u, ext16u, and ext32u. */
switch (len) {
case 1 ... 8: case 16: case 32:
- do_shift_and:
tcg_gen_shri_i64(ret, arg, ofs);
tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
break;
@@ -2868,19 +2732,6 @@ void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
tcg_gen_sari_i64(ret, arg, 64 - len);
return;
}
- if (ofs == 0) {
- switch (len) {
- case 32:
- tcg_gen_ext32s_i64(ret, arg);
- return;
- case 16:
- tcg_gen_ext16s_i64(ret, arg);
- return;
- case 8:
- tcg_gen_ext8s_i64(ret, arg);
- return;
- }
- }
if (TCG_TARGET_REG_BITS == 32) {
/* Look for a 32-bit extract within one of the two words. */
@@ -2915,57 +2766,22 @@ void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
}
if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, ofs, len)) {
- tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
+ tcg_gen_op4ii_i64(INDEX_op_sextract, ret, arg, ofs, len);
return;
}
/* Assume that sign-extension, if available, is cheaper than a shift. */
- switch (ofs + len) {
- case 32:
- if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_ext32s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_ext16s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_ext8s_i64(ret, arg);
- tcg_gen_sari_i64(ret, ret, ofs);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, ofs + len)) {
+ tcg_gen_op4ii_i64(INDEX_op_sextract, ret, arg, 0, ofs + len);
+ tcg_gen_sari_i64(ret, ret, ofs);
+ return;
}
- switch (len) {
- case 32:
- if (TCG_TARGET_HAS_ext32s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext32s_i64(ret, ret);
- return;
- }
- break;
- case 16:
- if (TCG_TARGET_HAS_ext16s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext16s_i64(ret, ret);
- return;
- }
- break;
- case 8:
- if (TCG_TARGET_HAS_ext8s_i64) {
- tcg_gen_shri_i64(ret, arg, ofs);
- tcg_gen_ext8s_i64(ret, ret);
- return;
- }
- break;
+ if (TCG_TARGET_sextract_valid(TCG_TYPE_I64, 0, len)) {
+ tcg_gen_shri_i64(ret, arg, ofs);
+ tcg_gen_op4ii_i64(INDEX_op_sextract, ret, ret, 0, len);
+ return;
}
+
tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
tcg_gen_sari_i64(ret, ret, 64 - len);
}
@@ -2984,8 +2800,8 @@ void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
tcg_gen_mov_i64(ret, ah);
} else if (al == ah) {
tcg_gen_rotri_i64(ret, al, ofs);
- } else if (TCG_TARGET_HAS_extract2_i64) {
- tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
+ } else if (tcg_op_supported(INDEX_op_extract2, TCG_TYPE_I64, 0)) {
+ tcg_gen_op4i_i64(INDEX_op_extract2, ret, al, ah, ofs);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
tcg_gen_shri_i64(t0, al, ofs);
@@ -3002,7 +2818,7 @@ void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
} else if (cond == TCG_COND_NEVER) {
tcg_gen_mov_i64(ret, v2);
} else if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
+ tcg_gen_op6i_i64(INDEX_op_movcond, ret, c1, c2, v1, v2, cond);
} else {
TCGv_i32 t0 = tcg_temp_ebb_new_i32();
TCGv_i32 zero = tcg_constant_i32(0);
@@ -3023,8 +2839,25 @@ void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
- if (TCG_TARGET_HAS_add2_i64) {
- tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
+ if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_REG, 0)) {
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+
+ if (TCG_TARGET_REG_BITS == 32) {
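+            /* Add the 128-bit operands as four 32-bit limbs,
+             * chaining the carry from low to high. */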
+ tcg_gen_op3_i32(INDEX_op_addco, TCGV_LOW(t0),
+ TCGV_LOW(al), TCGV_LOW(bl));
+ tcg_gen_op3_i32(INDEX_op_addcio, TCGV_HIGH(t0),
+ TCGV_HIGH(al), TCGV_HIGH(bl));
+ tcg_gen_op3_i32(INDEX_op_addcio, TCGV_LOW(rh),
+ TCGV_LOW(ah), TCGV_LOW(bh));
+ tcg_gen_op3_i32(INDEX_op_addci, TCGV_HIGH(rh),
+ TCGV_HIGH(ah), TCGV_HIGH(bh));
+ } else {
+ tcg_gen_op3_i64(INDEX_op_addco, t0, al, bl);
+ tcg_gen_op3_i64(INDEX_op_addci, rh, ah, bh);
+ }
+
+ tcg_gen_mov_i64(rl, t0);
+ tcg_temp_free_i64(t0);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
@@ -3038,11 +2871,96 @@ void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
}
}
+void tcg_gen_addcio_i64(TCGv_i64 r, TCGv_i64 co,
+ TCGv_i64 a, TCGv_i64 b, TCGv_i64 ci)
+{
+ if (TCG_TARGET_REG_BITS == 64) {
+ if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I64, 0)) {
+ TCGv_i64 discard = tcg_temp_ebb_new_i64();
+ TCGv_i64 zero = tcg_constant_i64(0);
+ TCGv_i64 mone = tcg_constant_i64(-1);
+
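+            /* ci + -1 sets the carry flag iff ci != 0; addcio then
+             * adds a + b with that carry in, and addci of 0 + 0
+             * extracts the carry out into co. */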
+ tcg_gen_op3_i64(INDEX_op_addco, discard, ci, mone);
+ tcg_gen_op3_i64(INDEX_op_addcio, r, a, b);
+ tcg_gen_op3_i64(INDEX_op_addci, co, zero, zero);
+ tcg_temp_free_i64(discard);
+ } else {
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+ TCGv_i64 t1 = tcg_temp_ebb_new_i64();
+
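+            /* An unsigned sum is smaller than an addend iff the
+             * addition carried; accumulate the carry of both adds. */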
+ tcg_gen_add_i64(t0, a, b);
+ tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, a);
+ tcg_gen_add_i64(r, t0, ci);
+ tcg_gen_setcond_i64(TCG_COND_LTU, t0, r, t0);
+ tcg_gen_or_i64(co, t0, t1);
+
+ tcg_temp_free_i64(t0);
+ tcg_temp_free_i64(t1);
+ }
+ } else {
+ if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
+ TCGv_i32 discard = tcg_temp_ebb_new_i32();
+ TCGv_i32 zero = tcg_constant_i32(0);
+ TCGv_i32 mone = tcg_constant_i32(-1);
+
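+            /* Adding -1 to each half propagates (ci != 0) into the
+             * carry flag. */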
+ tcg_gen_op3_i32(INDEX_op_addco, discard, TCGV_LOW(ci), mone);
+ tcg_gen_op3_i32(INDEX_op_addcio, discard, TCGV_HIGH(ci), mone);
+ tcg_gen_op3_i32(INDEX_op_addcio, TCGV_LOW(r),
+ TCGV_LOW(a), TCGV_LOW(b));
+ tcg_gen_op3_i32(INDEX_op_addcio, TCGV_HIGH(r),
+ TCGV_HIGH(a), TCGV_HIGH(b));
+ tcg_gen_op3_i32(INDEX_op_addci, TCGV_LOW(co), zero, zero);
+ tcg_temp_free_i32(discard);
+ } else {
+ TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 c0 = tcg_temp_ebb_new_i32();
+ TCGv_i32 c1 = tcg_temp_ebb_new_i32();
+
+ tcg_gen_or_i32(c1, TCGV_LOW(ci), TCGV_HIGH(ci));
+ tcg_gen_setcondi_i32(TCG_COND_NE, c1, c1, 0);
+
+ tcg_gen_add_i32(t0, TCGV_LOW(a), TCGV_LOW(b));
+ tcg_gen_setcond_i32(TCG_COND_LTU, c0, t0, TCGV_LOW(a));
+ tcg_gen_add_i32(TCGV_LOW(r), t0, c1);
+ tcg_gen_setcond_i32(TCG_COND_LTU, c1, TCGV_LOW(r), c1);
+ tcg_gen_or_i32(c1, c1, c0);
+
+ tcg_gen_add_i32(t0, TCGV_HIGH(a), TCGV_HIGH(b));
+ tcg_gen_setcond_i32(TCG_COND_LTU, c0, t0, TCGV_HIGH(a));
+ tcg_gen_add_i32(TCGV_HIGH(r), t0, c1);
+ tcg_gen_setcond_i32(TCG_COND_LTU, c1, TCGV_HIGH(r), c1);
+ tcg_gen_or_i32(TCGV_LOW(co), c0, c1);
+
+ tcg_temp_free_i32(t0);
+ tcg_temp_free_i32(c0);
+ tcg_temp_free_i32(c1);
+ }
+ tcg_gen_movi_i32(TCGV_HIGH(co), 0);
+ }
+}
+
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
- if (TCG_TARGET_HAS_sub2_i64) {
- tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
+ if (tcg_op_supported(INDEX_op_subbi, TCG_TYPE_REG, 0)) {
+ TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+
+ if (TCG_TARGET_REG_BITS == 32) {
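+            /* Subtract the 128-bit operands as four 32-bit limbs,
+             * chaining the borrow from low to high. */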
+ tcg_gen_op3_i32(INDEX_op_subbo, TCGV_LOW(t0),
+ TCGV_LOW(al), TCGV_LOW(bl));
+ tcg_gen_op3_i32(INDEX_op_subbio, TCGV_HIGH(t0),
+ TCGV_HIGH(al), TCGV_HIGH(bl));
+ tcg_gen_op3_i32(INDEX_op_subbio, TCGV_LOW(rh),
+ TCGV_LOW(ah), TCGV_LOW(bh));
+ tcg_gen_op3_i32(INDEX_op_subbi, TCGV_HIGH(rh),
+ TCGV_HIGH(ah), TCGV_HIGH(bh));
+ } else {
+ tcg_gen_op3_i64(INDEX_op_subbo, t0, al, bl);
+ tcg_gen_op3_i64(INDEX_op_subbi, rh, ah, bh);
+ }
+
+ tcg_gen_mov_i64(rl, t0);
+ tcg_temp_free_i64(t0);
} else {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
@@ -3058,12 +2976,12 @@ void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_mulu2_i64) {
- tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_muluh_i64) {
+ if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I64, 0)) {
+ tcg_gen_op4_i64(INDEX_op_mulu2, rl, rh, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I64, 0)) {
TCGv_i64 t = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
- tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
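+        /* Compute the low half into a temp so that rl may safely
+         * alias the inputs read by muluh. */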
+ tcg_gen_op3_i64(INDEX_op_mul, t, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_muluh, rh, arg1, arg2);
tcg_gen_mov_i64(rl, t);
tcg_temp_free_i64(t);
} else {
@@ -3077,15 +2995,16 @@ void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
- if (TCG_TARGET_HAS_muls2_i64) {
- tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
- } else if (TCG_TARGET_HAS_mulsh_i64) {
+ if (tcg_op_supported(INDEX_op_muls2, TCG_TYPE_I64, 0)) {
+ tcg_gen_op4_i64(INDEX_op_muls2, rl, rh, arg1, arg2);
+ } else if (tcg_op_supported(INDEX_op_mulsh, TCG_TYPE_I64, 0)) {
TCGv_i64 t = tcg_temp_ebb_new_i64();
- tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
- tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_mul, t, arg1, arg2);
+ tcg_gen_op3_i64(INDEX_op_mulsh, rh, arg1, arg2);
tcg_gen_mov_i64(rl, t);
tcg_temp_free_i64(t);
- } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
+ } else if (tcg_op_supported(INDEX_op_mulu2, TCG_TYPE_I64, 0) ||
+ tcg_op_supported(INDEX_op_muluh, TCG_TYPE_I64, 0)) {
TCGv_i64 t0 = tcg_temp_ebb_new_i64();
TCGv_i64 t1 = tcg_temp_ebb_new_i64();
TCGv_i64 t2 = tcg_temp_ebb_new_i64();
@@ -3164,11 +3083,9 @@ void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_mov_i32(ret, TCGV_LOW(arg));
- } else if (TCG_TARGET_HAS_extr_i64_i32) {
+ } else {
tcg_gen_op2(INDEX_op_extrl_i64_i32, TCG_TYPE_I32,
tcgv_i32_arg(ret), tcgv_i64_arg(arg));
- } else {
- tcg_gen_mov_i32(ret, (TCGv_i32)arg);
}
}
@@ -3176,14 +3093,9 @@ void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
if (TCG_TARGET_REG_BITS == 32) {
tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
- } else if (TCG_TARGET_HAS_extr_i64_i32) {
+ } else {
tcg_gen_op2(INDEX_op_extrh_i64_i32, TCG_TYPE_I32,
tcgv_i32_arg(ret), tcgv_i64_arg(arg));
- } else {
- TCGv_i64 t = tcg_temp_ebb_new_i64();
- tcg_gen_shri_i64(t, arg, 32);
- tcg_gen_mov_i32(ret, (TCGv_i32)t);
- tcg_temp_free_i64(t);
}
}