commit 1ca7372c033d8d958add8f4f4c7d8e37c06e6ef7
tree   866a14f62375afbabe68ee3a07b191845639bf60 /tcg
parent e1b6c141e98034d44d7e9004dc35545b87ebcade
author    Richard Henderson <richard.henderson@linaro.org>  2024-12-08 18:47:15 -0600
committer Richard Henderson <richard.henderson@linaro.org>  2024-12-24 08:32:14 -0800
tcg/optimize: Use fold_masks_zs in fold_and
Avoid the use of the OptContext slots. Find TempOptInfo once.
Sink the mask computation below the fold_affected_mask early exit.
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
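
The masks named in the message follow the conventions of tcg/optimize.c: a set
bit in z_mask means that value bit may be nonzero (clear means known zero),
and a set bit in s_mask means that bit is guaranteed to repeat the sign bit.
As a minimal standalone sketch of the AND propagation that fold_masks_zs now
receives -- KnownBits and and_masks are illustrative names only, not part of
the QEMU source, which works on TempOptInfo via OptContext:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Hypothetical standalone model of the mask propagation in fold_and().
 * z_mask: a set bit means the value bit may be nonzero (clear = known zero).
 * s_mask: a set bit is guaranteed to repeat the sign bit.
 */
typedef struct {
    uint64_t z_mask;
    uint64_t s_mask;
} KnownBits;

static KnownBits and_masks(KnownBits a, KnownBits b)
{
    KnownBits r;

    /* A result bit can be nonzero only if it may be nonzero in both inputs. */
    r.z_mask = a.z_mask & b.z_mask;

    /*
     * Sign repetitions are all identical, whether 1 or 0, so AND keeps
     * at least the shorter of the two sign-repeated runs.
     */
    r.s_mask = a.s_mask & b.s_mask;
    return r;
}

int main(void)
{
    /* arg1 fits in 8 bits; arg2 masks to 4 bits. */
    KnownBits a = { .z_mask = 0xff, .s_mask = ~(uint64_t)0x7f };
    KnownBits b = { .z_mask = 0x0f, .s_mask = ~(uint64_t)0x07 };
    KnownBits r = and_masks(a, b);

    printf("z_mask=%#" PRIx64 " s_mask=%#" PRIx64 "\n", r.z_mask, r.s_mask);
    return 0;
}

Compiled alone, this prints z_mask=0xf s_mask=0xffffffffffffff80: the result
of masking an 8-bit value to 4 bits may only be nonzero in the low 4 bits, and
the intersection of the two sign-repetition runs is the shorter one.
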
Diffstat (limited to 'tcg')
 tcg/optimize.c | 30 ++++++++++++++++--------------
 1 file changed, 16 insertions(+), 14 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 5090f6e..4a5b529 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -1294,7 +1294,8 @@ static bool fold_add2(OptContext *ctx, TCGOp *op)
 
 static bool fold_and(OptContext *ctx, TCGOp *op)
 {
-    uint64_t z1, z2;
+    uint64_t z1, z2, z_mask, s_mask;
+    TempOptInfo *t1, *t2;
 
     if (fold_const2_commutative(ctx, op) ||
         fold_xi_to_i(ctx, op, 0) ||
@@ -1303,27 +1304,28 @@ static bool fold_and(OptContext *ctx, TCGOp *op)
         return true;
     }
 
-    z1 = arg_info(op->args[1])->z_mask;
-    z2 = arg_info(op->args[2])->z_mask;
-    ctx->z_mask = z1 & z2;
-
-    /*
-     * Sign repetitions are perforce all identical, whether they are 1 or 0.
-     * Bitwise operations preserve the relative quantity of the repetitions.
-     */
-    ctx->s_mask = arg_info(op->args[1])->s_mask
-                & arg_info(op->args[2])->s_mask;
+    t1 = arg_info(op->args[1]);
+    t2 = arg_info(op->args[2]);
+    z1 = t1->z_mask;
+    z2 = t2->z_mask;
 
     /*
      * Known-zeros does not imply known-ones.  Therefore unless
      * arg2 is constant, we can't infer affected bits from it.
      */
-    if (arg_is_const(op->args[2]) &&
-        fold_affected_mask(ctx, op, z1 & ~z2)) {
+    if (ti_is_const(t2) && fold_affected_mask(ctx, op, z1 & ~z2)) {
         return true;
     }
 
-    return fold_masks(ctx, op);
+    z_mask = z1 & z2;
+
+    /*
+     * Sign repetitions are perforce all identical, whether they are 1 or 0.
+     * Bitwise operations preserve the relative quantity of the repetitions.
+     */
+    s_mask = t1->s_mask & t2->s_mask;
+
+    return fold_masks_zs(ctx, op, z_mask, s_mask);
 }
 
 static bool fold_andc(OptContext *ctx, TCGOp *op)
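
On the early exit that the patch sinks the mask computation below: when arg2
is constant, its z_mask is just the constant value, so z1 & ~z2 collects
exactly the possibly-nonzero bits of arg1 that the mask would clear. If that
is zero, the AND cannot change the value, which is the a_mask == 0 case that
fold_affected_mask reduces to a move. A hedged sketch of just that predicate
(and_is_nop is a hypothetical name; the real helper also rewrites the op in
place rather than merely answering yes or no):

#include <stdbool.h>
#include <stdint.h>

/*
 * Hypothetical model of the test gated by ti_is_const(t2) above.  If no
 * possibly-nonzero bit of arg1 (z1) is cleared by the constant mask
 * (z2_const), the AND is a no-op on the value.
 */
static bool and_is_nop(uint64_t z1, uint64_t z2_const)
{
    /* Bits that might be nonzero in arg1 but are cleared by the mask. */
    return (z1 & ~z2_const) == 0;
}

/*
 * Example: if arg1 is known to fit in 16 bits (z1 = 0xffff), then
 * "and x, arg1, 0xffff" clears nothing, and the optimizer can replace
 * the op with "mov x, arg1".
 */

Sinking the z_mask/s_mask computation below this exit, as the second hunk
does, avoids computing masks that are discarded when the op folds to a move.
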