aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRichard Biener <rguenther@suse.de>2023-10-31 10:13:13 +0100
committerRichard Biener <rguenther@suse.de>2023-10-31 13:10:04 +0100
commite3da1d7bb288c8c864f0284bc4bc5877b466a2f7 (patch)
tree62538be0a67da1b32b62660c4977d05a7da05c4d
parent1cf5dc05c678232b52ac0572a94b891131ef48f0 (diff)
downloadgcc-e3da1d7bb288c8c864f0284bc4bc5877b466a2f7.zip
gcc-e3da1d7bb288c8c864f0284bc4bc5877b466a2f7.tar.gz
gcc-e3da1d7bb288c8c864f0284bc4bc5877b466a2f7.tar.bz2
tree-optimization/112305 - SCEV cprop and conditional undefined overflow
The following adjusts final value replacement to also rewrite the
replacement to defined overflow behavior if there's conditionally
evaluated stmts (with possibly undefined overflow), not only when we
"folded casts".  The patch hooks into expression_expensive for this.

	PR tree-optimization/112305
	* tree-scalar-evolution.h (expression_expensive): Adjust.
	* tree-scalar-evolution.cc (expression_expensive): Record when
	we see a COND_EXPR.
	(final_value_replacement_loop): When the replacement contains
	a COND_EXPR, rewrite it to defined overflow.
	* tree-ssa-loop-ivopts.cc (may_eliminate_iv): Adjust.

	* gcc.dg/torture/pr112305.c: New testcase.
-rw-r--r--gcc/testsuite/gcc.dg/torture/pr112305.c18
-rw-r--r--gcc/tree-scalar-evolution.cc59
-rw-r--r--gcc/tree-scalar-evolution.h2
-rw-r--r--gcc/tree-ssa-loop-ivopts.cc3
4 files changed, 56 insertions, 26 deletions
diff --git a/gcc/testsuite/gcc.dg/torture/pr112305.c b/gcc/testsuite/gcc.dg/torture/pr112305.c
new file mode 100644
index 0000000..9d363aa
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr112305.c
@@ -0,0 +1,18 @@
+/* { dg-do run } */
+/* { dg-require-effective-target int32plus } */
+
+int a;
+void b()
+{
+ long c = 3;
+ unsigned int d = 50253292;
+ int e = 2147483648;
+ for (; a < 5; a++)
+ do {
+ e += 4;
+ d -= c;
+ } while (e < 20);
+ if (d != -1560359471u)
+ __builtin_abort ();
+}
+int main() { b(); }
diff --git a/gcc/tree-scalar-evolution.cc b/gcc/tree-scalar-evolution.cc
index 95a15fe..a6524de 100644
--- a/gcc/tree-scalar-evolution.cc
+++ b/gcc/tree-scalar-evolution.cc
@@ -3353,11 +3353,13 @@ scev_finalize (void)
}
/* Returns true if the expression EXPR is considered to be too expensive
- for scev_const_prop. */
+ for scev_const_prop. Sets *COND_OVERFLOW_P to true when the
+ expression might contain a sub-expression that is subject to undefined
+ overflow behavior and conditionally evaluated. */
static bool
-expression_expensive_p (tree expr, hash_map<tree, uint64_t> &cache,
- uint64_t &cost)
+expression_expensive_p (tree expr, bool *cond_overflow_p,
+ hash_map<tree, uint64_t> &cache, uint64_t &cost)
{
enum tree_code code;
@@ -3444,7 +3446,7 @@ bitcount_call:
}
FOR_EACH_CALL_EXPR_ARG (arg, iter, expr)
- if (expression_expensive_p (arg, cache, op_cost))
+ if (expression_expensive_p (arg, cond_overflow_p, cache, op_cost))
return true;
*cache.get (expr) += op_cost;
cost += op_cost + 1;
@@ -3453,7 +3455,8 @@ bitcount_call:
if (code == COND_EXPR)
{
- if (expression_expensive_p (TREE_OPERAND (expr, 0), cache, op_cost)
+ if (expression_expensive_p (TREE_OPERAND (expr, 0), cond_overflow_p,
+ cache, op_cost)
|| (EXPR_P (TREE_OPERAND (expr, 1))
&& EXPR_P (TREE_OPERAND (expr, 2)))
/* If either branch has side effects or could trap. */
@@ -3461,11 +3464,13 @@ bitcount_call:
|| generic_expr_could_trap_p (TREE_OPERAND (expr, 1))
|| TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 0))
|| generic_expr_could_trap_p (TREE_OPERAND (expr, 0))
- || expression_expensive_p (TREE_OPERAND (expr, 1),
+ || expression_expensive_p (TREE_OPERAND (expr, 1), cond_overflow_p,
cache, op_cost)
- || expression_expensive_p (TREE_OPERAND (expr, 2),
+ || expression_expensive_p (TREE_OPERAND (expr, 2), cond_overflow_p,
cache, op_cost))
return true;
+ /* Conservatively assume there's overflow for now. */
+ *cond_overflow_p = true;
*cache.get (expr) += op_cost;
cost += op_cost + 1;
return false;
@@ -3475,12 +3480,14 @@ bitcount_call:
{
case tcc_binary:
case tcc_comparison:
- if (expression_expensive_p (TREE_OPERAND (expr, 1), cache, op_cost))
+ if (expression_expensive_p (TREE_OPERAND (expr, 1), cond_overflow_p,
+ cache, op_cost))
return true;
/* Fallthru. */
case tcc_unary:
- if (expression_expensive_p (TREE_OPERAND (expr, 0), cache, op_cost))
+ if (expression_expensive_p (TREE_OPERAND (expr, 0), cond_overflow_p,
+ cache, op_cost))
return true;
*cache.get (expr) += op_cost;
cost += op_cost + 1;
@@ -3492,11 +3499,12 @@ bitcount_call:
}
bool
-expression_expensive_p (tree expr)
+expression_expensive_p (tree expr, bool *cond_overflow_p)
{
hash_map<tree, uint64_t> cache;
uint64_t expanded_size = 0;
- return (expression_expensive_p (expr, cache, expanded_size)
+ *cond_overflow_p = false;
+ return (expression_expensive_p (expr, cond_overflow_p, cache, expanded_size)
|| expanded_size > cache.elements ());
}
@@ -3796,6 +3804,7 @@ final_value_replacement_loop (class loop *loop)
niter_num)))
def = bit_def;
+ bool cond_overflow_p;
if (!tree_does_not_contain_chrecs (def)
|| chrec_contains_symbols_defined_in_loop (def, ex_loop->num)
/* Moving the computation from the loop may prolong life range
@@ -3809,7 +3818,7 @@ final_value_replacement_loop (class loop *loop)
he probably knows that n is not large, and does not want it
to be turned into n %= 45. */
- || expression_expensive_p (def))
+ || expression_expensive_p (def, &cond_overflow_p))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -3834,15 +3843,23 @@ final_value_replacement_loop (class loop *loop)
def = unshare_expr (def);
remove_phi_node (&psi, false);
+ /* Create the replacement statements. */
+ gimple_seq stmts;
+ def = force_gimple_operand (def, &stmts, false, NULL_TREE);
+ gassign *ass = gimple_build_assign (rslt, def);
+ gimple_set_location (ass,
+ gimple_phi_arg_location (phi, exit->dest_idx));
+ gimple_seq_add_stmt (&stmts, ass);
+
/* If def's type has undefined overflow and there were folded
casts, rewrite all stmts added for def into arithmetics
with defined overflow behavior. */
- if (folded_casts && ANY_INTEGRAL_TYPE_P (TREE_TYPE (def))
- && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (def)))
+ if ((folded_casts
+ && ANY_INTEGRAL_TYPE_P (TREE_TYPE (def))
+ && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (def)))
+ || cond_overflow_p)
{
- gimple_seq stmts;
gimple_stmt_iterator gsi2;
- def = force_gimple_operand (def, &stmts, true, NULL_TREE);
gsi2 = gsi_start (stmts);
while (!gsi_end_p (gsi2))
{
@@ -3861,17 +3878,11 @@ final_value_replacement_loop (class loop *loop)
}
}
else
- def = force_gimple_operand_gsi (&gsi, def, false, NULL_TREE,
- true, GSI_SAME_STMT);
-
- gassign *ass = gimple_build_assign (rslt, def);
- gimple_set_location (ass,
- gimple_phi_arg_location (phi, exit->dest_idx));
- gsi_insert_before (&gsi, ass, GSI_SAME_STMT);
+ gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
if (dump_file)
{
fprintf (dump_file, "\n final stmt:\n ");
- print_gimple_stmt (dump_file, ass, 0);
+ print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (rslt), 0);
fprintf (dump_file, "\n");
}
}
diff --git a/gcc/tree-scalar-evolution.h b/gcc/tree-scalar-evolution.h
index f35ca1b..a64ed78 100644
--- a/gcc/tree-scalar-evolution.h
+++ b/gcc/tree-scalar-evolution.h
@@ -36,7 +36,7 @@ extern tree resolve_mixers (class loop *, tree, bool *);
extern void gather_stats_on_scev_database (void);
extern bool final_value_replacement_loop (class loop *);
extern unsigned int scev_const_prop (void);
-extern bool expression_expensive_p (tree);
+extern bool expression_expensive_p (tree, bool *);
extern bool simple_iv_with_niters (class loop *, class loop *, tree,
struct affine_iv *, tree *, bool);
extern bool simple_iv (class loop *, class loop *, tree, struct affine_iv *,
diff --git a/gcc/tree-ssa-loop-ivopts.cc b/gcc/tree-ssa-loop-ivopts.cc
index 98e5b30..c333660 100644
--- a/gcc/tree-ssa-loop-ivopts.cc
+++ b/gcc/tree-ssa-loop-ivopts.cc
@@ -5527,7 +5527,8 @@ may_eliminate_iv (struct ivopts_data *data,
/* It is unlikely that computing the number of iterations using division
would be more profitable than keeping the original induction variable. */
- if (expression_expensive_p (*bound))
+ bool cond_overflow_p;
+ if (expression_expensive_p (*bound, &cond_overflow_p))
return false;
/* Sometimes, it is possible to handle the situation that the number of