aboutsummaryrefslogtreecommitdiff
path: root/gcc
diff options
context:
space:
mode:
authorRichard Biener <rguenther@suse.de>2023-09-28 11:51:30 +0200
committerRichard Biener <rguenther@suse.de>2023-09-28 15:12:06 +0200
commitf194c684a28a5d449bd034a2c604d04ba465e4fe (patch)
tree3b81ce5cc8dce976a805c779013046d894e5f41d /gcc
parent4f41d497c9eeec6f97a5c240e03c7e5e1a1ec05e (diff)
downloadgcc-f194c684a28a5d449bd034a2c604d04ba465e4fe.zip
gcc-f194c684a28a5d449bd034a2c604d04ba465e4fe.tar.gz
gcc-f194c684a28a5d449bd034a2c604d04ba465e4fe.tar.bz2
target/111600 - avoid deep recursion in access diagnostics
pass_waccess::check_dangling_stores uses recursion to traverse the CFG. The following changes this to use a heap allocated worklist to avoid blowing the stack. Instead of using a better iteration order it tries hard to preserve the current iteration order to avoid new false positives popping up, since the set of stores we keep track of isn't properly modeling flow, so what is diagnosed and what is not is quite random. We are also lacking the ideal RPO compute on the inverted graph that would just ignore reverse unreachable code (as the current iteration scheme does). PR target/111600 * gimple-ssa-warn-access.cc (pass_waccess::check_dangling_stores): Use a heap allocated worklist for CFG traversal instead of recursion.
Diffstat (limited to 'gcc')
-rw-r--r--gcc/gimple-ssa-warn-access.cc51
1 files changed, 32 insertions, 19 deletions
diff --git a/gcc/gimple-ssa-warn-access.cc b/gcc/gimple-ssa-warn-access.cc
index ac07a6f..fcaff128 100644
--- a/gcc/gimple-ssa-warn-access.cc
+++ b/gcc/gimple-ssa-warn-access.cc
@@ -2141,7 +2141,7 @@ private:
void check_dangling_uses (tree, tree, bool = false, bool = false);
void check_dangling_uses ();
void check_dangling_stores ();
- void check_dangling_stores (basic_block, hash_set<tree> &, auto_bitmap &);
+ bool check_dangling_stores (basic_block, hash_set<tree> &);
void warn_invalid_pointer (tree, gimple *, gimple *, tree, bool, bool = false);
@@ -4524,17 +4524,13 @@ pass_waccess::check_dangling_uses (tree var, tree decl, bool maybe /* = false */
/* Diagnose stores in BB and (recursively) its predecessors of the addresses
of local variables into nonlocal pointers that are left dangling after
- the function returns. BBS is a bitmap of basic blocks visited. */
+ the function returns. Returns true when we can continue walking
+ the CFG to predecessors. */
-void
+bool
pass_waccess::check_dangling_stores (basic_block bb,
- hash_set<tree> &stores,
- auto_bitmap &bbs)
+ hash_set<tree> &stores)
{
- if (!bitmap_set_bit (bbs, bb->index))
- /* Avoid cycles. */
- return;
-
/* Iterate backwards over the statements looking for a store of
the address of a local variable into a nonlocal pointer. */
for (auto gsi = gsi_last_nondebug_bb (bb); ; gsi_prev_nondebug (&gsi))
@@ -4550,7 +4546,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
&& !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
/* Avoid looking before nonconst, nonpure calls since those might
use the escaped locals. */
- return;
+ return false;
if (!is_gimple_assign (stmt) || gimple_clobber_p (stmt)
|| !gimple_store_p (stmt))
@@ -4576,7 +4572,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
gimple *def_stmt = SSA_NAME_DEF_STMT (lhs_ref.ref);
if (!gimple_nop_p (def_stmt))
/* Avoid looking at or before stores into unknown objects. */
- return;
+ return false;
lhs_ref.ref = SSA_NAME_VAR (lhs_ref.ref);
}
@@ -4620,13 +4616,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
}
}
- edge e;
- edge_iterator ei;
- FOR_EACH_EDGE (e, ei, bb->preds)
- {
- basic_block pred = e->src;
- check_dangling_stores (pred, stores, bbs);
- }
+ return true;
}
/* Diagnose stores of the addresses of local variables into nonlocal
@@ -4635,9 +4625,32 @@ pass_waccess::check_dangling_stores (basic_block bb,
void
pass_waccess::check_dangling_stores ()
{
+ if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds) == 0)
+ return;
+
auto_bitmap bbs;
hash_set<tree> stores;
- check_dangling_stores (EXIT_BLOCK_PTR_FOR_FN (m_func), stores, bbs);
+ auto_vec<edge_iterator, 8> worklist (n_basic_blocks_for_fn (cfun) + 1);
+ worklist.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds));
+ do
+ {
+ edge_iterator ei = worklist.last ();
+ basic_block src = ei_edge (ei)->src;
+ if (bitmap_set_bit (bbs, src->index))
+ {
+ if (check_dangling_stores (src, stores)
+ && EDGE_COUNT (src->preds) > 0)
+ worklist.quick_push (ei_start (src->preds));
+ }
+ else
+ {
+ if (ei_one_before_end_p (ei))
+ worklist.pop ();
+ else
+ ei_next (&worklist.last ());
+ }
+ }
+ while (!worklist.is_empty ());
}
/* Check for and diagnose uses of dangling pointers to auto objects