public inbox for gcc-patches@gcc.gnu.org
[PATCH] target/111600 - avoid deep recursion in access diagnostics
From: Richard Biener @ 2023-09-28 13:13 UTC
  To: gcc-patches

pass_waccess::check_dangling_stores uses recursion to traverse the CFG.
The following changes this to use a heap-allocated worklist instead,
to avoid blowing the stack.

Instead of switching to a better iteration order, the change tries hard
to preserve the current one to avoid new false positives from popping
up: the set of stores we keep track of does not properly model flow,
so what is and is not diagnosed is quite random.  We also lack the
ideal RPO computation on the inverted graph that would simply ignore
reverse-unreachable code (as the current iteration scheme does).
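
For illustration only (not part of the patch): below is a minimal,
self-contained C++ sketch of the same recursion-to-worklist conversion
on a plain adjacency-list graph.  The names (walk_preds, Graph, visit)
and the representation are made up for the example and do not
correspond to GCC internals; each worklist entry remembers which
block's predecessor list is being iterated and how far the iteration
got, much like the edge_iterator entries pushed in the patch.

#include <cstddef>
#include <cstdio>
#include <utility>
#include <vector>

/* Hypothetical stand-in for a CFG: preds[b] lists the predecessors
   of block b.  */
using Graph = std::vector<std::vector<int>>;

/* Walk all blocks reachable backwards from EXIT_BLOCK, calling VISIT
   once per block, using an explicit heap-allocated worklist instead
   of recursion.  */
template <typename Visit>
void
walk_preds (const Graph &preds, int exit_block, Visit visit)
{
  std::vector<bool> visited (preds.size (), false);
  /* (block whose predecessor list we iterate, index into that list)  */
  std::vector<std::pair<int, size_t>> worklist;
  worklist.emplace_back (exit_block, 0);
  visited[exit_block] = true;

  while (!worklist.empty ())
    {
      auto &[block, idx] = worklist.back ();
      if (idx == preds[block].size ())
	{
	  /* Exhausted this block's predecessors; pop it.  */
	  worklist.pop_back ();
	  continue;
	}
      int pred = preds[block][idx++];
      if (visited[pred])
	continue;
      visited[pred] = true;
      /* Only keep walking past the predecessor if the visitor says so,
	 the analogue of check_dangling_stores returning true.  */
      if (visit (pred) && !preds[pred].empty ())
	worklist.emplace_back (pred, 0);
    }
}

int
main ()
{
  /* Tiny example: 0 -> 1 -> 3, 0 -> 2 -> 3, with block 3 as "exit".  */
  Graph preds = { {}, {0}, {0}, {1, 2} };
  walk_preds (preds, 3,
	      [] (int b) { printf ("visiting block %d\n", b); return true; });
}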

Bootstrapped and tested on x86_64-unknown-linux-gnu; with this change
8MB of stack is now enough to build riscv insn-opinit.cc.

Pushed.

	PR target/111600
	* gimple-ssa-warn-access.cc (pass_waccess::check_dangling_stores):
	Use a heap-allocated worklist for CFG traversal instead of
	recursion.
---
 gcc/gimple-ssa-warn-access.cc | 51 ++++++++++++++++++++++-------------
 1 file changed, 32 insertions(+), 19 deletions(-)

diff --git a/gcc/gimple-ssa-warn-access.cc b/gcc/gimple-ssa-warn-access.cc
index ac07a6f9b95..fcaff128d60 100644
--- a/gcc/gimple-ssa-warn-access.cc
+++ b/gcc/gimple-ssa-warn-access.cc
@@ -2141,7 +2141,7 @@ private:
   void check_dangling_uses (tree, tree, bool = false, bool = false);
   void check_dangling_uses ();
   void check_dangling_stores ();
-  void check_dangling_stores (basic_block, hash_set<tree> &, auto_bitmap &);
+  bool check_dangling_stores (basic_block, hash_set<tree> &);
 
   void warn_invalid_pointer (tree, gimple *, gimple *, tree, bool, bool = false);
 
@@ -4524,17 +4524,13 @@ pass_waccess::check_dangling_uses (tree var, tree decl, bool maybe /* = false */
 
 /* Diagnose stores in BB and (recursively) its predecessors of the addresses
    of local variables into nonlocal pointers that are left dangling after
-   the function returns.  BBS is a bitmap of basic blocks visited.  */
+   the function returns.  Returns true when we can continue walking
+   the CFG to predecessors.  */
 
-void
+bool
 pass_waccess::check_dangling_stores (basic_block bb,
-				     hash_set<tree> &stores,
-				     auto_bitmap &bbs)
+				     hash_set<tree> &stores)
 {
-  if (!bitmap_set_bit (bbs, bb->index))
-    /* Avoid cycles. */
-    return;
-
   /* Iterate backwards over the statements looking for a store of
      the address of a local variable into a nonlocal pointer.  */
   for (auto gsi = gsi_last_nondebug_bb (bb); ; gsi_prev_nondebug (&gsi))
@@ -4550,7 +4546,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
 	  && !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
 	/* Avoid looking before nonconst, nonpure calls since those might
 	   use the escaped locals.  */
-	return;
+	return false;
 
       if (!is_gimple_assign (stmt) || gimple_clobber_p (stmt)
 	  || !gimple_store_p (stmt))
@@ -4576,7 +4572,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
 	  gimple *def_stmt = SSA_NAME_DEF_STMT (lhs_ref.ref);
 	  if (!gimple_nop_p (def_stmt))
 	    /* Avoid looking at or before stores into unknown objects.  */
-	    return;
+	    return false;
 
 	  lhs_ref.ref = SSA_NAME_VAR (lhs_ref.ref);
 	}
@@ -4620,13 +4616,7 @@ pass_waccess::check_dangling_stores (basic_block bb,
 	}
     }
 
-  edge e;
-  edge_iterator ei;
-  FOR_EACH_EDGE (e, ei, bb->preds)
-    {
-      basic_block pred = e->src;
-      check_dangling_stores (pred, stores, bbs);
-    }
+  return true;
 }
 
 /* Diagnose stores of the addresses of local variables into nonlocal
@@ -4635,9 +4625,32 @@ pass_waccess::check_dangling_stores (basic_block bb,
 void
 pass_waccess::check_dangling_stores ()
 {
+  if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds) == 0)
+    return;
+
   auto_bitmap bbs;
   hash_set<tree> stores;
-  check_dangling_stores (EXIT_BLOCK_PTR_FOR_FN (m_func), stores, bbs);
+  auto_vec<edge_iterator, 8> worklist (n_basic_blocks_for_fn (cfun) + 1);
+  worklist.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (m_func)->preds));
+  do
+    {
+      edge_iterator ei = worklist.last ();
+      basic_block src = ei_edge (ei)->src;
+      if (bitmap_set_bit (bbs, src->index))
+	{
+	  if (check_dangling_stores (src, stores)
+	      && EDGE_COUNT (src->preds) > 0)
+	    worklist.quick_push (ei_start (src->preds));
+	}
+      else
+	{
+	  if (ei_one_before_end_p (ei))
+	    worklist.pop ();
+	  else
+	    ei_next (&worklist.last ());
+	}
+    }
+  while (!worklist.is_empty ());
 }
 
 /* Check for and diagnose uses of dangling pointers to auto objects
-- 
2.35.3
