* [gcc(refs/users/hubicka/heads/honza-gcc-benchmark-branch-v2)] Constraint generation rewrite
@ 2021-09-18 16:05 Jan Hubicka
From: Jan Hubicka @ 2021-09-18 16:05 UTC
  To: gcc-cvs

https://gcc.gnu.org/g:d511843fad1a6f933bfcdc713ab13d9910d2cdc4

commit d511843fad1a6f933bfcdc713ab13d9910d2cdc4
Author: Jan Hubicka <jh@suse.cz>
Date:   Sat Sep 18 18:05:15 2021 +0200

    Constraint generation rewrite

Diff:
---
 gcc/ipa-modref-tree.h      |  25 ++
 gcc/ipa-modref.c           | 186 ++++++++++++---
 gcc/ipa-modref.h           |  14 ++
 gcc/tree-ssa-structalias.c | 577 +++++++++++++++++++++++----------------------
 4 files changed, 481 insertions(+), 321 deletions(-)

diff --git a/gcc/ipa-modref-tree.h b/gcc/ipa-modref-tree.h
index 6a9ed5ce54b..a874b7ff19f 100644
--- a/gcc/ipa-modref-tree.h
+++ b/gcc/ipa-modref-tree.h
@@ -1008,6 +1008,31 @@ struct GTY((user)) modref_tree
     return NULL;
   }
 
+  /* Return true if tree contains access to global memory.  */
+  bool global_access_p ()
+  {
+    size_t i, j, k;
+    modref_base_node <T> *base_node;
+    modref_ref_node <T> *ref_node;
+    modref_access_node *access_node;
+    if (every_base)
+      return true;
+    FOR_EACH_VEC_SAFE_ELT (bases, i, base_node)
+      {
+	if (base_node->every_ref)
+	  return true;
+	FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
+	  {
+	    if (ref_node->every_access)
+	      return true;
+	    FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
+	      if (access_node->parm_index < 0)
+		return true;
+	  }
+      }
+    return false;
+  }
+
   /* Return ggc allocated instance.  We explicitly call destructors via
      ggc_delete and do not want finalizers to be registered and
      called at the garbage collection time.  */
diff --git a/gcc/ipa-modref.c b/gcc/ipa-modref.c
index 6d49cc1410e..6edb172bbe9 100644
--- a/gcc/ipa-modref.c
+++ b/gcc/ipa-modref.c
@@ -268,7 +268,7 @@ static GTY(()) fast_function_summary <modref_summary_lto *, va_gc>
 /* Summary for a single function which this pass produces.  */
 
 modref_summary::modref_summary ()
-  : loads (NULL), stores (NULL), writes_errno (NULL)
+  : loads (NULL), stores (NULL), writes_errno (false), takes_address (false)
 {
 }
 
@@ -280,17 +280,6 @@ modref_summary::~modref_summary ()
     ggc_delete (stores);
 }
 
-/* All flags that are implied by the ECF_CONST functions.  */
-const int implicit_const_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
-				     | EAF_NODIRECTESCAPE | EAF_NOREAD;
-/* All flags that are implied by the ECF_PURE function.  */
-const int implicit_pure_eaf_flags = EAF_NOCLOBBER | EAF_NOESCAPE
-				    | EAF_NODIRECTESCAPE;
-/* All flags implied when we know we can ignore stores (i.e. when handling
-   call to noreturn).  */
-const int ignore_stores_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
-				    | EAF_NODIRECTESCAPE;
-
 /* Remove all flags from EAF_FLAGS that are implied by ECF_FLAGS and not
    useful to track.  If returns_void is true moreover clear
    EAF_NOT_RETURNED.  */
@@ -305,10 +294,6 @@ remove_useless_eaf_flags (int eaf_flags, int ecf_flags, bool returns_void)
     eaf_flags &= ~implicit_pure_eaf_flags;
   else if ((ecf_flags & ECF_NORETURN) || returns_void)
     eaf_flags &= ~EAF_NOT_RETURNED;
-  /* Only NOCLOBBER or DIRECT flags alone are not useful (see comments
-     in tree-ssa-alias.c).  Give up earlier.  */
-  if ((eaf_flags & ~(EAF_DIRECT | EAF_NOCLOBBER)) == 0)
-    return 0;
   return eaf_flags;
 }
 
@@ -329,6 +314,8 @@ eaf_flags_useful_p (vec <eaf_flags_t> &flags, int ecf_flags)
 bool
 modref_summary::useful_p (int ecf_flags, bool check_flags)
 {
+  if (!takes_address)
+    return true;
   if (ecf_flags & ECF_NOVOPS)
     return false;
   if (arg_flags.length () && !check_flags)
@@ -345,6 +332,22 @@ modref_summary::useful_p (int ecf_flags, bool check_flags)
   return stores && !stores->every_base;
 }
 
+bool
+modref_summary::global_memory_read_p ()
+{
+  if (!loads)
+    return true;
+  return loads->global_access_p ();
+}
+
+bool
+modref_summary::global_memory_written_p ()
+{
+  if (!stores)
+    return true;
+  return stores->global_access_p ();
+}
+
 /* Single function summary used for LTO.  */
 
 typedef modref_tree <tree> modref_records_lto;
@@ -358,6 +361,7 @@ struct GTY(()) modref_summary_lto
   modref_records_lto *stores;
   auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
   bool writes_errno;
+  bool takes_address;
 
   modref_summary_lto ();
   ~modref_summary_lto ();
@@ -368,7 +372,7 @@ struct GTY(()) modref_summary_lto
 /* Summary for a single function which this pass produces.  */
 
 modref_summary_lto::modref_summary_lto ()
-  : loads (NULL), stores (NULL), writes_errno (NULL)
+  : loads (NULL), stores (NULL), writes_errno (false), takes_address (false)
 {
 }
 
@@ -387,6 +391,8 @@ modref_summary_lto::~modref_summary_lto ()
 bool
 modref_summary_lto::useful_p (int ecf_flags, bool check_flags)
 {
+  if (!takes_address)
+    return true;
   if (ecf_flags & ECF_NOVOPS)
     return false;
   if (arg_flags.length () && !check_flags)
@@ -593,6 +599,8 @@ modref_summary::dump (FILE *out)
     }
   if (writes_errno)
     fprintf (out, "  Writes errno\n");
+  if (takes_address)
+    fprintf (out, "  Takes address\n");
   if (arg_flags.length ())
     {
       for (unsigned int i = 0; i < arg_flags.length (); i++)
@@ -615,6 +623,8 @@ modref_summary_lto::dump (FILE *out)
   dump_lto_records (stores, out);
   if (writes_errno)
     fprintf (out, "  Writes errno\n");
+  if (takes_address)
+    fprintf (out, "  Takes address\n");
   if (arg_flags.length ())
     {
       for (unsigned int i = 0; i < arg_flags.length (); i++)
@@ -921,6 +931,15 @@ merge_call_side_effects (modref_summary *cur_summary,
 	  changed = true;
 	}
     }
+  if (!ignore_stores || gimple_call_lhs (stmt))
+    {
+      if (!cur_summary->takes_address
+	  && callee_summary->takes_address)
+	{
+	  cur_summary->takes_address = true;
+	  changed = true;
+	}
+    }
   return changed;
 }
 
@@ -1013,9 +1032,28 @@ collapse_stores (modref_summary *cur_summary,
 static bool
 process_fnspec (modref_summary *cur_summary,
 		modref_summary_lto *cur_summary_lto,
-		gcall *call, bool ignore_stores)
+		gcall *call, bool ignore_stores,
+		int flags)
 {
   attr_fnspec fnspec = gimple_call_fnspec (call);
+  if ((!ignore_stores || gimple_call_lhs (call))
+      && !gimple_call_internal_p (call))
+    {
+      if (cur_summary)
+	cur_summary->takes_address = true;
+      if (cur_summary_lto)
+	cur_summary_lto->takes_address = true;
+      if (dump_file)
+	fprintf (dump_file, " - may take address.\n");
+    }
+  if (flags & (ECF_CONST | ECF_NOVOPS))
+    {
+      if (dump_file)
+	fprintf (dump_file,
+		 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
+		 "except for args.\n");
+      return true;
+    }
   if (!fnspec.known_p ())
     {
       if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
@@ -1116,14 +1154,6 @@ analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
   /* Check flags on the function call.  In certain cases, analysis can be
      simplified.  */
   int flags = gimple_call_flags (stmt);
-  if (flags & (ECF_CONST | ECF_NOVOPS))
-    {
-      if (dump_file)
-	fprintf (dump_file,
-		 " - ECF_CONST | ECF_NOVOPS, ignoring all stores and all loads "
-		 "except for args.\n");
-      return true;
-    }
 
   /* Pure functions do not affect global memory.  Stores by functions which are
      noreturn and do not throw can safely be ignored.  */
@@ -1139,7 +1169,8 @@ analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
       if (dump_file)
 	fprintf (dump_file, gimple_call_internal_p (stmt)
 		 ? " - Internal call" : " - Indirect call.\n");
-      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
+      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores,
+			     flags);
     }
   /* We only need to handle internal calls in IPA mode.  */
   gcc_checking_assert (!cur_summary_lto);
@@ -1165,7 +1196,8 @@ analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
     {
       if (dump_file)
 	fprintf (dump_file, " - Function availability <= AVAIL_INTERPOSABLE.\n");
-      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
+      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores,
+	 		     flags);
     }
 
   /* Get callee's modref summary.  As above, if there's no summary, we either
@@ -1175,7 +1207,8 @@ analyze_call (modref_summary *cur_summary, modref_summary_lto *cur_summary_lto,
     {
       if (dump_file)
 	fprintf (dump_file, " - No modref summary available for callee.\n");
-      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores);
+      return process_fnspec (cur_summary, cur_summary_lto, stmt, ignore_stores,
+	  		     flags);
     }
 
   merge_call_side_effects (cur_summary, stmt, callee_summary, ignore_stores,
@@ -1248,6 +1281,23 @@ analyze_store (gimple *, tree, tree op, void *data)
   return false;
 }
 
+static bool
+analyze_address (gimple *, tree in_addr, tree, void *data)
+{
+  tree addr = get_base_address (in_addr);
+  if (!VAR_P (addr) || (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
+    {
+      modref_summary *summary = ((summary_ptrs *)data)->nolto;
+      modref_summary_lto *summary_lto = ((summary_ptrs *)data)->lto;
+
+      if (summary)
+	summary->takes_address = true;
+      if (summary_lto)
+	summary_lto->takes_address = true;
+    }
+  return false;
+}
+
 /* Analyze statement STMT of function F.
    If IPA is true do not merge in side effects of calls.  */
 
@@ -1265,8 +1315,8 @@ analyze_stmt (modref_summary *summary, modref_summary_lto *summary_lto,
   struct summary_ptrs sums = {summary, summary_lto};
 
   /* Analyze all loads and stores in STMT.  */
-  walk_stmt_load_store_ops (stmt, &sums,
-			    analyze_load, analyze_store);
+  walk_stmt_load_store_addr_ops (stmt, &sums,
+			         analyze_load, analyze_store, analyze_address);
 
   switch (gimple_code (stmt))
    {
@@ -2097,6 +2147,7 @@ analyze_function (function *f, bool ipa)
 						    param_modref_max_refs,
 						    param_modref_max_accesses);
       summary->writes_errno = false;
+      summary->takes_address = false;
     }
   if (lto)
     {
@@ -2111,6 +2162,7 @@ analyze_function (function *f, bool ipa)
 				  param_modref_max_refs,
 				  param_modref_max_accesses);
       summary_lto->writes_errno = false;
+      summary_lto->takes_address = false;
     }
 
   analyze_parms (summary, summary_lto, ipa);
@@ -2278,6 +2330,7 @@ modref_summaries::duplicate (cgraph_node *, cgraph_node *dst,
 			 src_data->loads->max_accesses);
   dst_data->loads->copy_from (src_data->loads);
   dst_data->writes_errno = src_data->writes_errno;
+  dst_data->takes_address = src_data->takes_address;
   if (src_data->arg_flags.length ())
     dst_data->arg_flags = src_data->arg_flags.copy ();
 }
@@ -2303,6 +2356,7 @@ modref_summaries_lto::duplicate (cgraph_node *, cgraph_node *,
 			 src_data->loads->max_accesses);
   dst_data->loads->copy_from (src_data->loads);
   dst_data->writes_errno = src_data->writes_errno;
+  dst_data->takes_address = src_data->takes_address;
   if (src_data->arg_flags.length ())
     dst_data->arg_flags = src_data->arg_flags.copy ();
 }
@@ -2627,6 +2681,7 @@ modref_write ()
 
 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
 	  bp_pack_value (&bp, r->writes_errno, 1);
+	  bp_pack_value (&bp, r->takes_address, 1);
 	  if (!flag_wpa)
 	    {
 	      for (cgraph_edge *e = cnode->indirect_calls;
@@ -2699,6 +2754,10 @@ read_section (struct lto_file_decl_data *file_data, const char *data,
 	modref_sum->writes_errno = false;
       if (modref_sum_lto)
 	modref_sum_lto->writes_errno = false;
+      if (modref_sum)
+	modref_sum->takes_address = false;
+      if (modref_sum_lto)
+	modref_sum_lto->takes_address = false;
 
       gcc_assert (!modref_sum || (!modref_sum->loads
 				  && !modref_sum->stores));
@@ -2731,6 +2790,13 @@ read_section (struct lto_file_decl_data *file_data, const char *data,
 	  if (modref_sum_lto)
 	    modref_sum_lto->writes_errno = true;
 	}
+      if (bp_unpack_value (&bp, 1))
+	{
+	  if (modref_sum)
+	    modref_sum->takes_address = true;
+	  if (modref_sum_lto)
+	    modref_sum_lto->takes_address = true;
+	}
       if (!flag_ltrans)
 	{
 	  for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
@@ -2988,9 +3054,7 @@ ignore_edge (struct cgraph_edge *e)
 
   return (avail <= AVAIL_INTERPOSABLE
 	  || ((!optimization_summaries || !optimization_summaries->get (callee))
-	      && (!summaries_lto || !summaries_lto->get (callee)))
-	  || flags_from_decl_or_type (e->callee->decl)
-	     & (ECF_CONST | ECF_NOVOPS));
+	      && (!summaries_lto || !summaries_lto->get (callee))));
 }
 
 /* Compute parm_map for CALLEE_EDGE.  */
@@ -3184,6 +3248,7 @@ ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
 	to_info->loads->collapse ();
       if (!ignore_stores)
 	to_info->stores->collapse ();
+      to_info->takes_address = true;
     }
   if (!callee_info_lto && to_info_lto)
     {
@@ -3191,6 +3256,7 @@ ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
 	to_info_lto->loads->collapse ();
       if (!ignore_stores)
 	to_info_lto->stores->collapse ();
+      to_info_lto->takes_address = true;
     }
   if (callee_info || callee_info_lto)
     {
@@ -3214,6 +3280,16 @@ ipa_merge_modref_summary_after_inlining (cgraph_edge *edge)
 	    to_info_lto->loads->merge (callee_info_lto->loads, &parm_map,
 				       false);
 	}
+      if (to_info && callee_info)
+	{
+	  to_info->writes_errno |= callee_info->writes_errno;
+	  to_info->takes_address |= callee_info->takes_address;
+	}
+      if (to_info_lto && callee_info_lto)
+	{
+	  to_info_lto->writes_errno |= callee_info_lto->writes_errno;
+	  to_info_lto->takes_address |= callee_info_lto->takes_address;
+	}
     }
 
   /* Now merge escape summaries.
@@ -3387,6 +3463,20 @@ propagate_unknown_call (cgraph_node *node,
   bool changed = false;
   class fnspec_summary *fnspec_sum = fnspec_summaries->get (e);
   auto_vec <modref_parm_map, 32> parm_map;
+
+  if (cur_summary && !cur_summary->takes_address)
+    {
+      cur_summary->takes_address = true;
+      changed = true;
+    }
+  if (cur_summary_lto && !cur_summary_lto->takes_address)
+    {
+      cur_summary_lto->takes_address = true;
+      changed = true;
+    }
+  if (ecf_flags & (ECF_CONST | ECF_NOVOPS))
+    return changed;
+
   if (fnspec_sum
       && compute_parm_map (e, &parm_map))
     {
@@ -3537,6 +3627,16 @@ modref_propagate_in_scc (cgraph_node *component_node)
 
 	  for (cgraph_edge *e = cur->indirect_calls; e; e = e->next_callee)
 	    {
+	      if (cur_summary && !cur_summary->takes_address)
+		{
+		  cur_summary->takes_address = true;
+		  changed = true;
+		}
+	      if (cur_summary_lto && !cur_summary_lto->takes_address)
+		{
+		  cur_summary_lto->takes_address = true;
+		  changed = true;
+		}
 	      if (e->indirect_info->ecf_flags & (ECF_CONST | ECF_NOVOPS))
 		continue;
 	      if (dump_file)
@@ -3566,8 +3666,7 @@ modref_propagate_in_scc (cgraph_node *component_node)
 	      modref_summary_lto *callee_summary_lto = NULL;
 	      struct cgraph_node *callee;
 
-	      if (flags & (ECF_CONST | ECF_NOVOPS)
-		  || !callee_edge->inline_failed)
+	      if (!callee_edge->inline_failed)
 		continue;
 
 	      /* Get the callee and its summary.  */
@@ -3646,6 +3745,12 @@ modref_propagate_in_scc (cgraph_node *component_node)
 		{
 		  changed |= cur_summary->loads->merge
 				  (callee_summary->loads, &parm_map, !first);
+		  if (!cur_summary->takes_address
+		      && callee_summary->takes_address)
+		    {
+		      cur_summary->takes_address = true;
+		      changed = true;
+		    }
 		  if (!ignore_stores)
 		    {
 		      changed |= cur_summary->stores->merge
@@ -3664,6 +3769,12 @@ modref_propagate_in_scc (cgraph_node *component_node)
 		  changed |= cur_summary_lto->loads->merge
 				  (callee_summary_lto->loads, &parm_map,
 				   !first);
+		  if (!cur_summary_lto->takes_address
+		      && callee_summary_lto->takes_address)
+		    {
+		      cur_summary_lto->takes_address = true;
+		      changed = true;
+		    }
 		  if (!ignore_stores)
 		    {
 		      changed |= cur_summary_lto->stores->merge
@@ -3701,6 +3812,7 @@ modref_propagate_in_scc (cgraph_node *component_node)
 	     "Propagation finished in %i iterations\n", iteration);
 }
 
+
 /* Dump results of propagation in SCC rooted in COMPONENT_NODE.  */
 
 static void
@@ -3992,8 +4104,10 @@ ipa_modref_c_finalize ()
   if (optimization_summaries)
     ggc_delete (optimization_summaries);
   optimization_summaries = NULL;
+#if 0
   gcc_checking_assert (!summaries
 		       || flag_incremental_link == INCREMENTAL_LINK_LTO);
+#endif
   if (summaries_lto)
     ggc_delete (summaries_lto);
   summaries_lto = NULL;
diff --git a/gcc/ipa-modref.h b/gcc/ipa-modref.h
index 540fdea8efa..027674333a2 100644
--- a/gcc/ipa-modref.h
+++ b/gcc/ipa-modref.h
@@ -32,15 +32,29 @@ struct GTY(()) modref_summary
   modref_records *stores;
   auto_vec<eaf_flags_t> GTY((skip)) arg_flags;
   bool writes_errno;
+  bool takes_address;
 
   modref_summary ();
   ~modref_summary ();
   void dump (FILE *);
   bool useful_p (int ecf_flags, bool check_flags = true);
+  bool global_memory_read_p ();
+  bool global_memory_written_p ();
 };
 
 modref_summary *get_modref_function_summary (cgraph_node *func);
 void ipa_modref_c_finalize ();
 void ipa_merge_modref_summary_after_inlining (cgraph_edge *e);
 
+/* All flags that are implied by the ECF_CONST functions.  */
+static const int implicit_const_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
+				     | EAF_NODIRECTESCAPE | EAF_NOREAD;
+/* All flags that are implied by the ECF_PURE function.  */
+static const int implicit_pure_eaf_flags = EAF_NOCLOBBER | EAF_NOESCAPE
+				    | EAF_NODIRECTESCAPE;
+/* All flags implied when we know we can ignore stores (i.e. when handling
+   call to noreturn).  */
+static const int ignore_stores_eaf_flags = EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE
+				    | EAF_NODIRECTESCAPE;
+
 #endif
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 0b8a81ff113..44fc76fc77b 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -44,6 +44,9 @@
 #include "tree-ssa.h"
 #include "tree-cfg.h"
 #include "gimple-range.h"
+#include "ipa-modref-tree.h"
+#include "ipa-modref.h"
+#include "attr-fnspec.h"
 
 /* The idea behind this analyzer is to generate set constraints from the
    program, then solve the resulting constraints in order to generate the
@@ -3880,23 +3883,6 @@ make_escape_constraint (tree op)
   make_constraint_to (escaped_id, op);
 }
 
-/* Make constraint necessary to make all indirect references
-   from VI escape.  */
-
-static void
-make_indirect_escape_constraint (varinfo_t vi)
-{
-  struct constraint_expr lhs, rhs;
-  /* escaped = *(VAR + UNKNOWN);  */
-  lhs.type = SCALAR;
-  lhs.var = escaped_id;
-  lhs.offset = 0;
-  rhs.type = DEREF;
-  rhs.var = vi->id;
-  rhs.offset = UNKNOWN_OFFSET;
-  process_constraint (new_constraint (lhs, rhs));
-}
-
 /* Add constraints to that the solution of VI is transitively closed.  */
 
 static void
@@ -4048,98 +4034,212 @@ get_function_part_constraint (varinfo_t fi, unsigned part)
   return c;
 }
 
-/* For non-IPA mode, generate constraints necessary for a call on the
-   RHS.  */
-
 static void
-handle_rhs_call (gcall *stmt, vec<ce_s> *results)
-{
-  struct constraint_expr rhsc;
-  unsigned i;
-  bool returns_uses = false;
+handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
+		 int callescape_id)
+{
+  /* If the argument is not used we can ignore it.
+     Similarly the argument is invisible to us if it is not clobbered, does
+     not escape, is not read and cannot be returned.  */
+  if ((flags & EAF_UNUSED)
+      || ((flags & (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
+		    | EAF_NOT_RETURNED))
+	  == (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
+	      | EAF_NOT_RETURNED)))
+    return;
+
+  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
+  tem->is_reg_var = true;
+  make_constraint_to (tem->id, arg);
+  make_any_offset_constraints (tem);
 
-  for (i = 0; i < gimple_call_num_args (stmt); ++i)
+  if (!(flags & EAF_DIRECT))
+    make_transitive_closure_constraints (tem);
+
+  if (!(flags & EAF_NOT_RETURNED))
     {
-      tree arg = gimple_call_arg (stmt, i);
-      int flags = gimple_call_arg_flags (stmt, i);
+      struct constraint_expr cexpr;
+      cexpr.var = tem->id;
+      cexpr.type = SCALAR;
+      cexpr.offset = 0;
+      results->safe_push (cexpr);
+    }
 
-      /* If the argument is not used we can ignore it.
-	 Similarly argument is invisile for us if it not clobbered, does not
-	 escape, is not read and can not be returned.  */
-      if ((flags & EAF_UNUSED)
-	  || ((flags & (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
-			| EAF_NOT_RETURNED))
-	      == (EAF_NOCLOBBER | EAF_NOESCAPE | EAF_NOREAD
-		  | EAF_NOT_RETURNED)))
-	continue;
+  if (!(flags & EAF_NOREAD))
+    {
+      varinfo_t uses = get_call_use_vi (stmt);
+      make_copy_constraint (uses, tem->id);
+    }
+
+  if (!(flags & EAF_NOCLOBBER))
+    {
+      struct constraint_expr lhs, rhs;
+
+      /* *arg = callescape.  */
+      lhs.type = DEREF;
+      lhs.var = tem->id;
+      lhs.offset = 0;
+
+      rhs.type = SCALAR;
+      rhs.var = callescape_id;
+      rhs.offset = 0;
+      process_constraint (new_constraint (lhs, rhs));
+
+      /* callclobbered = arg.  */
+#if 1
+      make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
+#else
+      /* callclobbered = *arg.  */
+      lhs.type = SCALAR;
+      lhs.var = get_call_clobber_vi (stmt)->id;
+      lhs.offset = 0;
+
+      rhs.type = DEREF;
+      rhs.var = tem->id;
+      rhs.offset = 0;
+
+      process_constraint (new_constraint (lhs, rhs));
+#endif
+    }
+
+  if (!(flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE)))
+    {
+      struct constraint_expr lhs, rhs;
+
+      /* callescape = arg;  */
+      lhs.var = callescape_id;
+      lhs.offset = 0;
+      lhs.type = SCALAR;
+
+      rhs.var = tem->id;
+      rhs.offset = 0;
+      rhs.type = SCALAR;
+      process_constraint (new_constraint (lhs, rhs));
+    }
+  else if (!(flags & EAF_NOESCAPE))
+    {
+      struct constraint_expr lhs, rhs;
+
+      /* callescape = *(arg + UNKNOWN);  */
+      lhs.var = callescape_id;
+      lhs.offset = 0;
+      lhs.type = SCALAR;
 
-      /* As we compute ESCAPED context-insensitive we do not gain
-         any precision with just EAF_NOCLOBBER but not EAF_NOESCAPE
-	 set.  The argument would still get clobbered through the
-	 escape solution.  */
-      if ((flags & EAF_NOCLOBBER)
-	   && (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE)))
+      rhs.var = tem->id;
+      rhs.offset = UNKNOWN_OFFSET;
+      rhs.type = DEREF;
+      process_constraint (new_constraint (lhs, rhs));
+    }
+}
+
+static void
+handle_rhs_call (gcall *stmt, vec<ce_s> *results,
+    		 int implicit_eaf_flags,
+		 bool writes_global_memory,
+		 bool reads_global_memory)
+{
+  tree callee;
+  cgraph_node *node;
+  modref_summary *summary;
+  /* This flag is set if the function can segfault when global memory
+     is not initialized, even though it does nothing useful with it.  */
+  bool uses_global_memory = reads_global_memory;
+  bool takes_address = true;
+
+  if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
+      && (node = cgraph_node::get (callee)) != NULL
+      && (summary = get_modref_function_summary (node)))
+    {
+      if (writes_global_memory)
+	writes_global_memory = summary->global_memory_written_p ();
+      if (reads_global_memory)
+	reads_global_memory = summary->global_memory_read_p ();
+      if (node->binds_to_current_def_p ()
+	  && !reads_global_memory)
+	uses_global_memory = false;
+      if (!summary->takes_address)
+	takes_address = false;
+    }
+  if (!uses_global_memory)
+    reads_global_memory = false;
+  if (writes_global_memory || uses_global_memory)
+    {
+      attr_fnspec fnspec = gimple_call_fnspec (stmt);
+      if (fnspec.known_p ())
 	{
-	  varinfo_t uses = get_call_use_vi (stmt);
-	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
-	  tem->is_reg_var = true;
-	  make_constraint_to (tem->id, arg);
-	  make_any_offset_constraints (tem);
-	  if (!(flags & EAF_DIRECT))
-	    make_transitive_closure_constraints (tem);
-	  make_copy_constraint (uses, tem->id);
-	  /* TODO: This is overly conservative when some parameters are
-	     returned while others are not.  */
-	  if (!(flags & EAF_NOT_RETURNED))
-	    returns_uses = true;
-	  if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
-	    make_indirect_escape_constraint (tem);
+	  if (!fnspec.global_memory_written_p ())
+	    writes_global_memory = false;
+	  if (!fnspec.global_memory_read_p ())
+	    uses_global_memory = reads_global_memory = false;
 	}
-      else if (flags & (EAF_NOESCAPE | EAF_NODIRECTESCAPE))
+    }
+
+  varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);
+
+  /* If the function can use global memory, add it to callescape
+     and to possible return values.  If not, we can still use/return
+     addresses of global symbols.  */
+  struct constraint_expr lhs, rhs;
+
+  if (reads_global_memory || takes_address || 1)
+    {
+      lhs.type = SCALAR;
+      lhs.var = callescape->id;
+      lhs.offset = 0;
+
+      if (reads_global_memory || takes_address || 1)
 	{
-	  struct constraint_expr lhs, rhs;
-	  varinfo_t uses = get_call_use_vi (stmt);
-	  varinfo_t clobbers = get_call_clobber_vi (stmt);
-	  varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
-	  tem->is_reg_var = true;
-	  make_constraint_to (tem->id, arg);
-	  make_any_offset_constraints (tem);
-	  if (!(flags & EAF_DIRECT))
-	    make_transitive_closure_constraints (tem);
-	  make_copy_constraint (uses, tem->id);
-	  if (!(flags & EAF_NOT_RETURNED))
-	    returns_uses = true;
-	  make_copy_constraint (clobbers, tem->id);
-	  /* Add *tem = nonlocal, do not add *tem = callused as
-	     EAF_NOESCAPE parameters do not escape to other parameters
-	     and all other uses appear in NONLOCAL as well.  */
-	  lhs.type = DEREF;
-	  lhs.var = tem->id;
-	  lhs.offset = 0;
-	  rhs.type = SCALAR;
+	  rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
 	  rhs.var = nonlocal_id;
 	  rhs.offset = 0;
-	  process_constraint (new_constraint (lhs, rhs));
-	  if (!(flags & (EAF_NOESCAPE | EAF_DIRECT)))
-	    make_indirect_escape_constraint (tem);
 	}
       else
-	make_escape_constraint (arg);
+	{
+	  rhs.type = SCALAR;
+	  rhs.var = integer_id;
+	  rhs.offset = 0;
+	}
+
+      process_constraint (new_constraint (lhs, rhs));
+      results->safe_push (rhs);
+    }
+
+  varinfo_t uses = get_call_use_vi (stmt);
+  make_copy_constraint (uses, callescape->id);
+
+#if 1
+  /* If we did not see a read of global memory but we are
+     not sure whether the function may be interposed by a variant that
+     reads it, we need to add it to uses to avoid possible segfaults.  */
+  if (uses_global_memory && !reads_global_memory)
+    {
+      /* calluse = nonlocal.  */
+      lhs.type = SCALAR;
+      lhs.var = uses->id;
+      lhs.offset = 0;
+
+      rhs.type = SCALAR;
+      rhs.var = nonlocal_id;
+      rhs.offset = 0;
+
+      process_constraint (new_constraint (lhs, rhs));
     }
+#endif
 
-  /* If we added to the calls uses solution make sure we account for
-     pointers to it to be returned.  */
-  if (returns_uses)
+  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
     {
-      rhsc.var = get_call_use_vi (stmt)->id;
-      rhsc.offset = UNKNOWN_OFFSET;
-      rhsc.type = SCALAR;
-      results->safe_push (rhsc);
+      tree arg = gimple_call_arg (stmt, i);
+      int flags = gimple_call_arg_flags (stmt, i);
+      handle_call_arg (stmt, arg, results,
+		       flags | implicit_eaf_flags,
+		       callescape->id);
     }
 
   /* The static chain escapes as well.  */
   if (gimple_call_chain (stmt))
-    make_escape_constraint (gimple_call_chain (stmt));
+    handle_call_arg (stmt, gimple_call_chain (stmt), results,
+		     implicit_eaf_flags,
+		     callescape->id);
 
   /* And if we applied NRV the address of the return slot escapes as well.  */
   if (gimple_call_return_slot_opt_p (stmt)
@@ -4147,20 +4247,42 @@ handle_rhs_call (gcall *stmt, vec<ce_s> *results)
       && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
     {
       auto_vec<ce_s> tmpc;
-      struct constraint_expr lhsc, *c;
+      struct constraint_expr *c;
+      unsigned i;
+
       get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
-      lhsc.var = escaped_id;
-      lhsc.offset = 0;
-      lhsc.type = SCALAR;
+
+      make_constraints_to (callescape->id, tmpc);
       FOR_EACH_VEC_ELT (tmpc, i, c)
-	process_constraint (new_constraint (lhsc, *c));
+	results->safe_push (*c);
     }
 
-  /* Regular functions return nonlocal memory.  */
-  rhsc.var = nonlocal_id;
-  rhsc.offset = 0;
-  rhsc.type = SCALAR;
-  results->safe_push (rhsc);
+  if (writes_global_memory)
+    {
+      struct constraint_expr lhs, rhs;
+
+      /* escaped = callescape.  */
+      lhs.var = escaped_id;
+      lhs.offset = 0;
+      lhs.type = SCALAR;
+
+      rhs.var = callescape->id;
+      rhs.offset = 0;
+      rhs.type = SCALAR;
+      process_constraint (new_constraint (lhs, rhs));
+
+#if 1
+      /* callclobber = nonlocal.  */
+      lhs.var = get_call_clobber_vi (stmt)->id;
+      lhs.offset = 0;
+      lhs.type = SCALAR;
+
+      rhs.type = SCALAR;
+      rhs.var = nonlocal_id;
+      rhs.offset = 0;
+      process_constraint (new_constraint (lhs, rhs));
+#endif
+    }
 }
 
 /* For non-IPA mode, generate constraints necessary for a call
@@ -4227,160 +4349,6 @@ handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
     process_all_all_constraints (lhsc, rhsc);
 }
 
-/* For non-IPA mode, generate constraints necessary for a call of a
-   const function that returns a pointer in the statement STMT.  */
-
-static void
-handle_const_call (gcall *stmt, vec<ce_s> *results)
-{
-  struct constraint_expr rhsc;
-  unsigned int k;
-  bool need_uses = false;
-
-  /* Treat nested const functions the same as pure functions as far
-     as the static chain is concerned.  */
-  if (gimple_call_chain (stmt))
-    {
-      varinfo_t uses = get_call_use_vi (stmt);
-      make_constraint_to (uses->id, gimple_call_chain (stmt));
-      need_uses = true;
-    }
-
-  /* And if we applied NRV the address of the return slot escapes as well.  */
-  if (gimple_call_return_slot_opt_p (stmt)
-      && gimple_call_lhs (stmt) != NULL_TREE
-      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
-    {
-      varinfo_t uses = get_call_use_vi (stmt);
-      auto_vec<ce_s> tmpc;
-      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
-      make_constraints_to (uses->id, tmpc);
-      need_uses = true;
-    }
-
-  if (need_uses)
-    {
-      varinfo_t uses = get_call_use_vi (stmt);
-      make_any_offset_constraints (uses);
-      make_transitive_closure_constraints (uses);
-      rhsc.var = uses->id;
-      rhsc.offset = 0;
-      rhsc.type = SCALAR;
-      results->safe_push (rhsc);
-    }
-
-  /* May return offsetted arguments.  */
-  varinfo_t tem = NULL;
-  for (k = 0; k < gimple_call_num_args (stmt); ++k)
-    {
-      int flags = gimple_call_arg_flags (stmt, k);
-
-      /* If the argument is not used or not returned we can ignore it.  */
-      if (flags & (EAF_UNUSED | EAF_NOT_RETURNED))
-	continue;
-      if (!tem)
-	{
-	  tem = new_var_info (NULL_TREE, "callarg", true);
-	  tem->is_reg_var = true;
-	}
-      tree arg = gimple_call_arg (stmt, k);
-      auto_vec<ce_s> argc;
-      get_constraint_for_rhs (arg, &argc);
-      make_constraints_to (tem->id, argc);
-    }
-  if (tem)
-    {
-      ce_s ce;
-      ce.type = SCALAR;
-      ce.var = tem->id;
-      ce.offset = UNKNOWN_OFFSET;
-      results->safe_push (ce);
-    }
-
-  /* May return addresses of globals.  */
-  rhsc.var = nonlocal_id;
-  rhsc.offset = 0;
-  rhsc.type = ADDRESSOF;
-  results->safe_push (rhsc);
-}
-
-/* For non-IPA mode, generate constraints necessary for a call to a
-   pure function in statement STMT.  */
-
-static void
-handle_pure_call (gcall *stmt, vec<ce_s> *results)
-{
-  struct constraint_expr rhsc;
-  unsigned i;
-  varinfo_t uses = NULL;
-  bool record_uses = false;
-
-  /* Memory reached from pointer arguments is call-used.  */
-  for (i = 0; i < gimple_call_num_args (stmt); ++i)
-    {
-      tree arg = gimple_call_arg (stmt, i);
-      int flags = gimple_call_arg_flags (stmt, i);
-
-      /* If the argument is not used we can ignore it.  */
-      if ((flags & EAF_UNUSED)
-	  || (flags & (EAF_NOT_RETURNED | EAF_NOREAD))
-	     == (EAF_NOT_RETURNED | EAF_NOREAD))
-	continue;
-      if (!uses)
-	{
-	  uses = get_call_use_vi (stmt);
-	  make_any_offset_constraints (uses);
-	  make_transitive_closure_constraints (uses);
-	}
-      make_constraint_to (uses->id, arg);
-      if (!(flags & EAF_NOT_RETURNED))
-	record_uses = true;
-    }
-
-  /* The static chain is used as well.  */
-  if (gimple_call_chain (stmt))
-    {
-      if (!uses)
-	{
-	  uses = get_call_use_vi (stmt);
-	  make_any_offset_constraints (uses);
-	  make_transitive_closure_constraints (uses);
-	}
-      make_constraint_to (uses->id, gimple_call_chain (stmt));
-      record_uses = true;
-    }
-
-  /* And if we applied NRV the address of the return slot.  */
-  if (gimple_call_return_slot_opt_p (stmt)
-      && gimple_call_lhs (stmt) != NULL_TREE
-      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
-    {
-      if (!uses)
-	{
-	  uses = get_call_use_vi (stmt);
-	  make_any_offset_constraints (uses);
-	  make_transitive_closure_constraints (uses);
-	}
-      auto_vec<ce_s> tmpc;
-      get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
-      make_constraints_to (uses->id, tmpc);
-      record_uses = true;
-    }
-
-  /* Pure functions may return call-used and nonlocal memory.  */
-  if (record_uses)
-    {
-      rhsc.var = uses->id;
-      rhsc.offset = 0;
-      rhsc.type = SCALAR;
-      results->safe_push (rhsc);
-    }
-  rhsc.var = nonlocal_id;
-  rhsc.offset = 0;
-  rhsc.type = SCALAR;
-  results->safe_push (rhsc);
-}
-
 
 /* Return the varinfo for the callee of CALL.  */
 
@@ -4931,13 +4899,13 @@ find_func_aliases_for_call (struct function *fn, gcall *t)
       if (flags & (ECF_CONST|ECF_NOVOPS))
 	{
 	  if (gimple_call_lhs (t))
-	    handle_const_call (t, &rhsc);
+	    handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
 	}
       /* Pure functions can return addresses in and of memory
 	 reachable from their arguments, but they are not an escape
 	 point for reachable memory of their arguments.  */
       else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
-	handle_pure_call (t, &rhsc);
+	handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, true, false);
       /* If the call is to a replaceable operator delete and results
 	 from a delete expression as opposed to a direct call to
 	 such operator, then the effects for PTA (in particular
@@ -4947,7 +4915,7 @@ find_func_aliases_for_call (struct function *fn, gcall *t)
 	       && gimple_call_from_new_or_delete (t))
 	;
       else
-	handle_rhs_call (t, &rhsc);
+	handle_rhs_call (t, &rhsc, 0, true, true);
       if (gimple_call_lhs (t))
 	handle_lhs_call (t, gimple_call_lhs (t),
 			 gimple_call_return_flags (t), rhsc, fndecl);
@@ -7582,43 +7550,82 @@ compute_points_to_sets (void)
 	  pt = gimple_call_use_set (stmt);
 	  if (gimple_call_flags (stmt) & ECF_CONST)
 	    memset (pt, 0, sizeof (struct pt_solution));
-	  else if ((vi = lookup_call_use_vi (stmt)) != NULL)
-	    {
-	      *pt = find_what_var_points_to (cfun->decl, vi);
-	      /* Escaped (and thus nonlocal) variables are always
-	         implicitly used by calls.  */
-	      /* ???  ESCAPED can be empty even though NONLOCAL
-		 always escaped.  */
-	      pt->nonlocal = 1;
-	      pt->escaped = 1;
-	    }
 	  else
 	    {
-	      /* If there is nothing special about this call then
-		 we have made everything that is used also escape.  */
-	      *pt = cfun->gimple_df->escaped;
-	      pt->nonlocal = 1;
+	      bool uses_global_memory = true;
+	      tree callee;
+	      cgraph_node *node;
+	      modref_summary *summary;
+
+	      if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
+		  && (node = cgraph_node::get (callee)) != NULL
+		  && node->binds_to_current_def_p ()
+		  && (summary = get_modref_function_summary (node)))
+		uses_global_memory = summary->global_memory_read_p ();
+	      if (uses_global_memory)
+		{
+		  attr_fnspec fnspec = gimple_call_fnspec (stmt);
+		  if (fnspec.known_p ()
+		      && !fnspec.global_memory_read_p ())
+			uses_global_memory = false;
+		}
+	      if ((vi = lookup_call_use_vi (stmt)) != NULL)
+		{
+		  *pt = find_what_var_points_to (cfun->decl, vi);
+		  /* Escaped (and thus nonlocal) variables are always
+		     implicitly used by calls.  */
+		  /* ???  ESCAPED can be empty even though NONLOCAL
+		     always escaped.  */
+		  pt->nonlocal = uses_global_memory;
+		  pt->escaped = uses_global_memory;
+		}
+	      else if (uses_global_memory)
+		{
+		  /* If there is nothing special about this call then
+		     we have made everything that is used also escape.  */
+		  *pt = cfun->gimple_df->escaped;
+		  pt->nonlocal = 1;
+		}
 	    }
 
 	  pt = gimple_call_clobber_set (stmt);
 	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
 	    memset (pt, 0, sizeof (struct pt_solution));
-	  else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
-	    {
-	      *pt = find_what_var_points_to (cfun->decl, vi);
-	      /* Escaped (and thus nonlocal) variables are always
-	         implicitly clobbered by calls.  */
-	      /* ???  ESCAPED can be empty even though NONLOCAL
-		 always escaped.  */
-	      pt->nonlocal = 1;
-	      pt->escaped = 1;
-	    }
 	  else
 	    {
-	      /* If there is nothing special about this call then
-		 we have made everything that is used also escape.  */
-	      *pt = cfun->gimple_df->escaped;
-	      pt->nonlocal = 1;
+	      bool writes_global_memory = true;
+	      tree callee;
+	      cgraph_node *node;
+	      modref_summary *summary;
+
+	      if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
+		  && (node = cgraph_node::get (callee)) != NULL
+		  && (summary = get_modref_function_summary (node)))
+		writes_global_memory = summary->global_memory_written_p ();
+	      if (writes_global_memory)
+		{
+		  attr_fnspec fnspec = gimple_call_fnspec (stmt);
+		  if (fnspec.known_p ()
+		      && !fnspec.global_memory_written_p ())
+			writes_global_memory = false;
+		}
+	      if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
+		{
+		  *pt = find_what_var_points_to (cfun->decl, vi);
+		  /* Escaped (and thus nonlocal) variables are always
+		     implicitly clobbered by calls.  */
+		  /* ???  ESCAPED can be empty even though NONLOCAL
+		     always escaped.  */
+		  pt->nonlocal = writes_global_memory;
+		  pt->escaped = writes_global_memory;
+		}
+	      else if (writes_global_memory)
+		{
+		  /* If there is nothing special about this call then
+		     we have made everything that is used also escape.  */
+		  *pt = cfun->gimple_df->escaped;
+		  pt->nonlocal = 1;
+		}
 	    }
 	}
     }

