* [gcc(refs/users/aoliva/heads/strub)] docs, call check, builtin inline
@ 2021-07-25 15:58 Alexandre Oliva
From: Alexandre Oliva @ 2021-07-25 15:58 UTC
  To: gcc-cvs

https://gcc.gnu.org/g:2781d19d7c1e42e967a3040df4b0c5270dcb9905

commit 2781d19d7c1e42e967a3040df4b0c5270dcb9905
Author: Alexandre Oliva <oliva@adacore.com>
Date:   Sun Jul 25 12:55:14 2021 -0300

    docs, call check, builtin inline

Diff:
---
 gcc/builtins.c      | 153 ++++++++++++++++++++++++++++++++++++++++++++++++-
 gcc/common.opt      |  22 ++++---
 gcc/doc/extend.texi |  67 ++++++++++++++++++++++
 gcc/doc/invoke.texi |  41 +++++++++++++
 gcc/ipa-strub.c     | 162 ++++++++++++++++++++++++++++++++++++++++++++--------
 5 files changed, 410 insertions(+), 35 deletions(-)

diff --git a/gcc/builtins.c b/gcc/builtins.c
index 84539d73c8c..373b4e90f70 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -7894,7 +7894,7 @@ expand_builtin_frame_address (tree fndecl, tree exp)
     }
 }
 
-/* Expand a call to builtin functions __builtin_stack_address.  */
+/* Expand a call to builtin function __builtin_stack_address.  */
 
 static rtx
 expand_builtin_stack_address ()
@@ -7902,6 +7902,139 @@ expand_builtin_stack_address ()
   return copy_addr_to_reg (stack_pointer_rtx);
 }
 
+/* Expand a call to builtin function __builtin_strub_enter.  */
+
+static rtx
+expand_builtin_strub_enter (tree exp)
+{
+  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
+    return NULL_RTX;
+
+  if (!optimize)
+    return NULL_RTX;
+
+  rtx stktop = expand_builtin_stack_address ();
+
+  tree wmptr = CALL_EXPR_ARG (exp, 0);
+  tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
+  tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
+			     build_int_cst (TREE_TYPE (wmptr), 0));
+  rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
+
+  emit_move_insn (wmark, stktop);
+
+  return const0_rtx;
+}
+
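+/* STACK_TOPS compares as true when its first operand is further along
+   the direction of stack growth than its second; STACK_UNSIGNED is the
+   signedness used for stack address comparisons.  */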
+#ifndef STACK_GROWS_DOWNWARD
+# define STACK_TOPS GT
+#else
+# define STACK_TOPS LT
+#endif
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+# define STACK_UNSIGNED POINTERS_EXTEND_UNSIGNED
+#else
+# define STACK_UNSIGNED true
+#endif
+
+/* Expand a call to builtin function __builtin_strub_update.  */
+
+static rtx
+expand_builtin_strub_update (tree exp)
+{
+  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
+    return NULL_RTX;
+
+  if (optimize < 2)
+    return NULL_RTX;
+
+  rtx stktop = expand_builtin_stack_address ();
+
+  tree wmptr = CALL_EXPR_ARG (exp, 0);
+  tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
+  tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
+			     build_int_cst (TREE_TYPE (wmptr), 0));
+  rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
+  rtx wmarkr = force_reg (ptr_mode, wmark);
+
+  rtx_code_label *lab = gen_label_rtx ();
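+  /* Update the watermark only if the current stack top tops the
+     previously recorded watermark.  */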
+  do_compare_rtx_and_jump (stktop, wmarkr, STACK_TOPS, STACK_UNSIGNED,
+			   ptr_mode, NULL_RTX, lab, NULL,
+			   profile_probability::very_likely ());
+  emit_move_insn (wmark, stktop);
+  emit_label (lab);
+
+  return const0_rtx;
+}
+
+/* Expand a call to builtin function __builtin_strub_leave.  */
+
+static rtx
+expand_builtin_strub_leave (tree exp)
+{
+  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
+    return NULL_RTX;
+
+  if (optimize < 3)
+    return NULL_RTX;
+
+  rtx stktop = expand_builtin_stack_address ();
+
+  tree wmptr = CALL_EXPR_ARG (exp, 0);
+  tree wmtype = TREE_TYPE (TREE_TYPE (wmptr));
+  tree wmtree = fold_build2 (MEM_REF, wmtype, wmptr,
+			     build_int_cst (TREE_TYPE (wmptr), 0));
+  rtx wmark = expand_expr (wmtree, NULL_RTX, ptr_mode, EXPAND_MEMORY);
+
+  /* If the function called between enter and leave is const or pure,
+     the optimizer might assume it cannot change the watermark passed
+     to it by reference.  The USE and CLOBBER below ensure the stored
+     value is neither dropped nor assumed unchanged.  */
+  emit_insn (gen_rtx_USE (VOIDmode, wmark));
+  emit_insn (gen_rtx_CLOBBER (VOIDmode, wmark));
+
+  rtx wmarkr = force_reg (ptr_mode, wmark);
+
+#ifndef STACK_GROWS_DOWNWARD
+  rtx base = stktop;
+  rtx end = wmarkr;
+#else
+  rtx base = wmarkr;
+  rtx end = stktop;
+#endif
+
+  rtx_code_label *done = gen_label_rtx ();
+  do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
+			   ptr_mode, NULL_RTX, done, NULL,
+			   profile_probability::very_likely ());
+
+  rtx zero = force_operand (const0_rtx, NULL_RTX);
+  rtx ulen = GEN_INT (GET_MODE_SIZE (ptr_mode));
+  rtx incr = gen_rtx_PLUS (ptr_mode, base, ulen);
+  rtx dstm = gen_rtx_MEM (ptr_mode, base);
+
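+  /* Clear one pointer-wide word per iteration, from BASE up to END.  */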
+  rtx_code_label *loop = gen_label_rtx ();
+  emit_label (loop);
+  emit_move_insn (dstm, zero);
+  emit_move_insn (base, force_operand (incr, NULL_RTX));
+  do_compare_rtx_and_jump (base, end, LT, STACK_UNSIGNED,
+			   ptr_mode, NULL_RTX, NULL, loop,
+			   profile_probability::very_likely ());
+
+#if 0
+  emit_insn (gen_rtx_USE (VOIDmode,
+			  gen_rtx_MEM (BLKmode,
+				       gen_rtx_PLUS (ptr_mode, base,
+						     GEN_INT (-GET_MODE_SIZE (ptr_mode))))));
+#endif
+
+  emit_label (done);
+
+  return const0_rtx;
+}
+
 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
    failed and the caller should emit a normal call.  */
 
@@ -10163,6 +10296,24 @@ expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
     case BUILT_IN_STACK_ADDRESS:
       return expand_builtin_stack_address ();
 
+    case BUILT_IN___STRUB_ENTER:
+      target = expand_builtin_strub_enter (exp);
+      if (target)
+	return target;
+      break;
+
+    case BUILT_IN___STRUB_UPDATE:
+      target = expand_builtin_strub_update (exp);
+      if (target)
+	return target;
+      break;
+
+    case BUILT_IN___STRUB_LEAVE:
+      target = expand_builtin_strub_leave (exp);
+      if (target)
+	return target;
+      break;
+
     /* Returns the address of the area where the structure is returned.
        0 otherwise.  */
     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
diff --git a/gcc/common.opt b/gcc/common.opt
index 72a0b9e7a0c..c464113bb69 100644
--- a/gcc/common.opt
+++ b/gcc/common.opt
@@ -2687,22 +2687,26 @@ fstrict-overflow
 Common
 Treat signed overflow as undefined.  Negated as -fwrapv -fwrapv-pointer.
 
-fstrub
-Common Var(flag_strub, 3) Init(-2)
-Enable (or disable) stack scrubbing for all viable functions
+fstrub=all
+Common RejectNegative Var(flag_strub, 3) Init(-2)
+Enable stack scrubbing for all viable functions
 
-fstrub-default
-Common Var(flag_strub, -1)
-Enable stack scrub as requested through attributes
-
-fstrub-at-calls
+fstrub=at-calls
 Common RejectNegative Var(flag_strub, 1)
 Enable at-calls stack scrubbing for all viable functions
 
-fstrub-internal
+fstrub=internal
 Common RejectNegative Var(flag_strub, 2)
 Enable internal stack scrubbing for all viable functions
 
+fstrub=none
+Common Var(flag_strub, 0)
+Disable stack scrub entirely, disregarding strub attributes
+
+fstrub=default
+Common Var(flag_strub, -1)
+Enable stack scrub as requested through attributes
+
 fsync-libcalls
 Common Var(flag_sync_libcalls) Init(1)
 Implement __atomic operations via libcalls to legacy __sync functions.
diff --git a/gcc/doc/extend.texi b/gcc/doc/extend.texi
index 4e4206b2f35..7c1b3037699 100644
--- a/gcc/doc/extend.texi
+++ b/gcc/doc/extend.texi
@@ -8652,6 +8652,40 @@ pid_t wait (wait_status_ptr_t p)
 @}
 @end smallexample
 
+@item strub
+@cindex @code{strub} type attribute
+This attribute defines stack-scrubbing properties of functions and
+variables.  When applied to function types, it takes an optional numeric
+argument.
+
+A function associated with at-calls strub mode (@code{strub(1)})
+undergoes interface changes, and callers automatically gain code to
+scrub the stack space it uses.  If the optional numeric argument is
+omitted from the @code{strub} attribute in a function type, the at-calls
+mode is selected.
+
+A function with internal strub mode @code{strub(2)} retains its
+original interface, but it may be split into a wrapper, which keeps
+that interface, and a wrapped body with a custom interface; the
+wrapper calls the wrapped body and then scrubs the stack space the
+body used.
+
+A variable whose type carries the strub attribute causes functions that
+use it to have strub enabled.
+
+A function associated with the disabled strub mode @code{strub(0)} will
+not have its stack scrubbed.  A function that has strub mode disabled
+cannot be called from strub contexts, namely from bodies of functions
+with strub modes @code{1} or @code{2}.
+
+A function whose body is not a strub context, but that should be
+callable from within strub contexts, should be marked as callable with
+@code{strub(3)}.
+
+Strub contexts are not inlined into non-strub contexts.  When an
+internal-strub function is split, the wrapper can often be inlined, but
+its wrapped body cannot.  Functions marked as @code{always_inline}, even
+if explicitly assigned internal strub mode, will not undergo wrapping.
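+
+The following sketch illustrates the modes described above; the
+function and variable names are merely illustrative:
+
+@smallexample
+/* At-calls strub: modified interface, callers scrub its stack.  */
+void __attribute__ ((strub (1))) at_calls_context (void);
+
+/* Internal strub: original interface, split into wrapper and body.  */
+void __attribute__ ((strub (2))) internal_context (void);
+
+/* Not a strub context itself, but callable from strub contexts.  */
+void __attribute__ ((strub (3))) callable_from_strub (void);
+
+/* Functions that use a variable of a strub type get strub enabled.  */
+typedef int __attribute__ ((strub)) secret_int;
+secret_int key;
+@end smallexample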
+
 @item unused
 @cindex @code{unused} type attribute
 When attached to a type (including a @code{union} or a @code{struct}),
@@ -11674,6 +11708,39 @@ situations.
 This function returns the value of the stack pointer register.
 @end deftypefn
 
+@node Stack scrubbing
+@section Stack scrubbing interfaces
+
+Stack scrubbing involves cooperation between a strub context, i.e., a
+function whose stack frame is to be zeroed out, and its callers.  The
+caller initializes a stack watermark, the strub context updates the
+watermark to reflect its stack use, and the caller zeroes out the used
+stack space once it regains control.  Each of these steps is performed
+by a different builtin function call; each builtin is defined in
+libgcc, but may be expanded inline by the compiler depending on the
+optimization level.
+
+@deftypefn {Built-in Function} {void} __builtin___strub_enter (void **@var{wmptr})
+This function initializes a stack @var{watermark} variable with the
+current top of the stack.  This builtin function should be called before
+entering a strub context.  It remains as a function call if optimization
+is not enabled.
+@end deftypefn
+
+@deftypefn {Built-in Function} {void} __builtin___strub_update (void **@var{wmptr})
+This function updates a stack @var{watermark} variable with the current
+top of the stack, if it tops the previous watermark.  This builtin
+function should be called within a strub context whenever additional
+stack space may have been used.  It remains as a function call at
+optimization levels lower than 2.
+@end deftypefn
+
+@deftypefn {Built-in Function} {void} __builtin___strub_leave (void **@var{wmptr})
+This function overwrites with zeros the memory area between the current
+top of the stack and the @var{watermark}ed address.  This builtin
+function should
+be called after leaving a strub context.  It remains as a function call
+at optimization levels lower than 3.
+@end deftypefn
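+
+When strub is enabled, the compiler emits these calls itself.  The
+following sketch, with merely illustrative function names, shows the
+intended calling pattern:
+
+@smallexample
+void illustrative_strub_context (void **wmptr);
+
+void
+caller (void)
+@{
+  void *watermark;
+  __builtin___strub_enter (&watermark);  /* Record the stack top.  */
+  illustrative_strub_context (&watermark);
+  __builtin___strub_leave (&watermark);  /* Scrub up to the watermark.  */
+@}
+
+void
+illustrative_strub_context (void **wmptr)
+@{
+  /* ... code that may use additional stack space ...  */
+  __builtin___strub_update (wmptr);  /* Record deeper stack use.  */
+@}
+@end smallexample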
+
 @node Vector Extensions
 @section Using Vector Instructions through Built-in Functions
 
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index 32697e6117c..30a46843a15 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -599,6 +599,7 @@ Objective-C and Objective-C++ Dialects}.
 -fstack-protector-explicit  -fstack-check @gol
 -fstack-limit-register=@var{reg}  -fstack-limit-symbol=@var{sym} @gol
 -fno-stack-limit  -fsplit-stack @gol
+-fstrub=all -fstrub=at-calls -fstrub=internal -fstrub=default -fstrub=none @gol
 -fvtable-verify=@r{[}std@r{|}preinit@r{|}none@r{]} @gol
 -fvtv-counts  -fvtv-debug @gol
 -finstrument-functions @gol
@@ -15425,6 +15426,46 @@ without @option{-fsplit-stack} always has a large stack.  Support for
 this is implemented in the gold linker in GNU binutils release 2.21
 and later.
 
+@item -fstrub=all
+@opindex fstrub=all
+Enable stack scrub (strub) for all viable functions, and consider
+non-viable functions as callable.  When both strub modes are viable,
+at-calls is preferred.
+
+@item -fstrub=at-calls
+@opindex fstrub=at-calls
+Enable at-calls strub for all viable functions, and consider non-viable
+functions as callable.  A function is not eligible for at-calls strub if
+a different strub mode is explicitly requested, if attribute
+@code{noipa} is present, or if it calls @code{__builtin_apply_args}.
+At-calls strub mode, if not requested with the function type, is only
+viable for an eligible function if it is not visible to other
+translation units, whether through its declaration or by having its
+address taken.
+
+@item -fstrub=internal
+@opindex fstrub=internal
+Enable internal strub for all viable functions, and consider non-viable
+functions as callable.  A function is not eligible for internal strub if
+another strub mode is explicitly requested, or if attribute
+@code{noipa} is present.  A non-@code{always_inline} function also
+becomes ineligible if attribute @code{noclone} is present, if it uses
+such features as user labels, non-default variable argument interfaces,
+@code{__builtin_next_arg}, or @code{__builtin_return_address}, or if it
+takes too many (about 64Ki) arguments.  For internal strub, all
+eligible functions are viable.
+
+@item -fstrub=none
+@opindex fstrub=none
+Disable stack scrubbing entirely, even when the @code{strub} attribute
+is present in function and variable types.
+
+@item -fstrub=default
+@opindex fstrub=default
+Restore the default strub behavior, overriding any earlier
+@samp{-fstrub} option: strub is only enabled when required by
+@code{strub} attributes associated with function or variable types.
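+
+For example, in the command line below (purely illustrative), strub
+ends up enabled only as requested by @code{strub} attributes, since the
+last @samp{-fstrub} option takes precedence:
+
+@smallexample
+gcc -O2 -fstrub=all -fstrub=default file.c
+@end smallexample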
+
 @item -fvtable-verify=@r{[}std@r{|}preinit@r{|}none@r{]}
 @opindex fvtable-verify
 This option is only available when compiling C++ code.
diff --git a/gcc/ipa-strub.c b/gcc/ipa-strub.c
index f9a3c92a115..d634681f16f 100644
--- a/gcc/ipa-strub.c
+++ b/gcc/ipa-strub.c
@@ -163,14 +163,14 @@ get_strub_mode_attr_value (enum strub_mode mode)
 }
 
 static enum strub_mode
-get_strub_mode_from_attr (tree strub_attr)
+get_strub_mode_from_attr (tree strub_attr, bool var_p = false)
 {
   enum strub_mode mode = STRUB_DISABLED;
 
   if (strub_attr)
     {
       if (!TREE_VALUE (strub_attr))
-	mode = STRUB_AT_CALLS;
+	mode = !var_p ? STRUB_AT_CALLS : STRUB_INTERNAL;
       else if (TREE_CODE (TREE_VALUE (TREE_VALUE (strub_attr))) == INTEGER_CST)
 	mode = (enum strub_mode) tree_to_shwi (TREE_VALUE
 					       (TREE_VALUE (strub_attr)));
@@ -187,6 +187,21 @@ get_strub_mode (cgraph_node *node)
   return get_strub_mode_from_attr (get_strub_attr_from_decl (node->decl));
 }
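+
+/* Return the strub mode of TYPE.  For function types that do not carry
+   a strub attribute, a positive -fstrub setting implies callable; any
+   other type without the attribute gets strub disabled.  */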
 
+static enum strub_mode
+get_strub_mode_from_type (tree type)
+{
+  bool var_p = !FUNC_OR_METHOD_TYPE_P (type);
+  tree attr = get_strub_attr_from_type (type);
+
+  if (attr)
+    return get_strub_mode_from_attr (attr, var_p);
+
+  if (flag_strub > 0 && !var_p)
+    return STRUB_CALLABLE;
+
+  return STRUB_DISABLED;
+}
+
 static bool
 calls_builtin_va_start_p (cgraph_node *node)
 {
@@ -226,9 +241,43 @@ calls_builtin_apply_args_p (cgraph_node *node, bool report = false)
   return result;
 }
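+
+/* Return TRUE if NODE carries the always_inline attribute.  */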
 
+static inline bool
+strub_always_inline_p (cgraph_node *node)
+{
+  return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
+}
+
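+/* Return TRUE unless attribute noipa prevents strubbing NODE.  When not
+   reporting, always_inline functions are deemed strubbable, since they
+   are left alone.  If REPORT, issue diagnostics for ineligibility.  */
+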
+static inline bool
+can_strub_p (cgraph_node *node, bool report = false)
+{
+  bool result = true;
+
+  if (!report && strub_always_inline_p (node))
+    return result;
+
+  if (lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)))
+    {
+      result = false;
+
+      if (!report)
+	return result;
+
+      sorry_at (DECL_SOURCE_LOCATION (node->decl),
+		"%qD is not eligible for strub because of attribute noipa",
+		node->decl);
+    }
+
+  return result;
+}
+
 static bool
 can_strub_at_calls_p (cgraph_node *node, bool report = false)
 {
+  bool result = !report || can_strub_p (node, report);
+
+  if (!result && !report)
+    return result;
+
   return !calls_builtin_apply_args_p (node, report);
 }
 
@@ -258,7 +307,27 @@ can_strub_at_calls_p (cgraph_node *node, bool report = false)
 static bool
 can_strub_internally_p (cgraph_node *node, bool report = false)
 {
-  bool result = true;
+  bool result = !report || can_strub_p (node, report);
+
+  if (!result && !report)
+    return result;
+
+  if (!report && strub_always_inline_p (node))
+    return result;
+
+  if (lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
+    {
+      result = false;
+
+      if (!report)
+	return result;
+
+      sorry_at (DECL_SOURCE_LOCATION (node->decl),
+		"%qD is not eligible for internal strub"
+		" because of attribute noclone",
+		node->decl);
+    }
 
   for (cgraph_edge *e = node->callees; e; e = e->next_callee)
     {
@@ -346,7 +415,8 @@ strub_from_body_p (cgraph_node *node)
   tree var;
   FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (node->decl),
 		       i, var)
-    if (get_strub_attr_from_decl (var))
+    if (get_strub_mode_from_type (TREE_TYPE (var))
+	!= STRUB_DISABLED)
       return true;
 
   /* Now scan the body for loads with strub types.  */
@@ -361,7 +431,8 @@ strub_from_body_p (cgraph_node *node)
 	  continue;
 
 	tree rhs = gimple_assign_rhs1 (stmt);
-	if (get_strub_attr_from_type (TREE_TYPE (rhs)))
+	if (get_strub_mode_from_type (TREE_TYPE (rhs))
+	    != STRUB_DISABLED)
 	  return true;
       }
 
@@ -434,8 +505,7 @@ compute_strub_mode (cgraph_node *node, tree strub_attr)
   /* We can cope with always_inline functions even with noipa and noclone,
      because we just leave them alone.  */
   const bool is_always_inline
-    = (lookup_attribute ("always_inline",
-			 DECL_ATTRIBUTES (node->decl)));
+    = strub_always_inline_p (node);
 
   /* Strubbing in general, and each specific strub mode, may have its own set of
      requirements.  We require noipa for strubbing, either because of cloning
@@ -451,17 +521,14 @@ compute_strub_mode (cgraph_node *node, tree strub_attr)
      target hook to adjust the clone instead.  */
   const bool strub_eligible
     = (consider_strub
-       && (is_always_inline
-	   || !lookup_attribute ("noipa",
-				 DECL_ATTRIBUTES (node->decl))));
+       && (is_always_inline || can_strub_p (node)));
   const bool at_calls_eligible
     = (consider_at_calls && strub_eligible
        && can_strub_at_calls_p (node));
   const bool internal_eligible
     = (consider_internal && strub_eligible
        && (is_always_inline
-	   || (!lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl))
-	       && can_strub_internally_p (node))));
+	   || can_strub_internally_p (node)));
 
   /* In addition to the strict eligibility requirements, some additional
      constraints are placed on implicit selection of certain modes.  These do
@@ -672,14 +739,15 @@ set_strub_mode (cgraph_node *node)
   return mode;
 }
 
-#if 0
 /* Non-strub functions shouldn't be called from strub functions,
-   except through callable ones.  */
+   except through callable ones.  Always inline strub functions can
+   only be called from strub functions.  */
 
 static bool
 strub_callable_from_p (cgraph_node *callee, cgraph_node *caller)
 {
   strub_mode caller_mode = get_strub_mode (caller);
+  strub_mode callee_mode = get_strub_mode (callee);
 
   switch (caller_mode)
     {
@@ -692,15 +760,13 @@ strub_callable_from_p (cgraph_node *callee, cgraph_node *caller)
     case STRUB_WRAPPER:
     case STRUB_DISABLED:
     case STRUB_CALLABLE:
-      return true;
+      return callee_mode != STRUB_INLINABLE;
 
     default:
       gcc_unreachable ();
     }
 
-  strub_mode callee_mode = get_strub_mode (callee);
-
-  switch (caller_mode)
+  switch (callee_mode)
     {
     case STRUB_WRAPPED:
     case STRUB_AT_CALLS:
@@ -709,17 +775,20 @@ strub_callable_from_p (cgraph_node *callee, cgraph_node *caller)
       break;
 
     case STRUB_WRAPPER:
+      return (flag_strub >= 0);
+
     case STRUB_DISABLED:
-    case STRUB_CALLABLE:
       return false;
 
+    case STRUB_CALLABLE:
+      break;
+
     default:
       gcc_unreachable ();
     }
 
   return true;
 }
-#endif
 
 /* We wish to avoid inlining WRAPPED functions back into their
    WRAPPERs.  More generally, we wish to avoid inlining
@@ -1362,6 +1431,49 @@ make_pass_ipa_strub_mode (gcc::context *ctxt)
   return new pass_ipa_strub_mode (ctxt);
 }
 
+/* Check that strub functions don't call non-strub functions, and that
+   always_inline strub functions are only called by strub
+   functions.  */
+static void
+verify_strub ()
+{
+  cgraph_node *node;
+
+  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
+  {
+    enum strub_mode caller_mode = get_strub_mode (node);
+    bool strub_context
+      = (caller_mode == STRUB_AT_CALLS
+	 || caller_mode == STRUB_WRAPPED
+	 || caller_mode == STRUB_INLINABLE);
+
+    for (cgraph_edge *e = node->callees; e; e = e->next_callee)
+      if (e->indirect_unknown_callee)
+	{
+	  if (!strub_context)
+	    continue;
+
+	  tree callee_fntype = gimple_call_fntype (e->call_stmt);
+	  enum strub_mode callee_mode
+	    = get_strub_mode_from_type (callee_fntype);
+
+	  if (callee_mode == STRUB_DISABLED
+	      || callee_mode == STRUB_INTERNAL)
+	    error_at (gimple_location (e->call_stmt),
+		      "indirect non-strub call in strub context %qD",
+		      node->decl);
+	}
+      else if (!strub_callable_from_p (e->callee, node))
+	error_at (gimple_location (e->call_stmt),
+		  "calling non-strub %qD in strub context %qD",
+		  e->callee->decl, node->decl);
+  }
+
+  /* ??? Check strub-wise pointer type compatibility of variables and
+     functions, or is this already taken care of on account of the
+     attribute's being marked as affecting type identity?  */
+}
+
 unsigned int
 pass_ipa_strub::execute (function *)
 {
@@ -1375,9 +1487,10 @@ pass_ipa_strub::execute (function *)
       {
 	int named_args = 0;
 
-	/* Adjust the signature, and all callers.  Add the new argument after all
-	   named arguments, so as to not mess with attr_fnspec or any other
-	   attributes that reference parameters.  */
+	/* Adjust the signature, and all callers.  Add the new
+	   argument after all named arguments, so as to not mess with
+	   attr_fnspec or any other attributes that reference
+	   parameters.  */
 	TREE_TYPE (onode->decl) = build_distinct_type_copy (TREE_TYPE
 							    (onode->decl));
 
@@ -2295,8 +2408,7 @@ pass_ipa_strub::execute (function *)
 #endif
   }
 
-  /* ??? Check that strub functions don't call non-strub functions, and that
-     always_inline strub functions are only called by strub functions.  */
+  verify_strub ();
 
   return 0;
 }

