public inbox for gcc-patches@gcc.gnu.org
 help / color / mirror / Atom feed
* [PATCH 1/2] c++: make manifestly_const_eval tri-state
@ 2023-01-27 22:02 Patrick Palka
  2023-01-27 22:02 ` [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243] Patrick Palka
  2023-01-30 20:02 ` [PATCH 1/2] c++: make manifestly_const_eval tri-state Jason Merrill
  0 siblings, 2 replies; 15+ messages in thread
From: Patrick Palka @ 2023-01-27 22:02 UTC (permalink / raw)
  To: gcc-patches; +Cc: jason, Patrick Palka

This patch turns the manifestly_const_eval flag used by the constexpr
machinery into a tri-state enum so that we're able to express wanting
to fold __builtin_is_constant_evaluated to false via late speculative
constexpr evaluation.  Of all the entry points to constexpr evaluation
only maybe_constant_value is changed to take a tri-state value; the
others continue to take bool.  The subsequent patch will use this to fold
the builtin to false when called from cp_fold_function.

gcc/cp/ChangeLog:

	* constexpr.cc (constexpr_call::manifestly_const_eval): Give
	it type int instead of bool.
	(constexpr_ctx::manifestly_const_eval): Give it type mce_value
	instead of bool.
	(cxx_eval_builtin_function_call): Adjust after making
	manifestly_const_eval tri-state.
	(cxx_eval_call_expression): Likewise.
	(cxx_eval_binary_expression): Likewise.
	(cxx_eval_conditional_expression): Likewise.
	(cxx_eval_constant_expression): Likewise.
	(cxx_eval_outermost_constant_expr): Likewise.
	(cxx_constant_value): Likewise.
	(cxx_constant_dtor): Likewise.
	(maybe_constant_value): Give manifestly_const_eval parameter
	type mce_value instead of bool and adjust accordingly.
	(fold_non_dependent_expr_template): Adjust call
	to cxx_eval_outermost_constant_expr.
	(fold_non_dependent_expr): Likewise.
	(maybe_constant_init_1): Likewise.
	* constraint.cc (satisfy_atom): Adjust call to
	maybe_constant_value.
	* cp-tree.h (enum class mce_value): Define.
	(maybe_constant_value): Adjust manifestly_const_eval parameter
	type and default argument.
	* decl.cc (compute_array_index_type_loc): Adjust call to
	maybe_constant_value.
	* pt.cc (convert_nontype_argument): Likewise.
---
 gcc/cp/constexpr.cc  | 61 ++++++++++++++++++++++++--------------------
 gcc/cp/constraint.cc |  3 +--
 gcc/cp/cp-tree.h     | 18 ++++++++++++-
 gcc/cp/decl.cc       |  2 +-
 gcc/cp/pt.cc         |  6 ++---
 5 files changed, 54 insertions(+), 36 deletions(-)

diff --git a/gcc/cp/constexpr.cc b/gcc/cp/constexpr.cc
index be99bec17e7..34662198903 100644
--- a/gcc/cp/constexpr.cc
+++ b/gcc/cp/constexpr.cc
@@ -1119,8 +1119,8 @@ struct GTY((for_user)) constexpr_call {
   /* The hash of this call; we remember it here to avoid having to
      recalculate it when expanding the hash table.  */
   hashval_t hash;
-  /* Whether __builtin_is_constant_evaluated() should evaluate to true.  */
-  bool manifestly_const_eval;
+  /* The raw value of constexpr_ctx::manifestly_const_eval.  */
+  int manifestly_const_eval;
 };
 
 struct constexpr_call_hasher : ggc_ptr_hash<constexpr_call>
@@ -1248,7 +1248,7 @@ struct constexpr_ctx {
      trying harder to get a constant value.  */
   bool strict;
   /* Whether __builtin_is_constant_evaluated () should be true.  */
-  bool manifestly_const_eval;
+  mce_value manifestly_const_eval;
 };
 
 /* This internal flag controls whether we should avoid doing anything during
@@ -1463,7 +1463,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
   /* If we aren't requiring a constant expression, defer __builtin_constant_p
      in a constexpr function until we have values for the parameters.  */
   if (bi_const_p
-      && !ctx->manifestly_const_eval
+      && ctx->manifestly_const_eval == mce_unknown
       && current_function_decl
       && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
     {
@@ -1479,12 +1479,13 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
   if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
 			 BUILT_IN_FRONTEND))
     {
-      if (!ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_unknown)
 	{
 	  *non_constant_p = true;
 	  return t;
 	}
-      return boolean_true_node;
+      return constant_boolean_node (ctx->manifestly_const_eval == mce_true,
+				    boolean_type_node);
     }
 
   if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION, BUILT_IN_FRONTEND))
@@ -1591,7 +1592,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
     }
 
   bool save_ffbcp = force_folding_builtin_constant_p;
-  force_folding_builtin_constant_p |= ctx->manifestly_const_eval;
+  force_folding_builtin_constant_p |= ctx->manifestly_const_eval != mce_unknown;
   tree save_cur_fn = current_function_decl;
   /* Return name of ctx->call->fundef->decl for __builtin_FUNCTION ().  */
   if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION)
@@ -2644,7 +2645,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree t,
   location_t loc = cp_expr_loc_or_input_loc (t);
   tree fun = get_function_named_in_call (t);
   constexpr_call new_call
-    = { NULL, NULL, NULL, 0, ctx->manifestly_const_eval };
+    = { NULL, NULL, NULL, 0, (int)ctx->manifestly_const_eval };
   int depth_ok;
 
   if (fun == NULL_TREE)
@@ -2916,7 +2917,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree t,
       location_t save_loc = input_location;
       input_location = loc;
       ++function_depth;
-      if (ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_true)
 	FNDECL_MANIFESTLY_CONST_EVALUATED (fun) = true;
       instantiate_decl (fun, /*defer_ok*/false, /*expl_inst*/false);
       --function_depth;
@@ -3676,7 +3677,7 @@ cxx_eval_binary_expression (const constexpr_ctx *ctx, tree t,
 
   if (r == NULL_TREE)
     {
-      if (ctx->manifestly_const_eval
+      if (ctx->manifestly_const_eval == mce_true
 	  && (flag_constexpr_fp_except
 	      || TREE_CODE (type) != REAL_TYPE))
 	{
@@ -3741,13 +3742,13 @@ cxx_eval_conditional_expression (const constexpr_ctx *ctx, tree t,
 	 without manifestly_const_eval even expressions or parts thereof which
 	 will later be manifestly const_eval evaluated), otherwise fold it to
 	 true.  */
-      if (ctx->manifestly_const_eval)
-	val = boolean_true_node;
-      else
+      if (ctx->manifestly_const_eval == mce_unknown)
 	{
 	  *non_constant_p = true;
 	  return t;
 	}
+      val = constant_boolean_node (ctx->manifestly_const_eval == mce_true,
+				   boolean_type_node);
     }
   /* Don't VERIFY_CONSTANT the other operands.  */
   if (integer_zerop (val))
@@ -7055,7 +7056,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
 	      r = v;
 	      break;
 	    }
-      if (ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_true)
 	maybe_warn_about_constant_value (loc, t);
       if (COMPLETE_TYPE_P (TREE_TYPE (t))
 	  && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/false))
@@ -7644,7 +7645,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
 	if (TREE_CODE (t) == CONVERT_EXPR
 	    && ARITHMETIC_TYPE_P (type)
 	    && INDIRECT_TYPE_P (TREE_TYPE (op))
-	    && ctx->manifestly_const_eval)
+	    && ctx->manifestly_const_eval == mce_true)
 	  {
 	    if (!ctx->quiet)
 	      error_at (loc,
@@ -8137,7 +8138,7 @@ mark_non_constant (tree t)
 static tree
 cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
 				  bool strict = true,
-				  bool manifestly_const_eval = false,
+				  mce_value manifestly_const_eval = mce_unknown,
 				  bool constexpr_dtor = false,
 				  tree object = NULL_TREE)
 {
@@ -8155,10 +8156,11 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
   constexpr_global_ctx global_ctx;
   constexpr_ctx ctx = { &global_ctx, NULL, NULL, NULL, NULL, NULL, NULL,
 			allow_non_constant, strict,
-			manifestly_const_eval || !allow_non_constant };
+			!allow_non_constant ? mce_true : manifestly_const_eval };
 
   /* Turn off -frounding-math for manifestly constant evaluation.  */
-  warning_sentinel rm (flag_rounding_math, ctx.manifestly_const_eval);
+  warning_sentinel rm (flag_rounding_math,
+		       ctx.manifestly_const_eval == mce_true);
   tree type = initialized_type (t);
   tree r = t;
   bool is_consteval = false;
@@ -8247,7 +8249,7 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
   auto_vec<tree, 16> cleanups;
   global_ctx.cleanups = &cleanups;
 
-  if (manifestly_const_eval)
+  if (manifestly_const_eval == mce_true)
     instantiate_constexpr_fns (r);
   r = cxx_eval_constant_expression (&ctx, r, vc_prvalue,
 				    &non_constant_p, &overflow_p);
@@ -8386,7 +8388,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
 		    tsubst_flags_t complain /* = tf_error */)
 {
   bool sfinae = !(complain & tf_error);
-  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, true, false, decl);
+  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, mce_true, false, decl);
   if (sfinae && !TREE_CONSTANT (r))
     r = error_mark_node;
   return r;
@@ -8398,7 +8400,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
 void
 cxx_constant_dtor (tree t, tree decl)
 {
-  cxx_eval_outermost_constant_expr (t, false, true, true, true, decl);
+  cxx_eval_outermost_constant_expr (t, false, true, mce_true, true, decl);
 }
 
 /* Helper routine for fold_simple function.  Either return simplified
@@ -8484,7 +8486,7 @@ static GTY((deletable)) hash_map<tree, tree> *cv_cache;
 
 tree
 maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
-		      bool manifestly_const_eval /* = false */)
+		      mce_value manifestly_const_eval /* = mce_unknown */)
 {
   tree r;
 
@@ -8499,8 +8501,9 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
     /* No caching or evaluation needed.  */
     return t;
 
-  if (manifestly_const_eval)
-    return cxx_eval_outermost_constant_expr (t, true, true, true, false, decl);
+  if (manifestly_const_eval != mce_unknown)
+    return cxx_eval_outermost_constant_expr (t, true, true,
+					     manifestly_const_eval, false, decl);
 
   if (cv_cache == NULL)
     cv_cache = hash_map<tree, tree>::create_ggc (101);
@@ -8524,7 +8527,8 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
     return t;
 
   uid_sensitive_constexpr_evaluation_checker c;
-  r = cxx_eval_outermost_constant_expr (t, true, true, false, false, decl);
+  r = cxx_eval_outermost_constant_expr (t, true, true,
+					manifestly_const_eval, false, decl);
   gcc_checking_assert (r == t
 		       || CONVERT_EXPR_P (t)
 		       || TREE_CODE (t) == VIEW_CONVERT_EXPR
@@ -8590,7 +8594,7 @@ fold_non_dependent_expr_template (tree t, tsubst_flags_t complain,
 	return t;
 
       tree r = cxx_eval_outermost_constant_expr (t, true, true,
-						 manifestly_const_eval,
+						 mce_value (manifestly_const_eval),
 						 false, object);
       /* cp_tree_equal looks through NOPs, so allow them.  */
       gcc_checking_assert (r == t
@@ -8637,7 +8641,7 @@ fold_non_dependent_expr (tree t,
     return fold_non_dependent_expr_template (t, complain,
 					     manifestly_const_eval, object);
 
-  return maybe_constant_value (t, object, manifestly_const_eval);
+  return maybe_constant_value (t, object, (mce_value)manifestly_const_eval);
 }
 
 /* Like fold_non_dependent_expr, but if EXPR couldn't be folded to a constant,
@@ -8715,7 +8719,8 @@ maybe_constant_init_1 (tree t, tree decl, bool allow_non_constant,
       bool is_static = (decl && DECL_P (decl)
 			&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
       t = cxx_eval_outermost_constant_expr (t, allow_non_constant, !is_static,
-					    manifestly_const_eval, false, decl);
+					    mce_value (manifestly_const_eval),
+					    false, decl);
     }
   if (TREE_CODE (t) == TARGET_EXPR)
     {
diff --git a/gcc/cp/constraint.cc b/gcc/cp/constraint.cc
index 2e5acdf8fcb..9374327008b 100644
--- a/gcc/cp/constraint.cc
+++ b/gcc/cp/constraint.cc
@@ -3068,8 +3068,7 @@ satisfy_atom (tree t, tree args, sat_info info)
     }
   else
     {
-      result = maybe_constant_value (result, NULL_TREE,
-				     /*manifestly_const_eval=*/true);
+      result = maybe_constant_value (result, NULL_TREE, mce_true);
       if (!TREE_CONSTANT (result))
 	result = error_mark_node;
     }
diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
index 74b7ab71ca5..2d39185b182 100644
--- a/gcc/cp/cp-tree.h
+++ b/gcc/cp/cp-tree.h
@@ -8456,6 +8456,22 @@ struct GTY((for_user)) constexpr_fundef {
   tree result;
 };
 
+/* Used by the constexpr machinery to control folding of
+   __builtin_is_constant_evaluated.  */
+
+enum class mce_value
+{
+  /* Treat __builtin_is_constant_evaluated as non-constant.  */
+  mce_unknown = 0,
+  /* Fold it to true.  */
+  mce_true = 1,
+  /* Fold it to false.  */
+  mce_false = -1,
+};
+constexpr mce_value mce_unknown = mce_value::mce_unknown;
+constexpr mce_value mce_true = mce_value::mce_true;
+constexpr mce_value mce_false = mce_value::mce_false;
+
 extern void fini_constexpr			(void);
 extern bool literal_type_p                      (tree);
 extern void maybe_save_constexpr_fundef		(tree);
@@ -8484,7 +8500,7 @@ inline tree cxx_constant_value (tree t, tsubst_flags_t complain)
 { return cxx_constant_value (t, NULL_TREE, complain); }
 extern void cxx_constant_dtor			(tree, tree);
 extern tree cxx_constant_init			(tree, tree = NULL_TREE);
-extern tree maybe_constant_value		(tree, tree = NULL_TREE, bool = false);
+extern tree maybe_constant_value		(tree, tree = NULL_TREE, mce_value = mce_unknown);
 extern tree maybe_constant_init			(tree, tree = NULL_TREE, bool = false);
 extern tree fold_non_dependent_expr		(tree,
 						 tsubst_flags_t = tf_warning_or_error,
diff --git a/gcc/cp/decl.cc b/gcc/cp/decl.cc
index d606b31d7a7..a023c38c59d 100644
--- a/gcc/cp/decl.cc
+++ b/gcc/cp/decl.cc
@@ -11372,7 +11372,7 @@ compute_array_index_type_loc (location_t name_loc, tree name, tree size,
 				    cp_convert (ssizetype, integer_one_node,
 						complain),
 				    complain);
-	itype = maybe_constant_value (itype, NULL_TREE, true);
+	itype = maybe_constant_value (itype, NULL_TREE, mce_true);
       }
 
       if (!TREE_CONSTANT (itype))
diff --git a/gcc/cp/pt.cc b/gcc/cp/pt.cc
index 64e9128a5f1..4d82666891c 100644
--- a/gcc/cp/pt.cc
+++ b/gcc/cp/pt.cc
@@ -7390,16 +7390,14 @@ convert_nontype_argument (tree type, tree expr, tsubst_flags_t complain)
 	      IMPLICIT_CONV_EXPR_NONTYPE_ARG (expr) = true;
 	      return expr;
 	    }
-	  expr = maybe_constant_value (expr, NULL_TREE,
-				       /*manifestly_const_eval=*/true);
+	  expr = maybe_constant_value (expr, NULL_TREE, mce_true);
 	  expr = convert_from_reference (expr);
 	  /* EXPR may have become value-dependent.  */
 	  val_dep_p = value_dependent_expression_p (expr);
 	}
       else if (TYPE_PTR_OR_PTRMEM_P (type))
 	{
-	  tree folded = maybe_constant_value (expr, NULL_TREE,
-					      /*manifestly_const_eval=*/true);
+	  tree folded = maybe_constant_value (expr, NULL_TREE, mce_true);
 	  if (TYPE_PTR_P (type) ? integer_zerop (folded)
 	      : null_member_pointer_value_p (folded))
 	    expr = folded;
-- 
2.39.1.348.g5dec958dcf


^ permalink raw reply	[flat|nested] 15+ messages in thread

* [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-01-27 22:02 [PATCH 1/2] c++: make manifestly_const_eval tri-state Patrick Palka
@ 2023-01-27 22:02 ` Patrick Palka
  2023-01-27 22:05   ` Patrick Palka
  2023-01-30 20:05   ` Jason Merrill
  2023-01-30 20:02 ` [PATCH 1/2] c++: make manifestly_const_eval tri-state Jason Merrill
  1 sibling, 2 replies; 15+ messages in thread
From: Patrick Palka @ 2023-01-27 22:02 UTC (permalink / raw)
  To: gcc-patches; +Cc: jason, Patrick Palka

This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false if the
expression in question would be later manifestly constant evaluated (in
which case it must be folded to true).

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded as false during cp_fold_function, since at that point
we're sure we're doing manifestly constant evaluation.  To that end
we add a flags parameter to cp_fold that controls what mce_value the
CALL_EXPR case passes to maybe_constant_value.

Bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
trunk?

	PR c++/108243

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(cp_fold): Add fold_flags parameter.  Don't cache if flags
	isn't empty.
	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
	if ff_genericize is set.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/pr108243.C: New test.
---
 gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
 gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
 2 files changed, 76 insertions(+), 29 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index a35cedd05cc..d023a63768f 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+};
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -996,9 +1004,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags flags;
+  cp_fold_data (fold_flags flags): flags (flags) {}
 };
 
 static tree
@@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  cp_fold_data data (ff_genericize);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_none);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2469,7 +2476,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2490,8 +2497,11 @@ cp_fold (tree x)
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
-  if (tree *cached = fold_cache->get (x))
-    return *cached;
+  bool cache_p = (flags == ff_none);
+
+  if (cache_p)
+    if (tree *cached = fold_cache->get (x))
+      return *cached;
 
   uid_sensitive_constexpr_evaluation_checker c;
 
@@ -2526,7 +2536,7 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2571,7 +2581,7 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2763,8 +2773,8 @@ cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2854,7 +2864,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2908,7 +2918,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2931,7 +2941,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2944,7 +2954,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_genericize)
+	      /* At genericization time it's safe to fold
+		 __builtin_is_constant_evaluated to false.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2971,7 +2989,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3002,7 +3020,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3019,10 +3037,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3050,7 +3068,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
@@ -3069,7 +3087,7 @@ cp_fold (tree x)
       copy_warning (x, org_x);
     }
 
-  if (!c.evaluation_restricted_p ())
+  if (cache_p && !c.evaluation_restricted_p ())
     {
       fold_cache->put (org_x, x);
       /* Prevent that we try to fold an already folded result again.  */
diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
new file mode 100644
index 00000000000..4c45dbba13c
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/pr108243.C
@@ -0,0 +1,29 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+// { dg-final { scan-tree-dump-not "= bar" "original" } }
-- 
2.39.1.348.g5dec958dcf


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-01-27 22:02 ` [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243] Patrick Palka
@ 2023-01-27 22:05   ` Patrick Palka
  2023-01-30 20:05   ` Jason Merrill
  1 sibling, 0 replies; 15+ messages in thread
From: Patrick Palka @ 2023-01-27 22:05 UTC (permalink / raw)
  To: Patrick Palka; +Cc: gcc-patches, jason

On Fri, 27 Jan 2023, Patrick Palka wrote:

> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function, since at that point
> we're sure we're doing manifestly constant evaluation.  To that end

"we're sure we're done with manifestly constant evaluation" rather

> we add a flags parameter to cp_fold that controls what mce_value the
> CALL_EXPR case passes to maybe_constant_value.
> 
> bootstrapped and rgetsted no x86_64-pc-linux-gnu, does this look OK for
> trunk?
> 
> 	PR c++/108243
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> 	if if ff_genericize is set.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/pr108243.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
>  gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>  2 files changed, 76 insertions(+), 29 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index a35cedd05cc..d023a63768f 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +};
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -996,9 +1004,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags flags;
> +  cp_fold_data (fold_flags flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_none);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>  
>    uid_sensitive_constexpr_evaluation_checker c;
>  
> @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>  
>        if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>  	{
> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>  	      {
>  		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>  		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>  		break;
>  	      }
>  	  }
> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>  	int m = call_expr_nargs (x);
>  	for (int i = 0; i < m; i++)
>  	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>  	    if (r != CALL_EXPR_ARG (x, i))
>  	      {
>  		if (r == error_mark_node)
> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>  
>  	if (TREE_CODE (r) != CALL_EXPR)
>  	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>  	    break;
>  	  }
>  
> @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>  	   constant, but the call followed by an INDIRECT_REF is.  */
>  	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>  	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_genericize)
> +	      /* At genericization time it's safe to fold
> +		 __builtin_is_constant_evaluated to false.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>  	optimize = sv;
>  
>          if (TREE_CODE (r) != CALL_EXPR)
> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>  	vec<constructor_elt, va_gc> *nelts = NULL;
>  	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>  	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>  	    if (op != p->value)
>  	      {
>  		if (op == error_mark_node)
> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>  
>  	for (int i = 0; i < n; i++)
>  	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>  	    if (op != TREE_VEC_ELT (x, i))
>  	      {
>  		if (!changed)
> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>      case ARRAY_RANGE_REF:
>  
>        loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>  
>        if (op0 != TREE_OPERAND (x, 0)
>  	  || op1 != TREE_OPERAND (x, 1)
> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>        /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>  	 folding, evaluates to an invariant.  In that case no need to wrap
>  	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>        if (tree_invariant_p (r))
>  	x = r;
>        break;
> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>        copy_warning (x, org_x);
>      }
>  
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>      {
>        fold_cache->put (org_x, x);
>        /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
> new file mode 100644
> index 00000000000..4c45dbba13c
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> @@ -0,0 +1,29 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> -- 
> 2.39.1.348.g5dec958dcf
> 
> 


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 1/2] c++: make manifestly_const_eval tri-state
  2023-01-27 22:02 [PATCH 1/2] c++: make manifestly_const_eval tri-state Patrick Palka
  2023-01-27 22:02 ` [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243] Patrick Palka
@ 2023-01-30 20:02 ` Jason Merrill
  2023-02-03 21:21   ` Patrick Palka
  1 sibling, 1 reply; 15+ messages in thread
From: Jason Merrill @ 2023-01-30 20:02 UTC (permalink / raw)
  To: Patrick Palka, gcc-patches

On 1/27/23 17:02, Patrick Palka wrote:
> This patch turns the manifestly_const_eval flag used by the constexpr
> machinery into a tri-state enum so that we're able to express wanting
> to fold __builtin_is_constant_evaluated to false via late speculative
> constexpr evaluation.  Of all the entry points to constexpr evaluation
> only maybe_constant_value is changed to take a tri-state value; the
> others continue to take bool.  The subsequent patch will use this to fold
> the builtin to false when called from cp_fold_function.
> 
> gcc/cp/ChangeLog:
> 
> 	* constexpr.cc (constexpr_call::manifestly_const_eval): Give
> 	it type int instead of bool.
> 	(constexpr_ctx::manifestly_const_eval): Give it type mce_value
> 	instead of bool.
> 	(cxx_eval_builtin_function_call): Adjust after making
> 	manifestly_const_eval tri-state.
> 	(cxx_eval_call_expression): Likewise.
> 	(cxx_eval_binary_expression): Likewise.
> 	(cxx_eval_conditional_expression): Likewise.
> 	(cxx_eval_constant_expression): Likewise.
> 	(cxx_eval_outermost_constant_expr): Likewise.
> 	(cxx_constant_value): Likewise.
> 	(cxx_constant_dtor): Likewise.
> 	(maybe_constant_value): Give manifestly_const_eval parameter
> 	type mce_value instead of bool and adjust accordingly.
> 	(fold_non_dependent_expr_template): Adjust call
> 	to cxx_eval_outermost_constant_expr.
> 	(fold_non_dependent_expr): Likewise.
> 	(maybe_constant_init_1): Likewise.
> 	* constraint.cc (satisfy_atom): Adjust call to
> 	maybe_constant_value.
> 	* cp-tree.h (enum class mce_value): Define.
> 	(maybe_constant_value): Adjust manifestly_const_eval parameter
> 	type and default argument.
> 	* decl.cc (compute_array_index_type_loc): Adjust call to
> 	maybe_constant_value.
> 	* pt.cc (convert_nontype_argument): Likewise.
> ---
>   gcc/cp/constexpr.cc  | 61 ++++++++++++++++++++++++--------------------
>   gcc/cp/constraint.cc |  3 +--
>   gcc/cp/cp-tree.h     | 18 ++++++++++++-
>   gcc/cp/decl.cc       |  2 +-
>   gcc/cp/pt.cc         |  6 ++---
>   5 files changed, 54 insertions(+), 36 deletions(-)
> 
> diff --git a/gcc/cp/constexpr.cc b/gcc/cp/constexpr.cc
> index be99bec17e7..34662198903 100644
> --- a/gcc/cp/constexpr.cc
> +++ b/gcc/cp/constexpr.cc
> @@ -1119,8 +1119,8 @@ struct GTY((for_user)) constexpr_call {
>     /* The hash of this call; we remember it here to avoid having to
>        recalculate it when expanding the hash table.  */
>     hashval_t hash;
> -  /* Whether __builtin_is_constant_evaluated() should evaluate to true.  */
> -  bool manifestly_const_eval;
> +  /* The raw value of constexpr_ctx::manifestly_const_eval.  */
> +  int manifestly_const_eval;

Why not mce_value?

>   };
>   
>   struct constexpr_call_hasher : ggc_ptr_hash<constexpr_call>
> @@ -1248,7 +1248,7 @@ struct constexpr_ctx {
>        trying harder to get a constant value.  */
>     bool strict;
>     /* Whether __builtin_is_constant_evaluated () should be true.  */
> -  bool manifestly_const_eval;
> +  mce_value manifestly_const_eval;
>   };
>   
>   /* This internal flag controls whether we should avoid doing anything during
> @@ -1463,7 +1463,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
>     /* If we aren't requiring a constant expression, defer __builtin_constant_p
>        in a constexpr function until we have values for the parameters.  */
>     if (bi_const_p
> -      && !ctx->manifestly_const_eval
> +      && ctx->manifestly_const_eval == mce_unknown
>         && current_function_decl
>         && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
>       {
> @@ -1479,12 +1479,13 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
>     if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
>   			 BUILT_IN_FRONTEND))
>       {
> -      if (!ctx->manifestly_const_eval)
> +      if (ctx->manifestly_const_eval == mce_unknown)
>   	{
>   	  *non_constant_p = true;
>   	  return t;
>   	}
> -      return boolean_true_node;
> +      return constant_boolean_node (ctx->manifestly_const_eval == mce_true,
> +				    boolean_type_node);
>       }
>   
>     if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION, BUILT_IN_FRONTEND))
> @@ -1591,7 +1592,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
>       }
>   
>     bool save_ffbcp = force_folding_builtin_constant_p;
> -  force_folding_builtin_constant_p |= ctx->manifestly_const_eval;
> +  force_folding_builtin_constant_p |= ctx->manifestly_const_eval != mce_unknown;
>     tree save_cur_fn = current_function_decl;
>     /* Return name of ctx->call->fundef->decl for __builtin_FUNCTION ().  */
>     if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION)
> @@ -2644,7 +2645,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree t,
>     location_t loc = cp_expr_loc_or_input_loc (t);
>     tree fun = get_function_named_in_call (t);
>     constexpr_call new_call
> -    = { NULL, NULL, NULL, 0, ctx->manifestly_const_eval };
> +    = { NULL, NULL, NULL, 0, (int)ctx->manifestly_const_eval };
>     int depth_ok;
>   
>     if (fun == NULL_TREE)
> @@ -2916,7 +2917,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree t,
>         location_t save_loc = input_location;
>         input_location = loc;
>         ++function_depth;
> -      if (ctx->manifestly_const_eval)
> +      if (ctx->manifestly_const_eval == mce_true)
>   	FNDECL_MANIFESTLY_CONST_EVALUATED (fun) = true;
>         instantiate_decl (fun, /*defer_ok*/false, /*expl_inst*/false);
>         --function_depth;
> @@ -3676,7 +3677,7 @@ cxx_eval_binary_expression (const constexpr_ctx *ctx, tree t,
>   
>     if (r == NULL_TREE)
>       {
> -      if (ctx->manifestly_const_eval
> +      if (ctx->manifestly_const_eval == mce_true
>   	  && (flag_constexpr_fp_except
>   	      || TREE_CODE (type) != REAL_TYPE))
>   	{
> @@ -3741,13 +3742,13 @@ cxx_eval_conditional_expression (const constexpr_ctx *ctx, tree t,
>   	 without manifestly_const_eval even expressions or parts thereof which
>   	 will later be manifestly const_eval evaluated), otherwise fold it to
>   	 true.  */
> -      if (ctx->manifestly_const_eval)
> -	val = boolean_true_node;
> -      else
> +      if (ctx->manifestly_const_eval == mce_unknown)
>   	{
>   	  *non_constant_p = true;
>   	  return t;
>   	}
> +      val = constant_boolean_node (ctx->manifestly_const_eval == mce_true,
> +				   boolean_type_node);
>       }
>     /* Don't VERIFY_CONSTANT the other operands.  */
>     if (integer_zerop (val))
> @@ -7055,7 +7056,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
>   	      r = v;
>   	      break;
>   	    }
> -      if (ctx->manifestly_const_eval)
> +      if (ctx->manifestly_const_eval == mce_true)
>   	maybe_warn_about_constant_value (loc, t);
>         if (COMPLETE_TYPE_P (TREE_TYPE (t))
>   	  && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/false))
> @@ -7644,7 +7645,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
>   	if (TREE_CODE (t) == CONVERT_EXPR
>   	    && ARITHMETIC_TYPE_P (type)
>   	    && INDIRECT_TYPE_P (TREE_TYPE (op))
> -	    && ctx->manifestly_const_eval)
> +	    && ctx->manifestly_const_eval == mce_true)
>   	  {
>   	    if (!ctx->quiet)
>   	      error_at (loc,
> @@ -8137,7 +8138,7 @@ mark_non_constant (tree t)
>   static tree
>   cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
>   				  bool strict = true,
> -				  bool manifestly_const_eval = false,
> +				  mce_value manifestly_const_eval = mce_unknown,
>   				  bool constexpr_dtor = false,
>   				  tree object = NULL_TREE)
>   {
> @@ -8155,10 +8156,11 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
>     constexpr_global_ctx global_ctx;
>     constexpr_ctx ctx = { &global_ctx, NULL, NULL, NULL, NULL, NULL, NULL,
>   			allow_non_constant, strict,
> -			manifestly_const_eval || !allow_non_constant };
> +			!allow_non_constant ? mce_true : manifestly_const_eval };
>   
>     /* Turn off -frounding-math for manifestly constant evaluation.  */
> -  warning_sentinel rm (flag_rounding_math, ctx.manifestly_const_eval);
> +  warning_sentinel rm (flag_rounding_math,
> +		       ctx.manifestly_const_eval == mce_true);
>     tree type = initialized_type (t);
>     tree r = t;
>     bool is_consteval = false;
> @@ -8247,7 +8249,7 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
>     auto_vec<tree, 16> cleanups;
>     global_ctx.cleanups = &cleanups;
>   
> -  if (manifestly_const_eval)
> +  if (manifestly_const_eval == mce_true)
>       instantiate_constexpr_fns (r);
>     r = cxx_eval_constant_expression (&ctx, r, vc_prvalue,
>   				    &non_constant_p, &overflow_p);
> @@ -8386,7 +8388,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
>   		    tsubst_flags_t complain /* = tf_error */)
>   {
>     bool sfinae = !(complain & tf_error);
> -  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, true, false, decl);
> +  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, mce_true, false, decl);
>     if (sfinae && !TREE_CONSTANT (r))
>       r = error_mark_node;
>     return r;
> @@ -8398,7 +8400,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
>   void
>   cxx_constant_dtor (tree t, tree decl)
>   {
> -  cxx_eval_outermost_constant_expr (t, false, true, true, true, decl);
> +  cxx_eval_outermost_constant_expr (t, false, true, mce_true, true, decl);
>   }
>   
>   /* Helper routine for fold_simple function.  Either return simplified
> @@ -8484,7 +8486,7 @@ static GTY((deletable)) hash_map<tree, tree> *cv_cache;
>   
>   tree
>   maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
> -		      bool manifestly_const_eval /* = false */)
> +		      mce_value manifestly_const_eval /* = mce_unknown */)
>   {
>     tree r;
>   
> @@ -8499,8 +8501,9 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
>       /* No caching or evaluation needed.  */
>       return t;
>   
> -  if (manifestly_const_eval)
> -    return cxx_eval_outermost_constant_expr (t, true, true, true, false, decl);
> +  if (manifestly_const_eval != mce_unknown)
> +    return cxx_eval_outermost_constant_expr (t, true, true,
> +					     manifestly_const_eval, false, decl);
>   
>     if (cv_cache == NULL)
>       cv_cache = hash_map<tree, tree>::create_ggc (101);
> @@ -8524,7 +8527,8 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
>       return t;
>   
>     uid_sensitive_constexpr_evaluation_checker c;
> -  r = cxx_eval_outermost_constant_expr (t, true, true, false, false, decl);
> +  r = cxx_eval_outermost_constant_expr (t, true, true,
> +					manifestly_const_eval, false, decl);
>     gcc_checking_assert (r == t
>   		       || CONVERT_EXPR_P (t)
>   		       || TREE_CODE (t) == VIEW_CONVERT_EXPR
> @@ -8590,7 +8594,7 @@ fold_non_dependent_expr_template (tree t, tsubst_flags_t complain,
>   	return t;
>   
>         tree r = cxx_eval_outermost_constant_expr (t, true, true,
> -						 manifestly_const_eval,
> +						 mce_value (manifestly_const_eval),
>   						 false, object);
>         /* cp_tree_equal looks through NOPs, so allow them.  */
>         gcc_checking_assert (r == t
> @@ -8637,7 +8641,7 @@ fold_non_dependent_expr (tree t,
>       return fold_non_dependent_expr_template (t, complain,
>   					     manifestly_const_eval, object);
>   
> -  return maybe_constant_value (t, object, manifestly_const_eval);
> +  return maybe_constant_value (t, object, (mce_value)manifestly_const_eval);
>   }
>   
>   /* Like fold_non_dependent_expr, but if EXPR couldn't be folded to a constant,
> @@ -8715,7 +8719,8 @@ maybe_constant_init_1 (tree t, tree decl, bool allow_non_constant,
>         bool is_static = (decl && DECL_P (decl)
>   			&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
>         t = cxx_eval_outermost_constant_expr (t, allow_non_constant, !is_static,
> -					    manifestly_const_eval, false, decl);
> +					    mce_value (manifestly_const_eval),
> +					    false, decl);
>       }
>     if (TREE_CODE (t) == TARGET_EXPR)
>       {
> diff --git a/gcc/cp/constraint.cc b/gcc/cp/constraint.cc
> index 2e5acdf8fcb..9374327008b 100644
> --- a/gcc/cp/constraint.cc
> +++ b/gcc/cp/constraint.cc
> @@ -3068,8 +3068,7 @@ satisfy_atom (tree t, tree args, sat_info info)
>       }
>     else
>       {
> -      result = maybe_constant_value (result, NULL_TREE,
> -				     /*manifestly_const_eval=*/true);
> +      result = maybe_constant_value (result, NULL_TREE, mce_true);
>         if (!TREE_CONSTANT (result))
>   	result = error_mark_node;
>       }
> diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
> index 74b7ab71ca5..2d39185b182 100644
> --- a/gcc/cp/cp-tree.h
> +++ b/gcc/cp/cp-tree.h
> @@ -8456,6 +8456,22 @@ struct GTY((for_user)) constexpr_fundef {
>     tree result;
>   };
>   
> +/* Used by the constexpr machinery to control folding of
> +   __builtin_is_constant_evaluated.  */

Add "Whether the current context is manifestly constant-evaluated." at 
the start.

> +enum class mce_value
> +{
> +  /* Treat __builtin_is_constant_evaluated as non-constant.  */

"Unknown, so..."

> +  mce_unknown = 0,
> +  /* Fold it to true.  */
> +  mce_true = 1,
> +  /* Fold it to false.  */

"Primarily used during cp_fold_function."

OK with these tweaks.

> +  mce_false = -1,
> +};
> +constexpr mce_value mce_unknown = mce_value::mce_unknown;
> +constexpr mce_value mce_true = mce_value::mce_true;
> +constexpr mce_value mce_false = mce_value::mce_false;
> +
>   extern void fini_constexpr			(void);
>   extern bool literal_type_p                      (tree);
>   extern void maybe_save_constexpr_fundef		(tree);
> @@ -8484,7 +8500,7 @@ inline tree cxx_constant_value (tree t, tsubst_flags_t complain)
>   { return cxx_constant_value (t, NULL_TREE, complain); }
>   extern void cxx_constant_dtor			(tree, tree);
>   extern tree cxx_constant_init			(tree, tree = NULL_TREE);
> -extern tree maybe_constant_value		(tree, tree = NULL_TREE, bool = false);
> +extern tree maybe_constant_value		(tree, tree = NULL_TREE, mce_value = mce_unknown);
>   extern tree maybe_constant_init			(tree, tree = NULL_TREE, bool = false);
>   extern tree fold_non_dependent_expr		(tree,
>   						 tsubst_flags_t = tf_warning_or_error,
> diff --git a/gcc/cp/decl.cc b/gcc/cp/decl.cc
> index d606b31d7a7..a023c38c59d 100644
> --- a/gcc/cp/decl.cc
> +++ b/gcc/cp/decl.cc
> @@ -11372,7 +11372,7 @@ compute_array_index_type_loc (location_t name_loc, tree name, tree size,
>   				    cp_convert (ssizetype, integer_one_node,
>   						complain),
>   				    complain);
> -	itype = maybe_constant_value (itype, NULL_TREE, true);
> +	itype = maybe_constant_value (itype, NULL_TREE, mce_true);
>         }
>   
>         if (!TREE_CONSTANT (itype))
> diff --git a/gcc/cp/pt.cc b/gcc/cp/pt.cc
> index 64e9128a5f1..4d82666891c 100644
> --- a/gcc/cp/pt.cc
> +++ b/gcc/cp/pt.cc
> @@ -7390,16 +7390,14 @@ convert_nontype_argument (tree type, tree expr, tsubst_flags_t complain)
>   	      IMPLICIT_CONV_EXPR_NONTYPE_ARG (expr) = true;
>   	      return expr;
>   	    }
> -	  expr = maybe_constant_value (expr, NULL_TREE,
> -				       /*manifestly_const_eval=*/true);
> +	  expr = maybe_constant_value (expr, NULL_TREE, mce_true);
>   	  expr = convert_from_reference (expr);
>   	  /* EXPR may have become value-dependent.  */
>   	  val_dep_p = value_dependent_expression_p (expr);
>   	}
>         else if (TYPE_PTR_OR_PTRMEM_P (type))
>   	{
> -	  tree folded = maybe_constant_value (expr, NULL_TREE,
> -					      /*manifestly_const_eval=*/true);
> +	  tree folded = maybe_constant_value (expr, NULL_TREE, mce_true);
>   	  if (TYPE_PTR_P (type) ? integer_zerop (folded)
>   	      : null_member_pointer_value_p (folded))
>   	    expr = folded;


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-01-27 22:02 ` [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243] Patrick Palka
  2023-01-27 22:05   ` Patrick Palka
@ 2023-01-30 20:05   ` Jason Merrill
  2023-02-03 20:51     ` Patrick Palka
  1 sibling, 1 reply; 15+ messages in thread
From: Jason Merrill @ 2023-01-30 20:05 UTC (permalink / raw)
  To: Patrick Palka, gcc-patches

On 1/27/23 17:02, Patrick Palka wrote:
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function, since at that point
> we're sure we're doing manifestly constant evaluation.  To that end
> we add a flags parameter to cp_fold that controls what mce_value the
> CALL_EXPR case passes to maybe_constant_value.
> 
> bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
> trunk?
> 
> 	PR c++/108243
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> 	if ff_genericize is set.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/pr108243.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
>   gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>   2 files changed, 76 insertions(+), 29 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index a35cedd05cc..d023a63768f 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>   #include "omp-general.h"
>   #include "opts.h"
>   
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +};
> +
>   /* Forward declarations.  */
>   
>   static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -996,9 +1004,8 @@ struct cp_genericize_data
>   struct cp_fold_data
>   {
>     hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags flags;
> +  cp_fold_data (fold_flags flags): flags (flags) {}
>   };
>   
>   static tree
> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>   
>     if (data->pset.add (stmt))
>       {
> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	 here rather than in cp_genericize to avoid problems with the invisible
>   	 reference transition.  */
>       case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_init_expr (stmt_p);
>         break;
>   
>       case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_target_expr (stmt_p);
>   
>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   void
>   cp_fold_function (tree fndecl)
>   {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize);
>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>   }
>   
> @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>   {
>     while (true)
>       {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>         if (rval)
>   	x = mark_rvalue_use (x);
>         if (rval && DECL_P (x)
> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>     if (processing_template_decl)
>       return x;
>     x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_none);
>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>     return x;
>   }
> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>       Function returns X or its folded variant.  */
>   
>   static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags flags)
>   {
>     tree op0, op1, op2, op3;
>     tree org_x = x, r = NULL_TREE;
> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>     if (fold_cache == NULL)
>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>   
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>   
>     uid_sensitive_constexpr_evaluation_checker c;
>   
> @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>   	     folding of the operand should be in the caches and if in cp_fold_r
>   	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>   	  if (op0 == error_mark_node)
>   	    x = error_mark_node;
>   	  break;
> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>   	{
>   	  tree p = maybe_undo_parenthesized_ref (x);
>   	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>   	}
>         goto unary;
>   
> @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>       case COND_EXPR:
>         loc = EXPR_LOCATION (x);
>         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>   
>         if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>   	{
> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>   	      {
>   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>   		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>   		break;
>   	      }
>   	  }
> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>   	int m = call_expr_nargs (x);
>   	for (int i = 0; i < m; i++)
>   	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>   	    if (r != CALL_EXPR_ARG (x, i))
>   	      {
>   		if (r == error_mark_node)
> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>   
>   	if (TREE_CODE (r) != CALL_EXPR)
>   	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>   	    break;
>   	  }
>   
> @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>   	   constant, but the call followed by an INDIRECT_REF is.  */
>   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>   	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_genericize)
> +	      /* At genericization time it's safe to fold
> +		 __builtin_is_constant_evaluated to false.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>   	vec<constructor_elt, va_gc> *nelts = NULL;
>   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>   	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>   	    if (op != p->value)
>   	      {
>   		if (op == error_mark_node)
> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>   
>   	for (int i = 0; i < n; i++)
>   	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>   	    if (op != TREE_VEC_ELT (x, i))
>   	      {
>   		if (!changed)
> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>       case ARRAY_RANGE_REF:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>   
>         if (op0 != TREE_OPERAND (x, 0)
>   	  || op1 != TREE_OPERAND (x, 1)
> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>   	 folding, evaluates to an invariant.  In that case no need to wrap
>   	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>         if (tree_invariant_p (r))
>   	x = r;
>         break;
> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>         copy_warning (x, org_x);
>       }
>   
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>       {
>         fold_cache->put (org_x, x);
>         /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C b/gcc/testsuite/g++.dg/opt/pr108243.C
> new file mode 100644
> index 00000000000..4c45dbba13c
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> @@ -0,0 +1,29 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" } }

Let's also test a static initializer that can't be fully constant-evaluated.

Jason


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-01-30 20:05   ` Jason Merrill
@ 2023-02-03 20:51     ` Patrick Palka
  2023-02-03 20:57       ` Patrick Palka
  2023-02-05 20:11       ` Jason Merrill
  0 siblings, 2 replies; 15+ messages in thread
From: Patrick Palka @ 2023-02-03 20:51 UTC (permalink / raw)
  To: Jason Merrill; +Cc: Patrick Palka, gcc-patches

On Mon, 30 Jan 2023, Jason Merrill wrote:

> On 1/27/23 17:02, Patrick Palka wrote:
> > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > as an optimization barrier for our speculative constexpr evaluation,
> > since we don't want to prematurely fold the builtin to false if the
> > expression in question would be later manifestly constant evaluated (in
> > which case it must be folded to true).
> > 
> > This patch fixes this by permitting __builtin_is_constant_evaluated
> > to get folded as false during cp_fold_function, since at that point
> > we're sure we're doing manifestly constant evaluation.  To that end
> > we add a flags parameter to cp_fold that controls what mce_value the
> > CALL_EXPR case passes to maybe_constant_value.
> > 
> > bootstrapped and rgetsted no x86_64-pc-linux-gnu, does this look OK for
> > trunk?
> > 
> > 	PR c++/108243
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* cp-gimplify.cc (enum fold_flags): Define.
> > 	(cp_fold_data::genericize): Replace this data member with ...
> > 	(cp_fold_data::fold_flags): ... this.
> > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> > 	(cp_fold_function): Likewise.
> > 	(cp_fold_maybe_rvalue): Likewise.
> > 	(cp_fully_fold_init): Likewise.
> > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > 	isn't empty.
> > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > 	if if ff_genericize is set.
> > 
> > gcc/testsuite/ChangeLog:
> > 
> > 	* g++.dg/opt/pr108243.C: New test.
> > ---
> >   gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
> >   gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> >   2 files changed, 76 insertions(+), 29 deletions(-)
> >   create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index a35cedd05cc..d023a63768f 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
> >   #include "omp-general.h"
> >   #include "opts.h"
> >   +/* Flags for cp_fold and cp_fold_r.  */
> > +
> > +enum fold_flags {
> > +  ff_none = 0,
> > +  /* Whether we're being called from cp_fold_function.  */
> > +  ff_genericize = 1 << 0,
> > +};
> > +
> >   /* Forward declarations.  */
> >     static tree cp_genericize_r (tree *, int *, void *);
> >   static tree cp_fold_r (tree *, int *, void *);
> >   static void cp_genericize_tree (tree*, bool);
> > -static tree cp_fold (tree);
> > +static tree cp_fold (tree, fold_flags);
> >     /* Genericize a TRY_BLOCK.  */
> >   @@ -996,9 +1004,8 @@ struct cp_genericize_data
> >   struct cp_fold_data
> >   {
> >     hash_set<tree> pset;
> > -  bool genericize; // called from cp_fold_function?
> > -
> > -  cp_fold_data (bool g): genericize (g) {}
> > +  fold_flags flags;
> > +  cp_fold_data (fold_flags flags): flags (flags) {}
> >   };
> >     static tree
> > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >         break;
> >       }
> >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> >       if (data->pset.add (stmt))
> >       {
> > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	 here rather than in cp_genericize to avoid problems with the
> > invisible
> >   	 reference transition.  */
> >       case INIT_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_init_expr (stmt_p);
> >         break;
> >         case TARGET_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_target_expr (stmt_p);
> >           /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   void
> >   cp_fold_function (tree fndecl)
> >   {
> > -  cp_fold_data data (/*genericize*/true);
> > +  cp_fold_data data (ff_genericize);
> >     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> >   }
> >   @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> >   {
> >     while (true)
> >       {
> > -      x = cp_fold (x);
> > +      x = cp_fold (x, ff_none);
> >         if (rval)
> >   	x = mark_rvalue_use (x);
> >         if (rval && DECL_P (x)
> > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> >     if (processing_template_decl)
> >       return x;
> >     x = cp_fully_fold (x);
> > -  cp_fold_data data (/*genericize*/false);
> > +  cp_fold_data data (ff_none);
> >     cp_walk_tree (&x, cp_fold_r, &data, NULL);
> >     return x;
> >   }
> > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> >       Function returns X or its folded variant.  */
> >     static tree
> > -cp_fold (tree x)
> > +cp_fold (tree x, fold_flags flags)
> >   {
> >     tree op0, op1, op2, op3;
> >     tree org_x = x, r = NULL_TREE;
> > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> >     if (fold_cache == NULL)
> >       fold_cache = hash_map<tree, tree>::create_ggc (101);
> >   -  if (tree *cached = fold_cache->get (x))
> > -    return *cached;
> > +  bool cache_p = (flags == ff_none);
> > +
> > +  if (cache_p)
> > +    if (tree *cached = fold_cache->get (x))
> > +      return *cached;
> >       uid_sensitive_constexpr_evaluation_checker c;
> >   @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> >   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
> >   	     folding of the operand should be in the caches and if in
> > cp_fold_r
> >   	     it will modify it in place.  */
> > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> >   	  if (op0 == error_mark_node)
> >   	    x = error_mark_node;
> >   	  break;
> > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> >   	{
> >   	  tree p = maybe_undo_parenthesized_ref (x);
> >   	  if (p != x)
> > -	    return cp_fold (p);
> > +	    return cp_fold (p, flags);
> >   	}
> >         goto unary;
> >   @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> >       case COND_EXPR:
> >         loc = EXPR_LOCATION (x);
> >         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> >           if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> >   	{
> > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> >   	      {
> >   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> >   		  r = build_nop (TREE_TYPE (x), r);
> > -		x = cp_fold (r);
> > +		x = cp_fold (r, flags);
> >   		break;
> >   	      }
> >   	  }
> > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> >   	int m = call_expr_nargs (x);
> >   	for (int i = 0; i < m; i++)
> >   	  {
> > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> >   	    if (r != CALL_EXPR_ARG (x, i))
> >   	      {
> >   		if (r == error_mark_node)
> > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> >     	if (TREE_CODE (r) != CALL_EXPR)
> >   	  {
> > -	    x = cp_fold (r);
> > +	    x = cp_fold (r, flags);
> >   	    break;
> >   	  }
> >   @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> >   	   constant, but the call followed by an INDIRECT_REF is.  */
> >   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> >   	    && !flag_no_inline)
> > -	  r = maybe_constant_value (x);
> > +	  {
> > +	    mce_value manifestly_const_eval = mce_unknown;
> > +	    if (flags & ff_genericize)
> > +	      /* At genericization time it's safe to fold
> > +		 __builtin_is_constant_evaluated to false.  */
> > +	      manifestly_const_eval = mce_false;
> > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > +				      manifestly_const_eval);
> > +	  }
> >   	optimize = sv;
> >             if (TREE_CODE (r) != CALL_EXPR)
> > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> >   	vec<constructor_elt, va_gc> *nelts = NULL;
> >   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> >   	  {
> > -	    tree op = cp_fold (p->value);
> > +	    tree op = cp_fold (p->value, flags);
> >   	    if (op != p->value)
> >   	      {
> >   		if (op == error_mark_node)
> > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> >     	for (int i = 0; i < n; i++)
> >   	  {
> > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> >   	    if (op != TREE_VEC_ELT (x, i))
> >   	      {
> >   		if (!changed)
> > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> >       case ARRAY_RANGE_REF:
> >           loc = EXPR_LOCATION (x);
> > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> >           if (op0 != TREE_OPERAND (x, 0)
> >   	  || op1 != TREE_OPERAND (x, 1)
> > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> >         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
> >   	 folding, evaluates to an invariant.  In that case no need to wrap
> >   	 this folded tree with a SAVE_EXPR.  */
> > -      r = cp_fold (TREE_OPERAND (x, 0));
> > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> >         if (tree_invariant_p (r))
> >   	x = r;
> >         break;
> > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> >         copy_warning (x, org_x);
> >       }
> >   -  if (!c.evaluation_restricted_p ())
> > +  if (cache_p && !c.evaluation_restricted_p ())
> >       {
> >         fold_cache->put (org_x, x);
> >         /* Prevent that we try to fold an already folded result again.  */
> > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > new file mode 100644
> > index 00000000000..4c45dbba13c
> > --- /dev/null
> > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > @@ -0,0 +1,29 @@
> > +// PR c++/108243
> > +// { dg-do compile { target c++11 } }
> > +// { dg-additional-options "-O -fdump-tree-original" }
> > +
> > +constexpr int foo() {
> > +  return __builtin_is_constant_evaluated() + 1;
> > +}
> > +
> > +#if __cpp_if_consteval
> > +constexpr int bar() {
> > +  if consteval {
> > +    return 5;
> > +  } else {
> > +    return 4;
> > +  }
> > +}
> > +#endif
> > +
> > +int p, q;
> > +
> > +int main() {
> > +  p = foo();
> > +#if __cpp_if_consteval
> > +  q = bar();
> > +#endif
> > +}
> > +
> > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> 
> Let's also test a static initializer that can't be fully constant-evaluated.

D'oh, doing so revealed that cp_fold_function doesn't reach static
initializers; that's taken care of by cp_fully_fold_init.  So it seems
we need to make cp_fold when called from the latter entry point to also
assume m_c_e is false.  We can't re-use ff_genericize here because that
flag has additional effects in cp_fold_r, so it seems we need another
flag that only affects the manifestly constant-eval stuff; I called
it ff_mce_false.  How does the following look?

-- >8 --

Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false if the
expression in question would be later manifestly constant evaluated (in
which case it must be folded to true).

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded as false during cp_fold_function and cp_fully_fold_init,
since at these points we're sure we're done with manifestly constant
evaluation.  To that end we add a flags parameter to cp_fold that
controls whether we pass mce_false or mce_unknown to maybe_constant_value
when folding a CALL_EXPR.

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(cp_fold): Add fold_flags parameter.  Don't cache if flags
	isn't empty.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
 .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
 .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
 3 files changed, 104 insertions(+), 30 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..590ed787997 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+  /* Whether we're folding late enough that we could assume
+     we're definitely not in a manifestly constant-evaluated
+     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2485,7 +2498,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2506,8 +2519,11 @@ cp_fold (tree x)
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
-  if (tree *cached = fold_cache->get (x))
-    return *cached;
+  bool cache_p = (flags == ff_none);
+
+  if (cache_p)
+    if (tree *cached = fold_cache->get (x))
+      return *cached;
 
   uid_sensitive_constexpr_evaluation_checker c;
 
@@ -2542,7 +2558,7 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2587,7 +2603,7 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2779,8 +2795,8 @@ cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2886,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2906,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we can assume this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2944,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2967,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +2980,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3015,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3046,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3063,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3094,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
@@ -3085,7 +3113,7 @@ cp_fold (tree x)
       copy_warning (x, org_x);
     }
 
-  if (!c.evaluation_restricted_p ())
+  if (cache_p && !c.evaluation_restricted_p ())
     {
       fold_cache->put (org_x, x);
       /* Prevent that we try to fold an already folded result again.  */
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..ee05cbab785
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,14 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+void f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
-- 
2.39.1.388.g2fc9e9ca3c


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-03 20:51     ` Patrick Palka
@ 2023-02-03 20:57       ` Patrick Palka
  2023-02-05 20:11       ` Jason Merrill
  1 sibling, 0 replies; 15+ messages in thread
From: Patrick Palka @ 2023-02-03 20:57 UTC (permalink / raw)
  To: Patrick Palka; +Cc: Jason Merrill, gcc-patches

On Fri, 3 Feb 2023, Patrick Palka wrote:

> On Mon, 30 Jan 2023, Jason Merrill wrote:
> 
> > On 1/27/23 17:02, Patrick Palka wrote:
> > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > as an optimization barrier for our speculative constexpr evaluation,
> > > since we don't want to prematurely fold the builtin to false if the
> > > expression in question would be later manifestly constant evaluated (in
> > > which case it must be folded to true).
> > > 
> > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > to get folded as false during cp_fold_function, since at that point
> > > we're sure we're doing manifestly constant evaluation.  To that end
> > > we add a flags parameter to cp_fold that controls what mce_value the
> > > CALL_EXPR case passes to maybe_constant_value.
> > > 
> > > bootstrapped and rgetsted no x86_64-pc-linux-gnu, does this look OK for
> > > trunk?
> > > 
> > > 	PR c++/108243
> > > 
> > > gcc/cp/ChangeLog:
> > > 
> > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > 	(cp_fold_data::fold_flags): ... this.
> > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> > > 	(cp_fold_function): Likewise.
> > > 	(cp_fold_maybe_rvalue): Likewise.
> > > 	(cp_fully_fold_init): Likewise.
> > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > 	isn't empty.
> > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > 	if if ff_genericize is set.
> > > 
> > > gcc/testsuite/ChangeLog:
> > > 
> > > 	* g++.dg/opt/pr108243.C: New test.
> > > ---
> > >   gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
> > >   gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > >   2 files changed, 76 insertions(+), 29 deletions(-)
> > >   create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > 
> > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > index a35cedd05cc..d023a63768f 100644
> > > --- a/gcc/cp/cp-gimplify.cc
> > > +++ b/gcc/cp/cp-gimplify.cc
> > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
> > >   #include "omp-general.h"
> > >   #include "opts.h"
> > >   +/* Flags for cp_fold and cp_fold_r.  */
> > > +
> > > +enum fold_flags {
> > > +  ff_none = 0,
> > > +  /* Whether we're being called from cp_fold_function.  */
> > > +  ff_genericize = 1 << 0,
> > > +};
> > > +
> > >   /* Forward declarations.  */
> > >     static tree cp_genericize_r (tree *, int *, void *);
> > >   static tree cp_fold_r (tree *, int *, void *);
> > >   static void cp_genericize_tree (tree*, bool);
> > > -static tree cp_fold (tree);
> > > +static tree cp_fold (tree, fold_flags);
> > >     /* Genericize a TRY_BLOCK.  */
> > >   @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > >   struct cp_fold_data
> > >   {
> > >     hash_set<tree> pset;
> > > -  bool genericize; // called from cp_fold_function?
> > > -
> > > -  cp_fold_data (bool g): genericize (g) {}
> > > +  fold_flags flags;
> > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > >   };
> > >     static tree
> > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >         break;
> > >       }
> > >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > >       if (data->pset.add (stmt))
> > >       {
> > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >   	 here rather than in cp_genericize to avoid problems with the
> > > invisible
> > >   	 reference transition.  */
> > >       case INIT_EXPR:
> > > -      if (data->genericize)
> > > +      if (data->flags & ff_genericize)
> > >   	cp_genericize_init_expr (stmt_p);
> > >         break;
> > >         case TARGET_EXPR:
> > > -      if (data->genericize)
> > > +      if (data->flags & ff_genericize)
> > >   	cp_genericize_target_expr (stmt_p);
> > >           /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >   void
> > >   cp_fold_function (tree fndecl)
> > >   {
> > > -  cp_fold_data data (/*genericize*/true);
> > > +  cp_fold_data data (ff_genericize);
> > >     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > >   }
> > >   @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > >   {
> > >     while (true)
> > >       {
> > > -      x = cp_fold (x);
> > > +      x = cp_fold (x, ff_none);
> > >         if (rval)
> > >   	x = mark_rvalue_use (x);
> > >         if (rval && DECL_P (x)
> > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > >     if (processing_template_decl)
> > >       return x;
> > >     x = cp_fully_fold (x);
> > > -  cp_fold_data data (/*genericize*/false);
> > > +  cp_fold_data data (ff_none);
> > >     cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > >     return x;
> > >   }
> > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > >       Function returns X or its folded variant.  */
> > >     static tree
> > > -cp_fold (tree x)
> > > +cp_fold (tree x, fold_flags flags)
> > >   {
> > >     tree op0, op1, op2, op3;
> > >     tree org_x = x, r = NULL_TREE;
> > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > >     if (fold_cache == NULL)
> > >       fold_cache = hash_map<tree, tree>::create_ggc (101);
> > >   -  if (tree *cached = fold_cache->get (x))
> > > -    return *cached;
> > > +  bool cache_p = (flags == ff_none);
> > > +
> > > +  if (cache_p)
> > > +    if (tree *cached = fold_cache->get (x))
> > > +      return *cached;
> > >       uid_sensitive_constexpr_evaluation_checker c;
> > >   @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > >   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
> > >   	     folding of the operand should be in the caches and if in
> > > cp_fold_r
> > >   	     it will modify it in place.  */
> > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > >   	  if (op0 == error_mark_node)
> > >   	    x = error_mark_node;
> > >   	  break;
> > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > >   	{
> > >   	  tree p = maybe_undo_parenthesized_ref (x);
> > >   	  if (p != x)
> > > -	    return cp_fold (p);
> > > +	    return cp_fold (p, flags);
> > >   	}
> > >         goto unary;
> > >   @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > >       case COND_EXPR:
> > >         loc = EXPR_LOCATION (x);
> > >         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > >           if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > >   	{
> > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > >   	      {
> > >   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > >   		  r = build_nop (TREE_TYPE (x), r);
> > > -		x = cp_fold (r);
> > > +		x = cp_fold (r, flags);
> > >   		break;
> > >   	      }
> > >   	  }
> > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > >   	int m = call_expr_nargs (x);
> > >   	for (int i = 0; i < m; i++)
> > >   	  {
> > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > >   	    if (r != CALL_EXPR_ARG (x, i))
> > >   	      {
> > >   		if (r == error_mark_node)
> > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > >     	if (TREE_CODE (r) != CALL_EXPR)
> > >   	  {
> > > -	    x = cp_fold (r);
> > > +	    x = cp_fold (r, flags);
> > >   	    break;
> > >   	  }
> > >   @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > >   	   constant, but the call followed by an INDIRECT_REF is.  */
> > >   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > >   	    && !flag_no_inline)
> > > -	  r = maybe_constant_value (x);
> > > +	  {
> > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > +	    if (flags & ff_genericize)
> > > +	      /* At genericization time it's safe to fold
> > > +		 __builtin_is_constant_evaluated to false.  */
> > > +	      manifestly_const_eval = mce_false;
> > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > +				      manifestly_const_eval);
> > > +	  }
> > >   	optimize = sv;
> > >             if (TREE_CODE (r) != CALL_EXPR)
> > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > >   	vec<constructor_elt, va_gc> *nelts = NULL;
> > >   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > >   	  {
> > > -	    tree op = cp_fold (p->value);
> > > +	    tree op = cp_fold (p->value, flags);
> > >   	    if (op != p->value)
> > >   	      {
> > >   		if (op == error_mark_node)
> > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > >     	for (int i = 0; i < n; i++)
> > >   	  {
> > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > >   	    if (op != TREE_VEC_ELT (x, i))
> > >   	      {
> > >   		if (!changed)
> > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > >       case ARRAY_RANGE_REF:
> > >           loc = EXPR_LOCATION (x);
> > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > >           if (op0 != TREE_OPERAND (x, 0)
> > >   	  || op1 != TREE_OPERAND (x, 1)
> > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > >         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
> > >   	 folding, evaluates to an invariant.  In that case no need to wrap
> > >   	 this folded tree with a SAVE_EXPR.  */
> > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > >         if (tree_invariant_p (r))
> > >   	x = r;
> > >         break;
> > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > >         copy_warning (x, org_x);
> > >       }
> > >   -  if (!c.evaluation_restricted_p ())
> > > +  if (cache_p && !c.evaluation_restricted_p ())
> > >       {
> > >         fold_cache->put (org_x, x);
> > >         /* Prevent that we try to fold an already folded result again.  */
> > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > new file mode 100644
> > > index 00000000000..4c45dbba13c
> > > --- /dev/null
> > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > @@ -0,0 +1,29 @@
> > > +// PR c++/108243
> > > +// { dg-do compile { target c++11 } }
> > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > +
> > > +constexpr int foo() {
> > > +  return __builtin_is_constant_evaluated() + 1;
> > > +}
> > > +
> > > +#if __cpp_if_consteval
> > > +constexpr int bar() {
> > > +  if consteval {
> > > +    return 5;
> > > +  } else {
> > > +    return 4;
> > > +  }
> > > +}
> > > +#endif
> > > +
> > > +int p, q;
> > > +
> > > +int main() {
> > > +  p = foo();
> > > +#if __cpp_if_consteval
> > > +  q = bar();
> > > +#endif
> > > +}
> > > +
> > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > 
> > Let's also test a static initializer that can't be fully constant-evaluated.
> 
> D'oh, doing so revealed that cp_fold_function doesn't reach static
> initializers; that's taken care of by cp_fully_fold_init.  So it seems
> we need to make cp_fold when called from the latter entry point to also
> assume m_c_e is false.  We can't re-use ff_genericize here because that
> flag has additional effects in cp_fold_r, so it seems we need another
flag that only affects the manifestly constant-eval stuff; I called
> it ff_mce_false.  How does the following look?

N.B. cp_fully_fold_init is called only from three places:

  * from store_init_value shortly after manifestly-constant evaluation of the
  initializer
  * from split_nonconstant_init
  * and from check_for_mismatched_contracts

So it seems to always be called late enough that we can safely assume
m_c_e is false as in cp_fold_function.

> 
> -- >8 --
> 
> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>  [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function and cp_fully_fold_init,
> since at these points we're sure we're done with manifestly constant
> evaluation.  To that end we add a flags parameter to cp_fold that
> controls whether we pass mce_false or mce_unknown to maybe_constant_value
> when folding a CALL_EXPR.
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: If ff_genericize is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>  .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>  .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>  3 files changed, 104 insertions(+), 30 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..590ed787997 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding late enough that we could assume
> +     we're definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>  
>    uid_sensitive_constexpr_evaluation_checker c;
>  
> @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>  
>        if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>  	{
> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>  	      {
>  		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>  		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>  		break;
>  	      }
>  	  }
> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>  	  {
>  	    switch (DECL_FE_FUNCTION_CODE (callee))
>  	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>  	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we can assume this isn't a manifestly constant-evaluated
> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>  		break;
>  	      case CP_BUILT_IN_SOURCE_LOCATION:
>  		x = fold_builtin_source_location (x);
> @@ -2924,7 +2944,7 @@ cp_fold (tree x)
>  	int m = call_expr_nargs (x);
>  	for (int i = 0; i < m; i++)
>  	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>  	    if (r != CALL_EXPR_ARG (x, i))
>  	      {
>  		if (r == error_mark_node)
> @@ -2947,7 +2967,7 @@ cp_fold (tree x)
>  
>  	if (TREE_CODE (r) != CALL_EXPR)
>  	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>  	    break;
>  	  }
>  
> @@ -2960,7 +2980,15 @@ cp_fold (tree x)
>  	   constant, but the call followed by an INDIRECT_REF is.  */
>  	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>  	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>  	optimize = sv;
>  
>          if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3015,7 @@ cp_fold (tree x)
>  	vec<constructor_elt, va_gc> *nelts = NULL;
>  	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>  	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>  	    if (op != p->value)
>  	      {
>  		if (op == error_mark_node)
> @@ -3018,7 +3046,7 @@ cp_fold (tree x)
>  
>  	for (int i = 0; i < n; i++)
>  	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>  	    if (op != TREE_VEC_ELT (x, i))
>  	      {
>  		if (!changed)
> @@ -3035,10 +3063,10 @@ cp_fold (tree x)
>      case ARRAY_RANGE_REF:
>  
>        loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>  
>        if (op0 != TREE_OPERAND (x, 0)
>  	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3094,7 @@ cp_fold (tree x)
>        /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>  	 folding, evaluates to an invariant.  In that case no need to wrap
>  	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>        if (tree_invariant_p (r))
>  	x = r;
>        break;
> @@ -3085,7 +3113,7 @@ cp_fold (tree x)
>        copy_warning (x, org_x);
>      }
>  
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>      {
>        fold_cache->put (org_x, x);
>        /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..ee05cbab785
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,14 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +void f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> -- 
> 2.39.1.388.g2fc9e9ca3c
> 
> 


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 1/2] c++: make manifestly_const_eval tri-state
  2023-01-30 20:02 ` [PATCH 1/2] c++: make manifestly_const_eval tri-state Jason Merrill
@ 2023-02-03 21:21   ` Patrick Palka
  0 siblings, 0 replies; 15+ messages in thread
From: Patrick Palka @ 2023-02-03 21:21 UTC (permalink / raw)
  To: Jason Merrill; +Cc: Patrick Palka, gcc-patches

On Mon, 30 Jan 2023, Jason Merrill wrote:

> On 1/27/23 17:02, Patrick Palka wrote:
> > This patch turns the manifestly_const_eval flag used by the constexpr
> > machinery into a tri-state enum so that we're able to express wanting
> > to fold __builtin_is_constant_evaluated to false via late speculative
> > constexpr evaluation.  Of all the entry points to constexpr evaluation
> > only maybe_constant_value is changed to take a tri-state value; the
> > others continue to take bool.  The subsequent patch will use this to fold
> > the builtin to false when called from cp_fold_function.
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* constexpr.cc (constexpr_call::manifestly_const_eval): Give
> > 	it type int instead of bool.
> > 	(constexpr_ctx::manifestly_const_eval): Give it type mce_value
> > 	instead of bool.
> > 	(cxx_eval_builtin_function_call): Adjust after making
> > 	manifestly_const_eval tri-state.
> > 	(cxx_eval_call_expression): Likewise.
> > 	(cxx_eval_binary_expression): Likewise.
> > 	(cxx_eval_conditional_expression): Likewise.
> > 	(cxx_eval_constant_expression): Likewise.
> > 	(cxx_eval_outermost_constant_expr): Likewise.
> > 	(cxx_constant_value): Likewise.
> > 	(cxx_constant_dtor): Likewise.
> > 	(maybe_constant_value): Give manifestly_const_eval parameter
> > 	type mce_value instead of bool and adjust accordingly.
> > 	(fold_non_dependent_expr_template): Adjust call
> > 	to cxx_eval_outermost_constant_expr.
> > 	(fold_non_dependent_expr): Likewise.
> > 	(maybe_constant_init_1): Likewise.
> > 	* constraint.cc (satisfy_atom): Adjust call to
> > 	maybe_constant_value.
> > 	* cp-tree.h (enum class mce_value): Define.
> > 	(maybe_constant_value): Adjust manifestly_const_eval parameter
> > 	type and default argument.
> > 	* decl.cc (compute_array_index_type_loc): Adjust call to
> > 	maybe_constant_value.
> > 	* pt.cc (convert_nontype_argument): Likewise.
> > ---
> >   gcc/cp/constexpr.cc  | 61 ++++++++++++++++++++++++--------------------
> >   gcc/cp/constraint.cc |  3 +--
> >   gcc/cp/cp-tree.h     | 18 ++++++++++++-
> >   gcc/cp/decl.cc       |  2 +-
> >   gcc/cp/pt.cc         |  6 ++---
> >   5 files changed, 54 insertions(+), 36 deletions(-)
> > 
> > diff --git a/gcc/cp/constexpr.cc b/gcc/cp/constexpr.cc
> > index be99bec17e7..34662198903 100644
> > --- a/gcc/cp/constexpr.cc
> > +++ b/gcc/cp/constexpr.cc
> > @@ -1119,8 +1119,8 @@ struct GTY((for_user)) constexpr_call {
> >     /* The hash of this call; we remember it here to avoid having to
> >        recalculate it when expanding the hash table.  */
> >     hashval_t hash;
> > -  /* Whether __builtin_is_constant_evaluated() should evaluate to true.  */
> > -  bool manifestly_const_eval;
> > +  /* The raw value of constexpr_ctx::manifestly_const_eval.  */
> > +  int manifestly_const_eval;
> 
> Why not mce_value?

gengtype complained about 'mce_value' being an unknown type here
(constexpr_call is gengtype-enabled).  Ah, but it looks like using
'enum mce_value' makes gengtype happy.

> 
> >   };
> >     struct constexpr_call_hasher : ggc_ptr_hash<constexpr_call>
> > @@ -1248,7 +1248,7 @@ struct constexpr_ctx {
> >        trying harder to get a constant value.  */
> >     bool strict;
> >     /* Whether __builtin_is_constant_evaluated () should be true.  */
> > -  bool manifestly_const_eval;
> > +  mce_value manifestly_const_eval;
> >   };
> >     /* This internal flag controls whether we should avoid doing anything
> > during
> > @@ -1463,7 +1463,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx
> > *ctx, tree t, tree fun,
> >     /* If we aren't requiring a constant expression, defer
> > __builtin_constant_p
> >        in a constexpr function until we have values for the parameters.  */
> >     if (bi_const_p
> > -      && !ctx->manifestly_const_eval
> > +      && ctx->manifestly_const_eval == mce_unknown
> >         && current_function_decl
> >         && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
> >       {
> > @@ -1479,12 +1479,13 @@ cxx_eval_builtin_function_call (const constexpr_ctx
> > *ctx, tree t, tree fun,
> >     if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
> >   			 BUILT_IN_FRONTEND))
> >       {
> > -      if (!ctx->manifestly_const_eval)
> > +      if (ctx->manifestly_const_eval == mce_unknown)
> >   	{
> >   	  *non_constant_p = true;
> >   	  return t;
> >   	}
> > -      return boolean_true_node;
> > +      return constant_boolean_node (ctx->manifestly_const_eval == mce_true,
> > +				    boolean_type_node);
> >       }
> >       if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION,
> > BUILT_IN_FRONTEND))
> > @@ -1591,7 +1592,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx
> > *ctx, tree t, tree fun,
> >       }
> >       bool save_ffbcp = force_folding_builtin_constant_p;
> > -  force_folding_builtin_constant_p |= ctx->manifestly_const_eval;
> > +  force_folding_builtin_constant_p |= ctx->manifestly_const_eval !=
> > mce_unknown;
> >     tree save_cur_fn = current_function_decl;
> >     /* Return name of ctx->call->fundef->decl for __builtin_FUNCTION ().  */
> >     if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION)
> > @@ -2644,7 +2645,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx,
> > tree t,
> >     location_t loc = cp_expr_loc_or_input_loc (t);
> >     tree fun = get_function_named_in_call (t);
> >     constexpr_call new_call
> > -    = { NULL, NULL, NULL, 0, ctx->manifestly_const_eval };
> > +    = { NULL, NULL, NULL, 0, (int)ctx->manifestly_const_eval };
> >     int depth_ok;
> >       if (fun == NULL_TREE)
> > @@ -2916,7 +2917,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx,
> > tree t,
> >         location_t save_loc = input_location;
> >         input_location = loc;
> >         ++function_depth;
> > -      if (ctx->manifestly_const_eval)
> > +      if (ctx->manifestly_const_eval == mce_true)
> >   	FNDECL_MANIFESTLY_CONST_EVALUATED (fun) = true;
> >         instantiate_decl (fun, /*defer_ok*/false, /*expl_inst*/false);
> >         --function_depth;
> > @@ -3676,7 +3677,7 @@ cxx_eval_binary_expression (const constexpr_ctx *ctx,
> > tree t,
> >       if (r == NULL_TREE)
> >       {
> > -      if (ctx->manifestly_const_eval
> > +      if (ctx->manifestly_const_eval == mce_true
> >   	  && (flag_constexpr_fp_except
> >   	      || TREE_CODE (type) != REAL_TYPE))
> >   	{
> > @@ -3741,13 +3742,13 @@ cxx_eval_conditional_expression (const constexpr_ctx
> > *ctx, tree t,
> >   	 without manifestly_const_eval even expressions or parts thereof which
> >   	 will later be manifestly const_eval evaluated), otherwise fold it to
> >   	 true.  */
> > -      if (ctx->manifestly_const_eval)
> > -	val = boolean_true_node;
> > -      else
> > +      if (ctx->manifestly_const_eval == mce_unknown)
> >   	{
> >   	  *non_constant_p = true;
> >   	  return t;
> >   	}
> > +      val = constant_boolean_node (ctx->manifestly_const_eval == mce_true,
> > +				   boolean_type_node);
> >       }
> >     /* Don't VERIFY_CONSTANT the other operands.  */
> >     if (integer_zerop (val))
> > @@ -7055,7 +7056,7 @@ cxx_eval_constant_expression (const constexpr_ctx
> > *ctx, tree t,
> >   	      r = v;
> >   	      break;
> >   	    }
> > -      if (ctx->manifestly_const_eval)
> > +      if (ctx->manifestly_const_eval == mce_true)
> >   	maybe_warn_about_constant_value (loc, t);
> >         if (COMPLETE_TYPE_P (TREE_TYPE (t))
> >   	  && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/false))
> > @@ -7644,7 +7645,7 @@ cxx_eval_constant_expression (const constexpr_ctx
> > *ctx, tree t,
> >   	if (TREE_CODE (t) == CONVERT_EXPR
> >   	    && ARITHMETIC_TYPE_P (type)
> >   	    && INDIRECT_TYPE_P (TREE_TYPE (op))
> > -	    && ctx->manifestly_const_eval)
> > +	    && ctx->manifestly_const_eval == mce_true)
> >   	  {
> >   	    if (!ctx->quiet)
> >   	      error_at (loc,
> > @@ -8137,7 +8138,7 @@ mark_non_constant (tree t)
> >   static tree
> >   cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
> >   				  bool strict = true,
> > -				  bool manifestly_const_eval = false,
> > +				  mce_value manifestly_const_eval =
> > mce_unknown,
> >   				  bool constexpr_dtor = false,
> >   				  tree object = NULL_TREE)
> >   {
> > @@ -8155,10 +8156,11 @@ cxx_eval_outermost_constant_expr (tree t, bool
> > allow_non_constant,
> >     constexpr_global_ctx global_ctx;
> >     constexpr_ctx ctx = { &global_ctx, NULL, NULL, NULL, NULL, NULL, NULL,
> >   			allow_non_constant, strict,
> > -			manifestly_const_eval || !allow_non_constant };
> > +			!allow_non_constant ? mce_true : manifestly_const_eval
> > };
> >       /* Turn off -frounding-math for manifestly constant evaluation.  */
> > -  warning_sentinel rm (flag_rounding_math, ctx.manifestly_const_eval);
> > +  warning_sentinel rm (flag_rounding_math,
> > +		       ctx.manifestly_const_eval == mce_true);
> >     tree type = initialized_type (t);
> >     tree r = t;
> >     bool is_consteval = false;
> > @@ -8247,7 +8249,7 @@ cxx_eval_outermost_constant_expr (tree t, bool
> > allow_non_constant,
> >     auto_vec<tree, 16> cleanups;
> >     global_ctx.cleanups = &cleanups;
> >   -  if (manifestly_const_eval)
> > +  if (manifestly_const_eval == mce_true)
> >       instantiate_constexpr_fns (r);
> >     r = cxx_eval_constant_expression (&ctx, r, vc_prvalue,
> >   				    &non_constant_p, &overflow_p);
> > @@ -8386,7 +8388,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE
> > */,
> >   		    tsubst_flags_t complain /* = tf_error */)
> >   {
> >     bool sfinae = !(complain & tf_error);
> > -  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, true, false,
> > decl);
> > +  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, mce_true,
> > false, decl);
> >     if (sfinae && !TREE_CONSTANT (r))
> >       r = error_mark_node;
> >     return r;
> > @@ -8398,7 +8400,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE
> > */,
> >   void
> >   cxx_constant_dtor (tree t, tree decl)
> >   {
> > -  cxx_eval_outermost_constant_expr (t, false, true, true, true, decl);
> > +  cxx_eval_outermost_constant_expr (t, false, true, mce_true, true, decl);
> >   }
> >     /* Helper routine for fold_simple function.  Either return simplified
> > @@ -8484,7 +8486,7 @@ static GTY((deletable)) hash_map<tree, tree>
> > *cv_cache;
> >     tree
> >   maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
> > -		      bool manifestly_const_eval /* = false */)
> > +		      mce_value manifestly_const_eval /* = mce_unknown */)
> >   {
> >     tree r;
> >   @@ -8499,8 +8501,9 @@ maybe_constant_value (tree t, tree decl /* =
> > NULL_TREE */,
> >       /* No caching or evaluation needed.  */
> >       return t;
> >   -  if (manifestly_const_eval)
> > -    return cxx_eval_outermost_constant_expr (t, true, true, true, false,
> > decl);
> > +  if (manifestly_const_eval != mce_unknown)
> > +    return cxx_eval_outermost_constant_expr (t, true, true,
> > +					     manifestly_const_eval, false,
> > decl);
> >       if (cv_cache == NULL)
> >       cv_cache = hash_map<tree, tree>::create_ggc (101);
> > @@ -8524,7 +8527,8 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE
> > */,
> >       return t;
> >       uid_sensitive_constexpr_evaluation_checker c;
> > -  r = cxx_eval_outermost_constant_expr (t, true, true, false, false, decl);
> > +  r = cxx_eval_outermost_constant_expr (t, true, true,
> > +					manifestly_const_eval, false, decl);
> >     gcc_checking_assert (r == t
> >   		       || CONVERT_EXPR_P (t)
> >   		       || TREE_CODE (t) == VIEW_CONVERT_EXPR
> > @@ -8590,7 +8594,7 @@ fold_non_dependent_expr_template (tree t,
> > tsubst_flags_t complain,
> >   	return t;
> >           tree r = cxx_eval_outermost_constant_expr (t, true, true,
> > -						 manifestly_const_eval,
> > +						 mce_value
> > (manifestly_const_eval),
> >   						 false, object);
> >         /* cp_tree_equal looks through NOPs, so allow them.  */
> >         gcc_checking_assert (r == t
> > @@ -8637,7 +8641,7 @@ fold_non_dependent_expr (tree t,
> >       return fold_non_dependent_expr_template (t, complain,
> >   					     manifestly_const_eval, object);
> >   -  return maybe_constant_value (t, object, manifestly_const_eval);
> > +  return maybe_constant_value (t, object,
> > (mce_value)manifestly_const_eval);
> >   }
> >     /* Like fold_non_dependent_expr, but if EXPR couldn't be folded to a
> > constant,
> > @@ -8715,7 +8719,8 @@ maybe_constant_init_1 (tree t, tree decl, bool
> > allow_non_constant,
> >         bool is_static = (decl && DECL_P (decl)
> >   			&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
> >         t = cxx_eval_outermost_constant_expr (t, allow_non_constant,
> > !is_static,
> > -					    manifestly_const_eval, false,
> > decl);
> > +					    mce_value (manifestly_const_eval),
> > +					    false, decl);
> >       }
> >     if (TREE_CODE (t) == TARGET_EXPR)
> >       {
> > diff --git a/gcc/cp/constraint.cc b/gcc/cp/constraint.cc
> > index 2e5acdf8fcb..9374327008b 100644
> > --- a/gcc/cp/constraint.cc
> > +++ b/gcc/cp/constraint.cc
> > @@ -3068,8 +3068,7 @@ satisfy_atom (tree t, tree args, sat_info info)
> >       }
> >     else
> >       {
> > -      result = maybe_constant_value (result, NULL_TREE,
> > -				     /*manifestly_const_eval=*/true);
> > +      result = maybe_constant_value (result, NULL_TREE, mce_true);
> >         if (!TREE_CONSTANT (result))
> >   	result = error_mark_node;
> >       }
> > diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
> > index 74b7ab71ca5..2d39185b182 100644
> > --- a/gcc/cp/cp-tree.h
> > +++ b/gcc/cp/cp-tree.h
> > @@ -8456,6 +8456,22 @@ struct GTY((for_user)) constexpr_fundef {
> >     tree result;
> >   };
> >   +/* Used by the constexpr machinery to control folding of
> > +   __builtin_is_constant_evaluated.  */
> 
> Add "Whether the current context is manifestly constant-evaluated." at the
> start.
> 
> > +enum class mce_value
> > +{
> > +  /* Treat __builtin_is_constant_evaluated as non-constant.  */
> 
> "Unknown, so..."
> 
> > +  mce_unknown = 0,
> > +  /* Fold it to true.  */
> > +  mce_true = 1,
> > +  /* Fold it to false.  */
> 
> "Primarily used during cp_fold_function."
> 
> OK with these tweaks.

Thanks, here's v2 with the above changes, pending approval of the
second patch of the series.  I'll make sure to adjust the comment
for mce_value::mce_false to say

  "Primarily used during cp_fold_function and cp_fully_fold_init"

if we do end up using it from cp_fully_fold_init as well.

-- >8 --

Subject: [PATCH 1/2] c++: make manifestly_const_eval tri-state

This patch turns the manifestly_const_eval flag used by the constexpr
machinery into a tri-state enum so that we're able to express wanting
to fold __builtin_is_constant_evaluated to false via late speculative
constexpr evaluation.  Of all the entry points to constexpr evaluation
only maybe_constant_value is changed to take a tri-state value; the
others continue to take bool.  The subsequent patch will use this to fold
the builtin to false when called from cp_fold_function.

gcc/cp/ChangeLog:

	* constexpr.cc (constexpr_call::manifestly_const_eval): Give
	it type int instead of bool.
	(constexpr_ctx::manifestly_const_eval): Give it type mce_value
	instead of bool.
	(cxx_eval_builtin_function_call): Adjust after making
	manifestly_const_eval tri-state.
	(cxx_eval_call_expression): Likewise.
	(cxx_eval_binary_expression): Likewise.
	(cxx_eval_conditional_expression): Likewise.
	(cxx_eval_constant_expression): Likewise.
	(cxx_eval_outermost_constant_expr): Likewise.
	(cxx_constant_value): Likewise.
	(cxx_constant_dtor): Likewise.
	(maybe_constant_value): Give manifestly_const_eval parameter
	type mce_value instead of bool and adjust accordingly.
	(fold_non_dependent_expr_template): Adjust call
	to cxx_eval_outermost_constant_expr.
	(fold_non_dependent_expr): Likewise.
	(maybe_constant_init_1): Likewise.
	* constraint.cc (satisfy_atom): Adjust call to
	maybe_constant_value.
	* cp-tree.h (enum class mce_value): Define.
	(maybe_constant_value): Adjust manifestly_const_eval parameter
	type and default argument.
	* decl.cc (compute_array_index_type_loc): Adjust call to
	maybe_constant_value.
	* pt.cc (convert_nontype_argument): Likewise.
---
 gcc/cp/constexpr.cc  | 59 ++++++++++++++++++++++++--------------------
 gcc/cp/constraint.cc |  3 +--
 gcc/cp/cp-tree.h     | 19 +++++++++++++-
 gcc/cp/decl.cc       |  2 +-
 gcc/cp/pt.cc         |  6 ++---
 5 files changed, 54 insertions(+), 35 deletions(-)

diff --git a/gcc/cp/constexpr.cc b/gcc/cp/constexpr.cc
index 5b31f9c27d1..4576aebd8d2 100644
--- a/gcc/cp/constexpr.cc
+++ b/gcc/cp/constexpr.cc
@@ -1119,8 +1119,8 @@ struct GTY((for_user)) constexpr_call {
   /* The hash of this call; we remember it here to avoid having to
      recalculate it when expanding the hash table.  */
   hashval_t hash;
-  /* Whether __builtin_is_constant_evaluated() should evaluate to true.  */
-  bool manifestly_const_eval;
+  /* The value of constexpr_ctx::manifestly_const_eval.  */
+  enum mce_value manifestly_const_eval;
 };
 
 struct constexpr_call_hasher : ggc_ptr_hash<constexpr_call>
@@ -1248,7 +1248,7 @@ struct constexpr_ctx {
      trying harder to get a constant value.  */
   bool strict;
   /* Whether __builtin_is_constant_evaluated () should be true.  */
-  bool manifestly_const_eval;
+  mce_value manifestly_const_eval;
 };
 
 /* This internal flag controls whether we should avoid doing anything during
@@ -1463,7 +1463,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
   /* If we aren't requiring a constant expression, defer __builtin_constant_p
      in a constexpr function until we have values for the parameters.  */
   if (bi_const_p
-      && !ctx->manifestly_const_eval
+      && ctx->manifestly_const_eval == mce_unknown
       && current_function_decl
       && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
     {
@@ -1479,12 +1479,13 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
   if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
 			 BUILT_IN_FRONTEND))
     {
-      if (!ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_unknown)
 	{
 	  *non_constant_p = true;
 	  return t;
 	}
-      return boolean_true_node;
+      return constant_boolean_node (ctx->manifestly_const_eval == mce_true,
+				    boolean_type_node);
     }
 
   if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION, BUILT_IN_FRONTEND))
@@ -1591,7 +1592,7 @@ cxx_eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
     }
 
   bool save_ffbcp = force_folding_builtin_constant_p;
-  force_folding_builtin_constant_p |= ctx->manifestly_const_eval;
+  force_folding_builtin_constant_p |= ctx->manifestly_const_eval != mce_unknown;
   tree save_cur_fn = current_function_decl;
   /* Return name of ctx->call->fundef->decl for __builtin_FUNCTION ().  */
   if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION)
@@ -2916,7 +2917,7 @@ cxx_eval_call_expression (const constexpr_ctx *ctx, tree t,
       location_t save_loc = input_location;
       input_location = loc;
       ++function_depth;
-      if (ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_true)
 	FNDECL_MANIFESTLY_CONST_EVALUATED (fun) = true;
       instantiate_decl (fun, /*defer_ok*/false, /*expl_inst*/false);
       --function_depth;
@@ -3676,7 +3677,7 @@ cxx_eval_binary_expression (const constexpr_ctx *ctx, tree t,
 
   if (r == NULL_TREE)
     {
-      if (ctx->manifestly_const_eval
+      if (ctx->manifestly_const_eval == mce_true
 	  && (flag_constexpr_fp_except
 	      || TREE_CODE (type) != REAL_TYPE))
 	{
@@ -3741,13 +3742,13 @@ cxx_eval_conditional_expression (const constexpr_ctx *ctx, tree t,
 	 without manifestly_const_eval even expressions or parts thereof which
 	 will later be manifestly const_eval evaluated), otherwise fold it to
 	 true.  */
-      if (ctx->manifestly_const_eval)
-	val = boolean_true_node;
-      else
+      if (ctx->manifestly_const_eval == mce_unknown)
 	{
 	  *non_constant_p = true;
 	  return t;
 	}
+      val = constant_boolean_node (ctx->manifestly_const_eval == mce_true,
+				   boolean_type_node);
     }
   /* Don't VERIFY_CONSTANT the other operands.  */
   if (integer_zerop (val))
@@ -7055,7 +7056,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
 	      r = v;
 	      break;
 	    }
-      if (ctx->manifestly_const_eval)
+      if (ctx->manifestly_const_eval == mce_true)
 	maybe_warn_about_constant_value (loc, t);
       if (COMPLETE_TYPE_P (TREE_TYPE (t))
 	  && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/false))
@@ -7644,7 +7645,7 @@ cxx_eval_constant_expression (const constexpr_ctx *ctx, tree t,
 	if (TREE_CODE (t) == CONVERT_EXPR
 	    && ARITHMETIC_TYPE_P (type)
 	    && INDIRECT_TYPE_P (TREE_TYPE (op))
-	    && ctx->manifestly_const_eval)
+	    && ctx->manifestly_const_eval == mce_true)
 	  {
 	    if (!ctx->quiet)
 	      error_at (loc,
@@ -8182,7 +8183,7 @@ mark_non_constant (tree t)
 static tree
 cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
 				  bool strict = true,
-				  bool manifestly_const_eval = false,
+				  mce_value manifestly_const_eval = mce_unknown,
 				  bool constexpr_dtor = false,
 				  tree object = NULL_TREE)
 {
@@ -8200,10 +8201,11 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
   constexpr_global_ctx global_ctx;
   constexpr_ctx ctx = { &global_ctx, NULL, NULL, NULL, NULL, NULL, NULL,
 			allow_non_constant, strict,
-			manifestly_const_eval || !allow_non_constant };
+			!allow_non_constant ? mce_true : manifestly_const_eval };
 
   /* Turn off -frounding-math for manifestly constant evaluation.  */
-  warning_sentinel rm (flag_rounding_math, ctx.manifestly_const_eval);
+  warning_sentinel rm (flag_rounding_math,
+		       ctx.manifestly_const_eval == mce_true);
   tree type = initialized_type (t);
   tree r = t;
   bool is_consteval = false;
@@ -8292,7 +8294,7 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
   auto_vec<tree, 16> cleanups;
   global_ctx.cleanups = &cleanups;
 
-  if (manifestly_const_eval)
+  if (manifestly_const_eval == mce_true)
     instantiate_constexpr_fns (r);
   r = cxx_eval_constant_expression (&ctx, r, vc_prvalue,
 				    &non_constant_p, &overflow_p);
@@ -8431,7 +8433,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
 		    tsubst_flags_t complain /* = tf_error */)
 {
   bool sfinae = !(complain & tf_error);
-  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, true, false, decl);
+  tree r = cxx_eval_outermost_constant_expr (t, sfinae, true, mce_true, false, decl);
   if (sfinae && !TREE_CONSTANT (r))
     r = error_mark_node;
   return r;
@@ -8443,7 +8445,7 @@ cxx_constant_value (tree t, tree decl /* = NULL_TREE */,
 void
 cxx_constant_dtor (tree t, tree decl)
 {
-  cxx_eval_outermost_constant_expr (t, false, true, true, true, decl);
+  cxx_eval_outermost_constant_expr (t, false, true, mce_true, true, decl);
 }
 
 /* Helper routine for fold_simple function.  Either return simplified
@@ -8529,7 +8531,7 @@ static GTY((deletable)) hash_map<tree, tree> *cv_cache;
 
 tree
 maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
-		      bool manifestly_const_eval /* = false */)
+		      mce_value manifestly_const_eval /* = mce_unknown */)
 {
   tree r;
 
@@ -8544,8 +8546,9 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
     /* No caching or evaluation needed.  */
     return t;
 
-  if (manifestly_const_eval)
-    return cxx_eval_outermost_constant_expr (t, true, true, true, false, decl);
+  if (manifestly_const_eval != mce_unknown)
+    return cxx_eval_outermost_constant_expr (t, true, true,
+					     manifestly_const_eval, false, decl);
 
   if (cv_cache == NULL)
     cv_cache = hash_map<tree, tree>::create_ggc (101);
@@ -8569,7 +8572,8 @@ maybe_constant_value (tree t, tree decl /* = NULL_TREE */,
     return t;
 
   uid_sensitive_constexpr_evaluation_checker c;
-  r = cxx_eval_outermost_constant_expr (t, true, true, false, false, decl);
+  r = cxx_eval_outermost_constant_expr (t, true, true,
+					manifestly_const_eval, false, decl);
   gcc_checking_assert (r == t
 		       || CONVERT_EXPR_P (t)
 		       || TREE_CODE (t) == VIEW_CONVERT_EXPR
@@ -8635,7 +8639,7 @@ fold_non_dependent_expr_template (tree t, tsubst_flags_t complain,
 	return t;
 
       tree r = cxx_eval_outermost_constant_expr (t, true, true,
-						 manifestly_const_eval,
+						 mce_value (manifestly_const_eval),
 						 false, object);
       /* cp_tree_equal looks through NOPs, so allow them.  */
       gcc_checking_assert (r == t
@@ -8682,7 +8686,7 @@ fold_non_dependent_expr (tree t,
     return fold_non_dependent_expr_template (t, complain,
 					     manifestly_const_eval, object);
 
-  return maybe_constant_value (t, object, manifestly_const_eval);
+  return maybe_constant_value (t, object, mce_value (manifestly_const_eval));
 }
 
 /* Like fold_non_dependent_expr, but if EXPR couldn't be folded to a constant,
@@ -8760,7 +8764,8 @@ maybe_constant_init_1 (tree t, tree decl, bool allow_non_constant,
       bool is_static = (decl && DECL_P (decl)
 			&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)));
       t = cxx_eval_outermost_constant_expr (t, allow_non_constant, !is_static,
-					    manifestly_const_eval, false, decl);
+					    mce_value (manifestly_const_eval),
+					    false, decl);
     }
   if (TREE_CODE (t) == TARGET_EXPR)
     {
diff --git a/gcc/cp/constraint.cc b/gcc/cp/constraint.cc
index 2e5acdf8fcb..9374327008b 100644
--- a/gcc/cp/constraint.cc
+++ b/gcc/cp/constraint.cc
@@ -3068,8 +3068,7 @@ satisfy_atom (tree t, tree args, sat_info info)
     }
   else
     {
-      result = maybe_constant_value (result, NULL_TREE,
-				     /*manifestly_const_eval=*/true);
+      result = maybe_constant_value (result, NULL_TREE, mce_true);
       if (!TREE_CONSTANT (result))
 	result = error_mark_node;
     }
diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
index 31fd8af4f21..00b2bffc85c 100644
--- a/gcc/cp/cp-tree.h
+++ b/gcc/cp/cp-tree.h
@@ -8455,6 +8455,23 @@ struct GTY((for_user)) constexpr_fundef {
   tree result;
 };
 
+/* Whether the current context is manifestly constant-evaluated.
+   Used by the constexpr machinery to control folding of
+   __builtin_is_constant_evaluated.  */
+
+enum class mce_value
+{
+  /* Unknown, so treat __builtin_is_constant_evaluated as non-constant.  */
+  mce_unknown = 0,
+  /* Fold it to true.  */
+  mce_true = 1,
+  /* Fold it to false.  Primarily used during cp_fold_function.  */
+  mce_false = -1,
+};
+constexpr mce_value mce_unknown = mce_value::mce_unknown;
+constexpr mce_value mce_true = mce_value::mce_true;
+constexpr mce_value mce_false = mce_value::mce_false;
+
 extern void fini_constexpr			(void);
 extern bool literal_type_p                      (tree);
 extern void maybe_save_constexpr_fundef		(tree);
@@ -8483,7 +8500,7 @@ inline tree cxx_constant_value (tree t, tsubst_flags_t complain)
 { return cxx_constant_value (t, NULL_TREE, complain); }
 extern void cxx_constant_dtor			(tree, tree);
 extern tree cxx_constant_init			(tree, tree = NULL_TREE);
-extern tree maybe_constant_value		(tree, tree = NULL_TREE, bool = false);
+extern tree maybe_constant_value		(tree, tree = NULL_TREE, mce_value = mce_unknown);
 extern tree maybe_constant_init			(tree, tree = NULL_TREE, bool = false);
 extern tree fold_non_dependent_expr		(tree,
 						 tsubst_flags_t = tf_warning_or_error,
diff --git a/gcc/cp/decl.cc b/gcc/cp/decl.cc
index d606b31d7a7..a023c38c59d 100644
--- a/gcc/cp/decl.cc
+++ b/gcc/cp/decl.cc
@@ -11372,7 +11372,7 @@ compute_array_index_type_loc (location_t name_loc, tree name, tree size,
 				    cp_convert (ssizetype, integer_one_node,
 						complain),
 				    complain);
-	itype = maybe_constant_value (itype, NULL_TREE, true);
+	itype = maybe_constant_value (itype, NULL_TREE, mce_true);
       }
 
       if (!TREE_CONSTANT (itype))
diff --git a/gcc/cp/pt.cc b/gcc/cp/pt.cc
index 51fc246ed71..255332dc0c1 100644
--- a/gcc/cp/pt.cc
+++ b/gcc/cp/pt.cc
@@ -7390,16 +7390,14 @@ convert_nontype_argument (tree type, tree expr, tsubst_flags_t complain)
 	      IMPLICIT_CONV_EXPR_NONTYPE_ARG (expr) = true;
 	      return expr;
 	    }
-	  expr = maybe_constant_value (expr, NULL_TREE,
-				       /*manifestly_const_eval=*/true);
+	  expr = maybe_constant_value (expr, NULL_TREE, mce_true);
 	  expr = convert_from_reference (expr);
 	  /* EXPR may have become value-dependent.  */
 	  val_dep_p = value_dependent_expression_p (expr);
 	}
       else if (TYPE_PTR_OR_PTRMEM_P (type))
 	{
-	  tree folded = maybe_constant_value (expr, NULL_TREE,
-					      /*manifestly_const_eval=*/true);
+	  tree folded = maybe_constant_value (expr, NULL_TREE, mce_true);
 	  if (TYPE_PTR_P (type) ? integer_zerop (folded)
 	      : null_member_pointer_value_p (folded))
 	    expr = folded;
-- 
2.39.1.388.g2fc9e9ca3c



^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-03 20:51     ` Patrick Palka
  2023-02-03 20:57       ` Patrick Palka
@ 2023-02-05 20:11       ` Jason Merrill
  2023-02-09 17:36         ` Patrick Palka
  1 sibling, 1 reply; 15+ messages in thread
From: Jason Merrill @ 2023-02-05 20:11 UTC (permalink / raw)
  To: Patrick Palka; +Cc: gcc-patches

On 2/3/23 15:51, Patrick Palka wrote:
> On Mon, 30 Jan 2023, Jason Merrill wrote:
> 
>> On 1/27/23 17:02, Patrick Palka wrote:
>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>> as an optimization barrier for our speculative constexpr evaluation,
>>> since we don't want to prematurely fold the builtin to false if the
>>> expression in question would be later manifestly constant evaluated (in
>>> which case it must be folded to true).
>>>
>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>> to get folded as false during cp_fold_function, since at that point
>>> we're sure we're doing manifestly constant evaluation.  To that end
>>> we add a flags parameter to cp_fold that controls what mce_value the
>>> CALL_EXPR case passes to maybe_constant_value.
>>>
>>> bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
>>> trunk?
>>>
>>> 	PR c++/108243
>>>
>>> gcc/cp/ChangeLog:
>>>
>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>> 	(cp_fold_data::fold_flags): ... this.
>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
>>> 	(cp_fold_function): Likewise.
>>> 	(cp_fold_maybe_rvalue): Likewise.
>>> 	(cp_fully_fold_init): Likewise.
>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>> 	isn't empty.
>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>> 	if ff_genericize is set.
>>>
>>> gcc/testsuite/ChangeLog:
>>>
>>> 	* g++.dg/opt/pr108243.C: New test.
>>> ---
>>>    gcc/cp/cp-gimplify.cc               | 76 ++++++++++++++++++-----------
>>>    gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>    2 files changed, 76 insertions(+), 29 deletions(-)
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>
>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>> index a35cedd05cc..d023a63768f 100644
>>> --- a/gcc/cp/cp-gimplify.cc
>>> +++ b/gcc/cp/cp-gimplify.cc
>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>>>    #include "omp-general.h"
>>>    #include "opts.h"
>>>    +/* Flags for cp_fold and cp_fold_r.  */
>>> +
>>> +enum fold_flags {
>>> +  ff_none = 0,
>>> +  /* Whether we're being called from cp_fold_function.  */
>>> +  ff_genericize = 1 << 0,
>>> +};
>>> +
>>>    /* Forward declarations.  */
>>>      static tree cp_genericize_r (tree *, int *, void *);
>>>    static tree cp_fold_r (tree *, int *, void *);
>>>    static void cp_genericize_tree (tree*, bool);
>>> -static tree cp_fold (tree);
>>> +static tree cp_fold (tree, fold_flags);
>>>      /* Genericize a TRY_BLOCK.  */
>>>    @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>    struct cp_fold_data
>>>    {
>>>      hash_set<tree> pset;
>>> -  bool genericize; // called from cp_fold_function?
>>> -
>>> -  cp_fold_data (bool g): genericize (g) {}
>>> +  fold_flags flags;
>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>    };
>>>      static tree
>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>          break;
>>>        }
>>>    -  *stmt_p = stmt = cp_fold (*stmt_p);
>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>        if (data->pset.add (stmt))
>>>        {
>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    	 here rather than in cp_genericize to avoid problems with the
>>> invisible
>>>    	 reference transition.  */
>>>        case INIT_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_init_expr (stmt_p);
>>>          break;
>>>          case TARGET_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_target_expr (stmt_p);
>>>            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    void
>>>    cp_fold_function (tree fndecl)
>>>    {
>>> -  cp_fold_data data (/*genericize*/true);
>>> +  cp_fold_data data (ff_genericize);
>>>      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>    }
>>>    @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>    {
>>>      while (true)
>>>        {
>>> -      x = cp_fold (x);
>>> +      x = cp_fold (x, ff_none);
>>>          if (rval)
>>>    	x = mark_rvalue_use (x);
>>>          if (rval && DECL_P (x)
>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>      if (processing_template_decl)
>>>        return x;
>>>      x = cp_fully_fold (x);
>>> -  cp_fold_data data (/*genericize*/false);
>>> +  cp_fold_data data (ff_none);
>>>      cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>      return x;
>>>    }
>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>        Function returns X or its folded variant.  */
>>>      static tree
>>> -cp_fold (tree x)
>>> +cp_fold (tree x, fold_flags flags)
>>>    {
>>>      tree op0, op1, op2, op3;
>>>      tree org_x = x, r = NULL_TREE;
>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>      if (fold_cache == NULL)
>>>        fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>    -  if (tree *cached = fold_cache->get (x))
>>> -    return *cached;
>>> +  bool cache_p = (flags == ff_none);
>>> +
>>> +  if (cache_p)
>>> +    if (tree *cached = fold_cache->get (x))
>>> +      return *cached;
>>>        uid_sensitive_constexpr_evaluation_checker c;
>>>    @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>>    	     folding of the operand should be in the caches and if in
>>> cp_fold_r
>>>    	     it will modify it in place.  */
>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>    	  if (op0 == error_mark_node)
>>>    	    x = error_mark_node;
>>>    	  break;
>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>    	{
>>>    	  tree p = maybe_undo_parenthesized_ref (x);
>>>    	  if (p != x)
>>> -	    return cp_fold (p);
>>> +	    return cp_fold (p, flags);
>>>    	}
>>>          goto unary;
>>>    @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>        case COND_EXPR:
>>>          loc = EXPR_LOCATION (x);
>>>          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>    	{
>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>    	      {
>>>    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>    		  r = build_nop (TREE_TYPE (x), r);
>>> -		x = cp_fold (r);
>>> +		x = cp_fold (r, flags);
>>>    		break;
>>>    	      }
>>>    	  }
>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>    	int m = call_expr_nargs (x);
>>>    	for (int i = 0; i < m; i++)
>>>    	  {
>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>    	    if (r != CALL_EXPR_ARG (x, i))
>>>    	      {
>>>    		if (r == error_mark_node)
>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>      	if (TREE_CODE (r) != CALL_EXPR)
>>>    	  {
>>> -	    x = cp_fold (r);
>>> +	    x = cp_fold (r, flags);
>>>    	    break;
>>>    	  }
>>>    @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>    	   constant, but the call followed by an INDIRECT_REF is.  */
>>>    	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>    	    && !flag_no_inline)
>>> -	  r = maybe_constant_value (x);
>>> +	  {
>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>> +	    if (flags & ff_genericize)
>>> +	      /* At genericization time it's safe to fold
>>> +		 __builtin_is_constant_evaluated to false.  */
>>> +	      manifestly_const_eval = mce_false;
>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>> +				      manifestly_const_eval);
>>> +	  }
>>>    	optimize = sv;
>>>              if (TREE_CODE (r) != CALL_EXPR)
>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>    	vec<constructor_elt, va_gc> *nelts = NULL;
>>>    	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>    	  {
>>> -	    tree op = cp_fold (p->value);
>>> +	    tree op = cp_fold (p->value, flags);
>>>    	    if (op != p->value)
>>>    	      {
>>>    		if (op == error_mark_node)
>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>      	for (int i = 0; i < n; i++)
>>>    	  {
>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>    	    if (op != TREE_VEC_ELT (x, i))
>>>    	      {
>>>    		if (!changed)
>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>        case ARRAY_RANGE_REF:
>>>            loc = EXPR_LOCATION (x);
>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>            if (op0 != TREE_OPERAND (x, 0)
>>>    	  || op1 != TREE_OPERAND (x, 1)
>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>          /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>>>    	 folding, evaluates to an invariant.  In that case no need to wrap
>>>    	 this folded tree with a SAVE_EXPR.  */
>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>          if (tree_invariant_p (r))
>>>    	x = r;
>>>          break;
>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>          copy_warning (x, org_x);
>>>        }
>>>    -  if (!c.evaluation_restricted_p ())
>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>        {
>>>          fold_cache->put (org_x, x);
>>>          /* Prevent that we try to fold an already folded result again.  */
>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
>>> b/gcc/testsuite/g++.dg/opt/pr108243.C
>>> new file mode 100644
>>> index 00000000000..4c45dbba13c
>>> --- /dev/null
>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>> @@ -0,0 +1,29 @@
>>> +// PR c++/108243
>>> +// { dg-do compile { target c++11 } }
>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>> +
>>> +constexpr int foo() {
>>> +  return __builtin_is_constant_evaluated() + 1;
>>> +}
>>> +
>>> +#if __cpp_if_consteval
>>> +constexpr int bar() {
>>> +  if consteval {
>>> +    return 5;
>>> +  } else {
>>> +    return 4;
>>> +  }
>>> +}
>>> +#endif
>>> +
>>> +int p, q;
>>> +
>>> +int main() {
>>> +  p = foo();
>>> +#if __cpp_if_consteval
>>> +  q = bar();
>>> +#endif
>>> +}
>>> +
>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>
>> Let's also test a static initializer that can't be fully constant-evaluated.
> 
> D'oh, doing so revealed that cp_fold_function doesn't reach static
> initializers; that's taken care of by cp_fully_fold_init.  So it seems
> we need to make cp_fold when called from the latter entry point to also
> assume m_c_e is false.  We can't re-use ff_genericize here because that
> flag has additional effects in cp_fold_r, so it seems we need another
> flag that only affects the manifestly constant-eval stuff; I called
> it ff_mce_false.  How does the following look?
> 
> -- >8 --
> 
> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false if the
> expression in question would be later manifestly constant evaluated (in
> which case it must be folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded as false during cp_fold_function and cp_fully_fold_init,
> since at these points we're sure we're done with manifestly constant
> evaluation.  To that end we add a flags parameter to cp_fold that
> controls whether we pass mce_false or mce_unknown to maybe_constant_value
> when folding a CALL_EXPR.
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> 	isn't empty.
> 	<case CALL_EXPR>: If ff_genericize is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>   .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>   3 files changed, 104 insertions(+), 30 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..590ed787997 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>   #include "omp-general.h"
>   #include "opts.h"
>   
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding late enough that we could assume
> +     we're definitely not in a manifestly constant-evaluated
> +     context.  */

It's not necessarily a matter of late enough; we could fold sooner and 
still know that, as in cp_fully_fold_init.  We could do the same at 
other full-expression points, but we don't because we want to delay 
folding as much as possible.  So let's say "folding at a point where we 
know we're..."

> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>   /* Forward declarations.  */
>   
>   static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>   struct cp_fold_data
>   {
>     hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>   };
>   
>   static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>   
>     if (data->pset.add (stmt))
>       {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	 here rather than in cp_genericize to avoid problems with the invisible
>   	 reference transition.  */
>       case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_init_expr (stmt_p);
>         break;
>   
>       case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_target_expr (stmt_p);
>   
>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   void
>   cp_fold_function (tree fndecl)
>   {
> -  cp_fold_data data (/*genericize*/true);
> +  cp_fold_data data (ff_genericize | ff_mce_false);

Here would be a good place for a comment about passing mce_false because 
all manifestly-constant-evaluated expressions will have been 
constant-evaluated already if possible.

>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>   }
>   
> @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>   {
>     while (true)
>       {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>         if (rval)
>   	x = mark_rvalue_use (x);
>         if (rval && DECL_P (x)
> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>     if (processing_template_decl)
>       return x;
>     x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>     return x;
>   }
> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>       Function returns X or its folded variant.  */
>   
>   static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>   {
>     tree op0, op1, op2, op3;
>     tree org_x = x, r = NULL_TREE;
> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>     if (fold_cache == NULL)
>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>   
> -  if (tree *cached = fold_cache->get (x))
> -    return *cached;
> +  bool cache_p = (flags == ff_none);
> +
> +  if (cache_p)
> +    if (tree *cached = fold_cache->get (x))
> +      return *cached;
>   
>     uid_sensitive_constexpr_evaluation_checker c;
>   
> @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>   	     folding of the operand should be in the caches and if in cp_fold_r
>   	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>   	  if (op0 == error_mark_node)
>   	    x = error_mark_node;
>   	  break;
> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>   	{
>   	  tree p = maybe_undo_parenthesized_ref (x);
>   	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>   	}
>         goto unary;
>   
> @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>       case COND_EXPR:
>         loc = EXPR_LOCATION (x);
>         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>   
>         if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>   	{
> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>   	      {
>   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>   		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>   		break;
>   	      }
>   	  }
> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>   	  {
>   	    switch (DECL_FE_FUNCTION_CODE (callee))
>   	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we can assume this isn't a manifestly constant-evaluated

s/can assume/know/

OK with those comment changes.

> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>   		break;
>   	      case CP_BUILT_IN_SOURCE_LOCATION:
>   		x = fold_builtin_source_location (x);
> @@ -2924,7 +2944,7 @@ cp_fold (tree x)
>   	int m = call_expr_nargs (x);
>   	for (int i = 0; i < m; i++)
>   	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>   	    if (r != CALL_EXPR_ARG (x, i))
>   	      {
>   		if (r == error_mark_node)
> @@ -2947,7 +2967,7 @@ cp_fold (tree x)
>   
>   	if (TREE_CODE (r) != CALL_EXPR)
>   	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>   	    break;
>   	  }
>   
> @@ -2960,7 +2980,15 @@ cp_fold (tree x)
>   	   constant, but the call followed by an INDIRECT_REF is.  */
>   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>   	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3015,7 @@ cp_fold (tree x)
>   	vec<constructor_elt, va_gc> *nelts = NULL;
>   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>   	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>   	    if (op != p->value)
>   	      {
>   		if (op == error_mark_node)
> @@ -3018,7 +3046,7 @@ cp_fold (tree x)
>   
>   	for (int i = 0; i < n; i++)
>   	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>   	    if (op != TREE_VEC_ELT (x, i))
>   	      {
>   		if (!changed)
> @@ -3035,10 +3063,10 @@ cp_fold (tree x)
>       case ARRAY_RANGE_REF:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>   
>         if (op0 != TREE_OPERAND (x, 0)
>   	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3094,7 @@ cp_fold (tree x)
>         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>   	 folding, evaluates to an invariant.  In that case no need to wrap
>   	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>         if (tree_invariant_p (r))
>   	x = r;
>         break;
> @@ -3085,7 +3113,7 @@ cp_fold (tree x)
>         copy_warning (x, org_x);
>       }
>   
> -  if (!c.evaluation_restricted_p ())
> +  if (cache_p && !c.evaluation_restricted_p ())
>       {
>         fold_cache->put (org_x, x);
>         /* Prevent that we try to fold an already folded result again.  */
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..ee05cbab785
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,14 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +void f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-05 20:11       ` Jason Merrill
@ 2023-02-09 17:36         ` Patrick Palka
  2023-02-09 23:36           ` Jason Merrill
  0 siblings, 1 reply; 15+ messages in thread
From: Patrick Palka @ 2023-02-09 17:36 UTC (permalink / raw)
  To: Jason Merrill; +Cc: Patrick Palka, gcc-patches

On Sun, 5 Feb 2023, Jason Merrill wrote:

> On 2/3/23 15:51, Patrick Palka wrote:
> > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > 
> > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > since we don't want to prematurely fold the builtin to false if the
> > > > expression in question would be later manifestly constant evaluated (in
> > > > which case it must be folded to true).
> > > > 
> > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > to get folded as false during cp_fold_function, since at that point
> > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > CALL_EXPR case passes to maybe_constant_value.
> > > > 
> > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
> > > > trunk?
> > > > 
> > > > 	PR c++/108243
> > > > 
> > > > gcc/cp/ChangeLog:
> > > > 
> > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > 	(cp_fold_data::fold_flags): ... this.
> > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> > > > 	(cp_fold_function): Likewise.
> > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > 	(cp_fully_fold_init): Likewise.
> > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > 	isn't empty.
> > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > 	if ff_genericize is set.
> > > > 
> > > > gcc/testsuite/ChangeLog:
> > > > 
> > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > ---
> > > >    gcc/cp/cp-gimplify.cc               | 76
> > > > ++++++++++++++++++-----------
> > > >    gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > >    2 files changed, 76 insertions(+), 29 deletions(-)
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > 
> > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > index a35cedd05cc..d023a63768f 100644
> > > > --- a/gcc/cp/cp-gimplify.cc
> > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
> > > >    #include "omp-general.h"
> > > >    #include "opts.h"
> > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > +
> > > > +enum fold_flags {
> > > > +  ff_none = 0,
> > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > +  ff_genericize = 1 << 0,
> > > > +};
> > > > +
> > > >    /* Forward declarations.  */
> > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > >    static tree cp_fold_r (tree *, int *, void *);
> > > >    static void cp_genericize_tree (tree*, bool);
> > > > -static tree cp_fold (tree);
> > > > +static tree cp_fold (tree, fold_flags);
> > > >      /* Genericize a TRY_BLOCK.  */
> > > >    @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > >    struct cp_fold_data
> > > >    {
> > > >      hash_set<tree> pset;
> > > > -  bool genericize; // called from cp_fold_function?
> > > > -
> > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > +  fold_flags flags;
> > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > >    };
> > > >      static tree
> > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >          break;
> > > >        }
> > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > >        if (data->pset.add (stmt))
> > > >        {
> > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > void
> > > > *data_)
> > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > invisible
> > > >    	 reference transition.  */
> > > >        case INIT_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_init_expr (stmt_p);
> > > >          break;
> > > >          case TARGET_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_target_expr (stmt_p);
> > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > in
> > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >    void
> > > >    cp_fold_function (tree fndecl)
> > > >    {
> > > > -  cp_fold_data data (/*genericize*/true);
> > > > +  cp_fold_data data (ff_genericize);
> > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > >    }
> > > >    @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > >    {
> > > >      while (true)
> > > >        {
> > > > -      x = cp_fold (x);
> > > > +      x = cp_fold (x, ff_none);
> > > >          if (rval)
> > > >    	x = mark_rvalue_use (x);
> > > >          if (rval && DECL_P (x)
> > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > >      if (processing_template_decl)
> > > >        return x;
> > > >      x = cp_fully_fold (x);
> > > > -  cp_fold_data data (/*genericize*/false);
> > > > +  cp_fold_data data (ff_none);
> > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > >      return x;
> > > >    }
> > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > >        Function returns X or its folded variant.  */
> > > >      static tree
> > > > -cp_fold (tree x)
> > > > +cp_fold (tree x, fold_flags flags)
> > > >    {
> > > >      tree op0, op1, op2, op3;
> > > >      tree org_x = x, r = NULL_TREE;
> > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > >      if (fold_cache == NULL)
> > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > >    -  if (tree *cached = fold_cache->get (x))
> > > > -    return *cached;
> > > > +  bool cache_p = (flags == ff_none);
> > > > +
> > > > +  if (cache_p)
> > > > +    if (tree *cached = fold_cache->get (x))
> > > > +      return *cached;
> > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > >    @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > the
> > > >    	     folding of the operand should be in the caches and if in
> > > > cp_fold_r
> > > >    	     it will modify it in place.  */
> > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >    	  if (op0 == error_mark_node)
> > > >    	    x = error_mark_node;
> > > >    	  break;
> > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > >    	{
> > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > >    	  if (p != x)
> > > > -	    return cp_fold (p);
> > > > +	    return cp_fold (p, flags);
> > > >    	}
> > > >          goto unary;
> > > >    @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > >        case COND_EXPR:
> > > >          loc = EXPR_LOCATION (x);
> > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > >    	{
> > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > >    	      {
> > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > -		x = cp_fold (r);
> > > > +		x = cp_fold (r, flags);
> > > >    		break;
> > > >    	      }
> > > >    	  }
> > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > >    	int m = call_expr_nargs (x);
> > > >    	for (int i = 0; i < m; i++)
> > > >    	  {
> > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > >    	    if (r != CALL_EXPR_ARG (x, i))
> > > >    	      {
> > > >    		if (r == error_mark_node)
> > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > >      	if (TREE_CODE (r) != CALL_EXPR)
> > > >    	  {
> > > > -	    x = cp_fold (r);
> > > > +	    x = cp_fold (r, flags);
> > > >    	    break;
> > > >    	  }
> > > >    @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > >    	   constant, but the call followed by an INDIRECT_REF is.  */
> > > >    	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > >    	    && !flag_no_inline)
> > > > -	  r = maybe_constant_value (x);
> > > > +	  {
> > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > +	    if (flags & ff_genericize)
> > > > +	      /* At genericization time it's safe to fold
> > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > +	      manifestly_const_eval = mce_false;
> > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > +				      manifestly_const_eval);
> > > > +	  }
> > > >    	optimize = sv;
> > > >              if (TREE_CODE (r) != CALL_EXPR)
> > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > >    	vec<constructor_elt, va_gc> *nelts = NULL;
> > > >    	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > >    	  {
> > > > -	    tree op = cp_fold (p->value);
> > > > +	    tree op = cp_fold (p->value, flags);
> > > >    	    if (op != p->value)
> > > >    	      {
> > > >    		if (op == error_mark_node)
> > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > >      	for (int i = 0; i < n; i++)
> > > >    	  {
> > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > >    	    if (op != TREE_VEC_ELT (x, i))
> > > >    	      {
> > > >    		if (!changed)
> > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > >        case ARRAY_RANGE_REF:
> > > >            loc = EXPR_LOCATION (x);
> > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > >            if (op0 != TREE_OPERAND (x, 0)
> > > >    	  || op1 != TREE_OPERAND (x, 1)
> > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > >          /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which,
> > > > after
> > > >    	 folding, evaluates to an invariant.  In that case no need to
> > > > wrap
> > > >    	 this folded tree with a SAVE_EXPR.  */
> > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >          if (tree_invariant_p (r))
> > > >    	x = r;
> > > >          break;
> > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > >          copy_warning (x, org_x);
> > > >        }
> > > >    -  if (!c.evaluation_restricted_p ())
> > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > >        {
> > > >          fold_cache->put (org_x, x);
> > > >          /* Prevent that we try to fold an already folded result again.
> > > > */
> > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > new file mode 100644
> > > > index 00000000000..4c45dbba13c
> > > > --- /dev/null
> > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > @@ -0,0 +1,29 @@
> > > > +// PR c++/108243
> > > > +// { dg-do compile { target c++11 } }
> > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > +
> > > > +constexpr int foo() {
> > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > +}
> > > > +
> > > > +#if __cpp_if_consteval
> > > > +constexpr int bar() {
> > > > +  if consteval {
> > > > +    return 5;
> > > > +  } else {
> > > > +    return 4;
> > > > +  }
> > > > +}
> > > > +#endif
> > > > +
> > > > +int p, q;
> > > > +
> > > > +int main() {
> > > > +  p = foo();
> > > > +#if __cpp_if_consteval
> > > > +  q = bar();
> > > > +#endif
> > > > +}
> > > > +
> > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > 
> > > Let's also test a static initializer that can't be fully
> > > constant-evaluated.
> > 
> > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > we need to make cp_fold when called from the latter entry point to also
> > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > flag has additional effects in cp_fold_r, so it seems we need another
> > flag that that only affects the manifestly constant-eval stuff; I called
> > it ff_mce_false.  How does the following look?
> > 
> > -- >8 --
> > 
> > Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
> >   [PR108243]
> > 
> > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > as an optimization barrier for our speculative constexpr evaluation,
> > since we don't want to prematurely fold the builtin to false if the
> > expression in question would be later manifestly constant evaluated (in
> > which case it must be folded to true).
> > 
> > This patch fixes this by permitting __builtin_is_constant_evaluated
> > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > since at these points we're sure we're done with manifestly constant
> > evaluation.  To that end we add a flags parameter to cp_fold that
> > controls whether we pass mce_false or mce_unknown to maybe_constant_value
> > when folding a CALL_EXPR.
> > 
> > 	PR c++/108243
> > 	PR c++/97553
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* cp-gimplify.cc (enum fold_flags): Define.
> > 	(cp_fold_data::genericize): Replace this data member with ...
> > 	(cp_fold_data::fold_flags): ... this.
> > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > 	(cp_fold_function): Likewise.
> > 	(cp_fold_maybe_rvalue): Likewise.
> > 	(cp_fully_fold_init): Likewise.
> > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > 	isn't empty.
> > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > 	maybe_constant_value.
> > 
> > gcc/testsuite/ChangeLog:
> > 
> > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > ---
> >   gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
> >   .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> >   3 files changed, 104 insertions(+), 30 deletions(-)
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index 9929d29981a..590ed787997 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> >   #include "omp-general.h"
> >   #include "opts.h"
> >   +/* Flags for cp_fold and cp_fold_r.  */
> > +
> > +enum fold_flags {
> > +  ff_none = 0,
> > +  /* Whether we're being called from cp_fold_function.  */
> > +  ff_genericize = 1 << 0,
> > +  /* Whether we're folding late enough that we could assume
> > +     we're definitely not in a manifestly constant-evaluated
> > +     context.  */
> 
> It's not necessarily a matter of late enough; we could fold sooner and still
> know that, as in cp_fully_fold_init.  We could do the same at other
> full-expression points, but we don't because we want to delay folding as much
> as possible.  So let's say "folding at a point where we know we're..."
> 
> > +  ff_mce_false = 1 << 1,
> > +};
> > +
> > +using fold_flags_t = int;
> > +
> >   /* Forward declarations.  */
> >     static tree cp_genericize_r (tree *, int *, void *);
> >   static tree cp_fold_r (tree *, int *, void *);
> >   static void cp_genericize_tree (tree*, bool);
> > -static tree cp_fold (tree);
> > +static tree cp_fold (tree, fold_flags_t);
> >     /* Genericize a TRY_BLOCK.  */
> >   @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> >   struct cp_fold_data
> >   {
> >     hash_set<tree> pset;
> > -  bool genericize; // called from cp_fold_function?
> > -
> > -  cp_fold_data (bool g): genericize (g) {}
> > +  fold_flags_t flags;
> > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> >   };
> >     static tree
> > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >         break;
> >       }
> >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> >       if (data->pset.add (stmt))
> >       {
> > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	 here rather than in cp_genericize to avoid problems with the
> > invisible
> >   	 reference transition.  */
> >       case INIT_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_init_expr (stmt_p);
> >         break;
> >         case TARGET_EXPR:
> > -      if (data->genericize)
> > +      if (data->flags & ff_genericize)
> >   	cp_genericize_target_expr (stmt_p);
> >           /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   void
> >   cp_fold_function (tree fndecl)
> >   {
> > -  cp_fold_data data (/*genericize*/true);
> > +  cp_fold_data data (ff_genericize | ff_mce_false);
> 
> Here would be a good place for a comment about passing mce_false because all
> manifestly-constant-evaluated expressions will have been constant-evaluated
> already if possible.
> 
> >     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> >   }
> >   @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> >   {
> >     while (true)
> >       {
> > -      x = cp_fold (x);
> > +      x = cp_fold (x, ff_none);
> >         if (rval)
> >   	x = mark_rvalue_use (x);
> >         if (rval && DECL_P (x)
> > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> >     if (processing_template_decl)
> >       return x;
> >     x = cp_fully_fold (x);
> > -  cp_fold_data data (/*genericize*/false);
> > +  cp_fold_data data (ff_mce_false);
> >     cp_walk_tree (&x, cp_fold_r, &data, NULL);
> >     return x;
> >   }
> > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> >       Function returns X or its folded variant.  */
> >     static tree
> > -cp_fold (tree x)
> > +cp_fold (tree x, fold_flags_t flags)
> >   {
> >     tree op0, op1, op2, op3;
> >     tree org_x = x, r = NULL_TREE;
> > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> >     if (fold_cache == NULL)
> >       fold_cache = hash_map<tree, tree>::create_ggc (101);
> >   -  if (tree *cached = fold_cache->get (x))
> > -    return *cached;
> > +  bool cache_p = (flags == ff_none);
> > +
> > +  if (cache_p)
> > +    if (tree *cached = fold_cache->get (x))
> > +      return *cached;
> >       uid_sensitive_constexpr_evaluation_checker c;
> >   @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> >   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
> >   	     folding of the operand should be in the caches and if in
> > cp_fold_r
> >   	     it will modify it in place.  */
> > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> >   	  if (op0 == error_mark_node)
> >   	    x = error_mark_node;
> >   	  break;
> > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> >   	{
> >   	  tree p = maybe_undo_parenthesized_ref (x);
> >   	  if (p != x)
> > -	    return cp_fold (p);
> > +	    return cp_fold (p, flags);
> >   	}
> >         goto unary;
> >   @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> >       case COND_EXPR:
> >         loc = EXPR_LOCATION (x);
> >         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> >           if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> >   	{
> > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> >   	      {
> >   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> >   		  r = build_nop (TREE_TYPE (x), r);
> > -		x = cp_fold (r);
> > +		x = cp_fold (r, flags);
> >   		break;
> >   	      }
> >   	  }
> > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> >   	  {
> >   	    switch (DECL_FE_FUNCTION_CODE (callee))
> >   	      {
> > -		/* Defer folding __builtin_is_constant_evaluated.  */
> >   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > +		/* Defer folding __builtin_is_constant_evaluated unless
> > +		   we can assume this isn't a manifestly constant-evaluated
> 
> s/can assume/know/
> 
> OK with those comment changes.

Thanks a lot.  Unfortunately I think the patch has a significant problem
that only just occurred to me -- disabling the cp_fold cache when the
flag ff_mce_false is set effectively makes cp_fold_function and
cp_fully_fold_init quadratic in the size of the expression (since
cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
disabled will end up fully walking each subtree).  Note that the reason
we must disable the cache is because cp_fold with ff_mce_false might
give a different folded result than without that flag if the expression
contains a suitable CALL_EXPR subexpression.

One approach to fix this complexity issue would be to parameterize the
cache according to the flags that were passed to cp_fold, which would
allow us to keep the cache enabled when ff_mce_false is set.  A downside
to this approach is that the size of the cp_fold cache would essentially
double since for each tree we'd now have two cache entries, one for
flags=ff_none and another for flags=ff_mce_false.

Another approach would be to split out the trial constexpr evaluation
part of cp_fold's CALL_EXPR handling, parameterize that, and call it
directly from cp_fold_r.  With this approach we wouldn't perform as much
folding, e.g.

  int n = 41 + !std::is_constant_evaluated();

would get folded to 1 + 41 rather than 42.  But I suspect this would
give us 95% of the achievable benefits of the above approach.

I think I'm leaning towards this second approach, which the below patch
implements instead.  What do you think?  Bootstrapped and regtested on
x86_64-pc-linux-gnu.

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

This PR illustrates that __builtin_is_constant_evaluated currently acts
as an optimization barrier for our speculative constexpr evaluation,
since we don't want to prematurely fold the builtin to false before the
expression in question undergoes constant evaluation as in a manifestly
constant-evaluated context (in which case the builtin must instead be
folded to true).

This patch fixes this by permitting __builtin_is_constant_evaluated
to get folded to false from cp_fold_r, where we know we're done with
proper constant evaluation (of manifestly constant-evaluated contexts).

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc
	(cp_fold_r): Remove redundant *stmt_p assignments.  After
	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
	into ...
	(maybe_fold_constexpr_call): ... here.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
 .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
 .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
 3 files changed, 95 insertions(+), 12 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..dca55056b2c 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
 static tree cp_fold (tree);
+static tree maybe_fold_constexpr_call (tree, mce_value);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	    error_at (PTRMEM_CST_LOCATION (stmt),
 		      "taking address of an immediate function %qD",
 		      PTRMEM_CST_MEMBER (stmt));
-	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
+	  stmt = build_zero_cst (TREE_TYPE (stmt));
 	  break;
 	}
       break;
@@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	  error_at (EXPR_LOCATION (stmt),
 		    "taking address of an immediate function %qD",
 		    TREE_OPERAND (stmt, 0));
-	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
+	  stmt = build_zero_cst (TREE_TYPE (stmt));
 	  break;
 	}
       break;
@@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  stmt = cp_fold (stmt);
+
+  if (TREE_CODE (stmt) == CALL_EXPR)
+    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
+       and cp_fully_fold_init) only after the overall expression has been
+       considered for constant-evaluation, we can by now safely fold any
+       remaining __builtin_is_constant_evaluated calls to false, so try
+       constexpr expansion with mce_false.  */
+    stmt = maybe_fold_constexpr_call (stmt, mce_false);
+
+  *stmt_p = stmt;
 
   if (data->pset.add (stmt))
     {
@@ -2952,15 +2963,10 @@ cp_fold (tree x)
 	  }
 
 	optimize = nw;
-
-	/* Invoke maybe_constant_value for functions declared
-	   constexpr and not called with AGGR_INIT_EXPRs.
-	   TODO:
-	   Do constexpr expansion of expressions where the call itself is not
-	   constant, but the call followed by an INDIRECT_REF is.  */
-	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
-	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
+	   since we don't know if we're in a manifestly constant-evaluated
+	   context that hasn't yet been constant-evaluated.  */
+	r = maybe_fold_constexpr_call (x, mce_unknown);
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -3096,6 +3102,31 @@ cp_fold (tree x)
   return x;
 }
 
+/* If the CALL_EXPR X calls a constexpr function, try expanding it via
+   constexpr evaluation.  Returns the expanded result or X if constexpr
+   evaluation wasn't possible.
+
+   TODO: Do constexpr expansion of expressions where the call itself
+   is not constant, but the call followed by an INDIRECT_REF is.  */
+
+static tree
+maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
+{
+  if (flag_no_inline)
+    return x;
+  tree callee = get_callee_fndecl (x);
+  if (!callee)
+    return x;
+  if (DECL_DECLARED_CONSTEXPR_P (callee))
+    {
+      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				     manifestly_const_eval);
+      if (TREE_CODE (r) != CALL_EXPR)
+	return r;
+    }
+  return x;
+}
+
 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
 
 tree
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..2123f20e3e5
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,20 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+constexpr int foo(int n) {
+  return n + !__builtin_is_constant_evaluated();
+}
+
+A* f(int n) {
+  static A a = {n, foo(41)};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump "42" "original" } }
+// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
-- 
2.39.1.418.g7876265d61


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-09 17:36         ` Patrick Palka
@ 2023-02-09 23:36           ` Jason Merrill
  2023-02-10  1:32             ` Patrick Palka
  0 siblings, 1 reply; 15+ messages in thread
From: Jason Merrill @ 2023-02-09 23:36 UTC (permalink / raw)
  To: Patrick Palka; +Cc: gcc-patches

On 2/9/23 09:36, Patrick Palka wrote:
> On Sun, 5 Feb 2023, Jason Merrill wrote:
> 
>> On 2/3/23 15:51, Patrick Palka wrote:
>>> On Mon, 30 Jan 2023, Jason Merrill wrote:
>>>
>>>> On 1/27/23 17:02, Patrick Palka wrote:
>>>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>> expression in question would be later manifestly constant evaluated (in
>>>>> which case it must be folded to true).
>>>>>
>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>> to get folded as false during cp_fold_function, since at that point
>>>>> we're sure we're doing manifestly constant evaluation.  To that end
>>>>> we add a flags parameter to cp_fold that controls what mce_value the
>>>>> CALL_EXPR case passes to maybe_constant_value.
>>>>>
>>>>> bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK for
>>>>> trunk?
>>>>>
>>>>> 	PR c++/108243
>>>>>
>>>>> gcc/cp/ChangeLog:
>>>>>
>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
>>>>> 	(cp_fold_function): Likewise.
>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>> 	(cp_fully_fold_init): Likewise.
>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>> 	isn't empty.
>>>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>>>> 	if ff_genericize is set.
>>>>>
>>>>> gcc/testsuite/ChangeLog:
>>>>>
>>>>> 	* g++.dg/opt/pr108243.C: New test.
>>>>> ---
>>>>>     gcc/cp/cp-gimplify.cc               | 76
>>>>> ++++++++++++++++++-----------
>>>>>     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>>>     2 files changed, 76 insertions(+), 29 deletions(-)
>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>
>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>> index a35cedd05cc..d023a63768f 100644
>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not see
>>>>>     #include "omp-general.h"
>>>>>     #include "opts.h"
>>>>>     +/* Flags for cp_fold and cp_fold_r.  */
>>>>> +
>>>>> +enum fold_flags {
>>>>> +  ff_none = 0,
>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>> +  ff_genericize = 1 << 0,
>>>>> +};
>>>>> +
>>>>>     /* Forward declarations.  */
>>>>>       static tree cp_genericize_r (tree *, int *, void *);
>>>>>     static tree cp_fold_r (tree *, int *, void *);
>>>>>     static void cp_genericize_tree (tree*, bool);
>>>>> -static tree cp_fold (tree);
>>>>> +static tree cp_fold (tree, fold_flags);
>>>>>       /* Genericize a TRY_BLOCK.  */
>>>>>     @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>>>     struct cp_fold_data
>>>>>     {
>>>>>       hash_set<tree> pset;
>>>>> -  bool genericize; // called from cp_fold_function?
>>>>> -
>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>> +  fold_flags flags;
>>>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>>>     };
>>>>>       static tree
>>>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>> *data_)
>>>>>           break;
>>>>>         }
>>>>>     -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>         if (data->pset.add (stmt))
>>>>>         {
>>>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>> void
>>>>> *data_)
>>>>>     	 here rather than in cp_genericize to avoid problems with the
>>>>> invisible
>>>>>     	 reference transition.  */
>>>>>         case INIT_EXPR:
>>>>> -      if (data->genericize)
>>>>> +      if (data->flags & ff_genericize)
>>>>>     	cp_genericize_init_expr (stmt_p);
>>>>>           break;
>>>>>           case TARGET_EXPR:
>>>>> -      if (data->genericize)
>>>>> +      if (data->flags & ff_genericize)
>>>>>     	cp_genericize_target_expr (stmt_p);
>>>>>             /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
>>>>> in
>>>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>> *data_)
>>>>>     void
>>>>>     cp_fold_function (tree fndecl)
>>>>>     {
>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>> +  cp_fold_data data (ff_genericize);
>>>>>       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>>>     }
>>>>>     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>     {
>>>>>       while (true)
>>>>>         {
>>>>> -      x = cp_fold (x);
>>>>> +      x = cp_fold (x, ff_none);
>>>>>           if (rval)
>>>>>     	x = mark_rvalue_use (x);
>>>>>           if (rval && DECL_P (x)
>>>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>>>       if (processing_template_decl)
>>>>>         return x;
>>>>>       x = cp_fully_fold (x);
>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>> +  cp_fold_data data (ff_none);
>>>>>       cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>       return x;
>>>>>     }
>>>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>>>         Function returns X or its folded variant.  */
>>>>>       static tree
>>>>> -cp_fold (tree x)
>>>>> +cp_fold (tree x, fold_flags flags)
>>>>>     {
>>>>>       tree op0, op1, op2, op3;
>>>>>       tree org_x = x, r = NULL_TREE;
>>>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>>>       if (fold_cache == NULL)
>>>>>         fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>     -  if (tree *cached = fold_cache->get (x))
>>>>> -    return *cached;
>>>>> +  bool cache_p = (flags == ff_none);
>>>>> +
>>>>> +  if (cache_p)
>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>> +      return *cached;
>>>>>         uid_sensitive_constexpr_evaluation_checker c;
>>>>>     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>>>     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
>>>>> the
>>>>>     	     folding of the operand should be in the caches and if in
>>>>> cp_fold_r
>>>>>     	     it will modify it in place.  */
>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>     	  if (op0 == error_mark_node)
>>>>>     	    x = error_mark_node;
>>>>>     	  break;
>>>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>>>     	{
>>>>>     	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>     	  if (p != x)
>>>>> -	    return cp_fold (p);
>>>>> +	    return cp_fold (p, flags);
>>>>>     	}
>>>>>           goto unary;
>>>>>     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>>>         case COND_EXPR:
>>>>>           loc = EXPR_LOCATION (x);
>>>>>           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>     	{
>>>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>>>     	      {
>>>>>     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>     		  r = build_nop (TREE_TYPE (x), r);
>>>>> -		x = cp_fold (r);
>>>>> +		x = cp_fold (r, flags);
>>>>>     		break;
>>>>>     	      }
>>>>>     	  }
>>>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>>>     	int m = call_expr_nargs (x);
>>>>>     	for (int i = 0; i < m; i++)
>>>>>     	  {
>>>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>>>     	    if (r != CALL_EXPR_ARG (x, i))
>>>>>     	      {
>>>>>     		if (r == error_mark_node)
>>>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>>>       	if (TREE_CODE (r) != CALL_EXPR)
>>>>>     	  {
>>>>> -	    x = cp_fold (r);
>>>>> +	    x = cp_fold (r, flags);
>>>>>     	    break;
>>>>>     	  }
>>>>>     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>>>     	   constant, but the call followed by an INDIRECT_REF is.  */
>>>>>     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>>>     	    && !flag_no_inline)
>>>>> -	  r = maybe_constant_value (x);
>>>>> +	  {
>>>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>>>> +	    if (flags & ff_genericize)
>>>>> +	      /* At genericization time it's safe to fold
>>>>> +		 __builtin_is_constant_evaluated to false.  */
>>>>> +	      manifestly_const_eval = mce_false;
>>>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>>>> +				      manifestly_const_eval);
>>>>> +	  }
>>>>>     	optimize = sv;
>>>>>               if (TREE_CODE (r) != CALL_EXPR)
>>>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>>>     	vec<constructor_elt, va_gc> *nelts = NULL;
>>>>>     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>>>     	  {
>>>>> -	    tree op = cp_fold (p->value);
>>>>> +	    tree op = cp_fold (p->value, flags);
>>>>>     	    if (op != p->value)
>>>>>     	      {
>>>>>     		if (op == error_mark_node)
>>>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>>>       	for (int i = 0; i < n; i++)
>>>>>     	  {
>>>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>>>     	    if (op != TREE_VEC_ELT (x, i))
>>>>>     	      {
>>>>>     		if (!changed)
>>>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>>>         case ARRAY_RANGE_REF:
>>>>>             loc = EXPR_LOCATION (x);
>>>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>>>             if (op0 != TREE_OPERAND (x, 0)
>>>>>     	  || op1 != TREE_OPERAND (x, 1)
>>>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>>>           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which,
>>>>> after
>>>>>     	 folding, evaluates to an invariant.  In that case no need to
>>>>> wrap
>>>>>     	 this folded tree with a SAVE_EXPR.  */
>>>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>           if (tree_invariant_p (r))
>>>>>     	x = r;
>>>>>           break;
>>>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>>>           copy_warning (x, org_x);
>>>>>         }
>>>>>     -  if (!c.evaluation_restricted_p ())
>>>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>>>         {
>>>>>           fold_cache->put (org_x, x);
>>>>>           /* Prevent that we try to fold an already folded result again.
>>>>> */
>>>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> new file mode 100644
>>>>> index 00000000000..4c45dbba13c
>>>>> --- /dev/null
>>>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>> @@ -0,0 +1,29 @@
>>>>> +// PR c++/108243
>>>>> +// { dg-do compile { target c++11 } }
>>>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>>>> +
>>>>> +constexpr int foo() {
>>>>> +  return __builtin_is_constant_evaluated() + 1;
>>>>> +}
>>>>> +
>>>>> +#if __cpp_if_consteval
>>>>> +constexpr int bar() {
>>>>> +  if consteval {
>>>>> +    return 5;
>>>>> +  } else {
>>>>> +    return 4;
>>>>> +  }
>>>>> +}
>>>>> +#endif
>>>>> +
>>>>> +int p, q;
>>>>> +
>>>>> +int main() {
>>>>> +  p = foo();
>>>>> +#if __cpp_if_consteval
>>>>> +  q = bar();
>>>>> +#endif
>>>>> +}
>>>>> +
>>>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>>>
>>>> Let's also test a static initializer that can't be fully
>>>> constant-evaluated.
>>>
>>> D'oh, doing so revealed that cp_fold_function doesn't reach static
>>> initializers; that's taken care of by cp_fully_fold_init.  So it seems
>>> we need to make cp_fold when called from the latter entry point to also
>>> assume m_c_e is false.  We can't re-use ff_genericize here because that
>>> flag has additional effects in cp_fold_r, so it seems we need another
>>> flag that that only affects the manifestly constant-eval stuff; I called
>>> it ff_mce_false.  How does the following look?
>>>
>>> -- >8 --
>>>
>>> Subject: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated
>>>    [PR108243]
>>>
>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>> as an optimization barrier for our speculative constexpr evaluation,
>>> since we don't want to prematurely fold the builtin to false if the
>>> expression in question would be later manifestly constant evaluated (in
>>> which case it must be folded to true).
>>>
>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>> to get folded as false during cp_fold_function and cp_fully_fold_init,
>>> since at these points we're sure we're done with manifestly constant
>>> evaluation.  To that end we add a flags parameter to cp_fold that
>>> controls whether we pass mce_false or mce_unknown to maybe_constant_value
>>> when folding a CALL_EXPR.
>>>
>>> 	PR c++/108243
>>> 	PR c++/97553
>>>
>>> gcc/cp/ChangeLog:
>>>
>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>> 	(cp_fold_data::fold_flags): ... this.
>>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>>> 	(cp_fold_function): Likewise.
>>> 	(cp_fold_maybe_rvalue): Likewise.
>>> 	(cp_fully_fold_init): Likewise.
>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>> 	isn't empty.
>>> 	<case CALL_EXPR>: If ff_genericize is set, fold
>>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>>> 	maybe_constant_value.
>>>
>>> gcc/testsuite/ChangeLog:
>>>
>>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>>> ---
>>>    gcc/cp/cp-gimplify.cc                         | 88 ++++++++++++-------
>>>    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>>>    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>>>    3 files changed, 104 insertions(+), 30 deletions(-)
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>>    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>>
>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>> index 9929d29981a..590ed787997 100644
>>> --- a/gcc/cp/cp-gimplify.cc
>>> +++ b/gcc/cp/cp-gimplify.cc
>>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>>    #include "omp-general.h"
>>>    #include "opts.h"
>>>    +/* Flags for cp_fold and cp_fold_r.  */
>>> +
>>> +enum fold_flags {
>>> +  ff_none = 0,
>>> +  /* Whether we're being called from cp_fold_function.  */
>>> +  ff_genericize = 1 << 0,
>>> +  /* Whether we're folding late enough that we could assume
>>> +     we're definitely not in a manifestly constant-evaluated
>>> +     context.  */
>>
>> It's not necessarily a matter of late enough; we could fold sooner and still
>> know that, as in cp_fully_fold_init.  We could do the same at other
>> full-expression points, but we don't because we want to delay folding as much
>> as possible.  So let's say "folding at a point where we know we're..."
>>
>>> +  ff_mce_false = 1 << 1,
>>> +};
>>> +
>>> +using fold_flags_t = int;
>>> +
>>>    /* Forward declarations.  */
>>>      static tree cp_genericize_r (tree *, int *, void *);
>>>    static tree cp_fold_r (tree *, int *, void *);
>>>    static void cp_genericize_tree (tree*, bool);
>>> -static tree cp_fold (tree);
>>> +static tree cp_fold (tree, fold_flags_t);
>>>      /* Genericize a TRY_BLOCK.  */
>>>    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>>    struct cp_fold_data
>>>    {
>>>      hash_set<tree> pset;
>>> -  bool genericize; // called from cp_fold_function?
>>> -
>>> -  cp_fold_data (bool g): genericize (g) {}
>>> +  fold_flags_t flags;
>>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>>    };
>>>      static tree
>>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>          break;
>>>        }
>>>    -  *stmt_p = stmt = cp_fold (*stmt_p);
>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>        if (data->pset.add (stmt))
>>>        {
>>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    	 here rather than in cp_genericize to avoid problems with the
>>> invisible
>>>    	 reference transition.  */
>>>        case INIT_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_init_expr (stmt_p);
>>>          break;
>>>          case TARGET_EXPR:
>>> -      if (data->genericize)
>>> +      if (data->flags & ff_genericize)
>>>    	cp_genericize_target_expr (stmt_p);
>>>            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>>> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>> *data_)
>>>    void
>>>    cp_fold_function (tree fndecl)
>>>    {
>>> -  cp_fold_data data (/*genericize*/true);
>>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>
>> Here would be a good place for a comment about passing mce_false because all
>> manifestly-constant-evaluated expressions will have been constant-evaluated
>> already if possible.
>>
>>>      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>    }
>>>    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>    {
>>>      while (true)
>>>        {
>>> -      x = cp_fold (x);
>>> +      x = cp_fold (x, ff_none);
>>>          if (rval)
>>>    	x = mark_rvalue_use (x);
>>>          if (rval && DECL_P (x)
>>> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>>>      if (processing_template_decl)
>>>        return x;
>>>      x = cp_fully_fold (x);
>>> -  cp_fold_data data (/*genericize*/false);
>>> +  cp_fold_data data (ff_mce_false);
>>>      cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>      return x;
>>>    }
>>> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>>>        Function returns X or its folded variant.  */
>>>      static tree
>>> -cp_fold (tree x)
>>> +cp_fold (tree x, fold_flags_t flags)
>>>    {
>>>      tree op0, op1, op2, op3;
>>>      tree org_x = x, r = NULL_TREE;
>>> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>>>      if (fold_cache == NULL)
>>>        fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>    -  if (tree *cached = fold_cache->get (x))
>>> -    return *cached;
>>> +  bool cache_p = (flags == ff_none);
>>> +
>>> +  if (cache_p)
>>> +    if (tree *cached = fold_cache->get (x))
>>> +      return *cached;
>>>        uid_sensitive_constexpr_evaluation_checker c;
>>>    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>>>    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>>    	     folding of the operand should be in the caches and if in
>>> cp_fold_r
>>>    	     it will modify it in place.  */
>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>    	  if (op0 == error_mark_node)
>>>    	    x = error_mark_node;
>>>    	  break;
>>> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>>>    	{
>>>    	  tree p = maybe_undo_parenthesized_ref (x);
>>>    	  if (p != x)
>>> -	    return cp_fold (p);
>>> +	    return cp_fold (p, flags);
>>>    	}
>>>          goto unary;
>>>    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>>>        case COND_EXPR:
>>>          loc = EXPR_LOCATION (x);
>>>          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>    	{
>>> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>>>    	      {
>>>    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>    		  r = build_nop (TREE_TYPE (x), r);
>>> -		x = cp_fold (r);
>>> +		x = cp_fold (r, flags);
>>>    		break;
>>>    	      }
>>>    	  }
>>> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>>>    	  {
>>>    	    switch (DECL_FE_FUNCTION_CODE (callee))
>>>    	      {
>>> -		/* Defer folding __builtin_is_constant_evaluated.  */
>>>    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
>>> +		/* Defer folding __builtin_is_constant_evaluated unless
>>> +		   we can assume this isn't a manifestly constant-evaluated
>>
>> s/can assume/know/
>>
>> OK with those comment changes.
> 
> Thanks a lot.  Unfortunately I think the patch has a significant problem
> that only just occurred to me -- disabling the cp_fold cache when the
> flag ff_mce_false is set effectively makes cp_fold_function and
> cp_fully_fold_init quadratic in the size of the expression (since
> cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> disabled will end up fully walking each subtree).  Note that the reason
> we must disable the cache is because cp_fold with ff_mce_false might
> give a different folded result than without that flag if the expression
> contains a suitable CALL_EXPR subexpression.

Good point.

> One approach to fix this complexity issue would be to parameterize the
> cache according to the flags that were passed to cp_fold, which would
> allow us to keep the cache enabled when ff_mce_false is set.  A downside
> to this approach is that the size of the cp_fold cache would essentially
> double since for each tree we'd now have two cache entries, one for
> flags=ff_none and another for flags=ff_mce_false.

We could also clear the cache before cp_fold_function since the two 
folds shouldn't overlap (much).

> Another approach would be to split out the trial constexpr evaluation
> part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> directly from cp_fold_r.  With this approach we wouldn't perform as much
> folding, e.g.
> 
>    int n = 41 + !std::is_constant_evaluated();
> 
> would get folded to 1 + 41 rather than 42.  But I suspect this would
> give us 95% of the reapable benefits of the above approach.
> 
> I think I'm leaning towards this second approach, which the below patch
> implements instead.  What do you think?  Bootstrapped and regtested on
> x86_64-pc-linux-gnu.

That sounds reasonable, but...

> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> This PR illustrates that __builtin_is_constant_evaluated currently acts
> as an optimization barrier for our speculative constexpr evaluation,
> since we don't want to prematurely fold the builtin to false before the
> expression in question undergoes constant evaluation as in a manifestly
> constant-evaluated context (in which case the builtin must instead be
> folded to true).
> 
> This patch fixes this by permitting __builtin_is_constant_evaluated
> to get folded to false from cp_fold_r, where we know we're done with
> proper constant evaluation (of manifestly constant-evaluated contexts).
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc
> 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> 	into ...
> 	(maybe_fold_constexpr_call): ... here.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
>   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
>   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
>   3 files changed, 95 insertions(+), 12 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..dca55056b2c 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
>   static tree cp_fold (tree);
> +static tree maybe_fold_constexpr_call (tree, mce_value);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	    error_at (PTRMEM_CST_LOCATION (stmt),
>   		      "taking address of an immediate function %qD",
>   		      PTRMEM_CST_MEMBER (stmt));
> -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> +	  stmt = build_zero_cst (TREE_TYPE (stmt));
>   	  break;
>   	}
>         break;
> @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	  error_at (EXPR_LOCATION (stmt),
>   		    "taking address of an immediate function %qD",
>   		    TREE_OPERAND (stmt, 0));
> -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> +	  stmt = build_zero_cst (TREE_TYPE (stmt));
>   	  break;
>   	}
>         break;
> @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  stmt = cp_fold (stmt);
> +
> +  if (TREE_CODE (stmt) == CALL_EXPR)
> +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> +       and cp_fully_fold_init) only after the overall expression has been
> +       considered for constant-evaluation, we can by now safely fold any
> +       remaining __builtin_is_constant_evaluated calls to false, so try
> +       constexpr expansion with mce_false.  */
> +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> +
> +  *stmt_p = stmt;
>   
>     if (data->pset.add (stmt))
>       {
> @@ -2952,15 +2963,10 @@ cp_fold (tree x)
>   	  }
>   
>   	optimize = nw;
> -
> -	/* Invoke maybe_constant_value for functions declared
> -	   constexpr and not called with AGGR_INIT_EXPRs.
> -	   TODO:
> -	   Do constexpr expansion of expressions where the call itself is not
> -	   constant, but the call followed by an INDIRECT_REF is.  */
> -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> -	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> +	   since we don't know if we're in a manifestly constant-evaluated
> +	   context that hasn't yet been constant-evaluated.  */
> +	r = maybe_fold_constexpr_call (x, mce_unknown);

It seems unfortunate to try to fold both here and in cp_fold_r.

Does this patch still fold __builtin_is_constant_evaluated() even though 
it no longer touches the cp_fold builtin handling?

>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -3096,6 +3102,31 @@ cp_fold (tree x)
>     return x;
>   }
>   
> +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> +   constexpr evaluation.  Returns the expanded result or X if constexpr
> +   evaluation wasn't possible.
> +
> +   TODO: Do constexpr expansion of expressions where the call itself
> +   is not constant, but the call followed by an INDIRECT_REF is.  */
> +
> +static tree
> +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> +{
> +  if (flag_no_inline)
> +    return x;
> +  tree callee = get_callee_fndecl (x);
> +  if (!callee)
> +    return x;
> +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> +    {
> +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				     manifestly_const_eval);
> +      if (TREE_CODE (r) != CALL_EXPR)
> +	return r;
> +    }
> +  return x;
> +}
> +
>   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
>   
>   tree
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..2123f20e3e5
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,20 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +constexpr int foo(int n) {
> +  return n + !__builtin_is_constant_evaluated();
> +}
> +
> +A* f(int n) {
> +  static A a = {n, foo(41)};
> +  return &a;
> +}
> +
> +// { dg-final { scan-tree-dump "42" "original" } }
> +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-09 23:36           ` Jason Merrill
@ 2023-02-10  1:32             ` Patrick Palka
  2023-02-10 14:48               ` Patrick Palka
  0 siblings, 1 reply; 15+ messages in thread
From: Patrick Palka @ 2023-02-10  1:32 UTC (permalink / raw)
  To: Jason Merrill; +Cc: Patrick Palka, gcc-patches

On Thu, 9 Feb 2023, Jason Merrill wrote:

> On 2/9/23 09:36, Patrick Palka wrote:
> > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > 
> > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > 
> > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > acts
> > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > expression in question would be later manifestly constant evaluated
> > > > > > (in
> > > > > > which case it must be folded to true).
> > > > > > 
> > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > 
> > > > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > for
> > > > > > trunk?
> > > > > > 
> > > > > > 	PR c++/108243
> > > > > > 
> > > > > > gcc/cp/ChangeLog:
> > > > > > 
> > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> > > > > > 	(cp_fold_function): Likewise.
> > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > 	isn't empty.
> > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > 	if ff_genericize is set.
> > > > > > 
> > > > > > gcc/testsuite/ChangeLog:
> > > > > > 
> > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > ---
> > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > ++++++++++++++++++-----------
> > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > 
> > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > see
> > > > > >     #include "omp-general.h"
> > > > > >     #include "opts.h"
> > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > +
> > > > > > +enum fold_flags {
> > > > > > +  ff_none = 0,
> > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > +  ff_genericize = 1 << 0,
> > > > > > +};
> > > > > > +
> > > > > >     /* Forward declarations.  */
> > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > -static tree cp_fold (tree);
> > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > >     struct cp_fold_data
> > > > > >     {
> > > > > >       hash_set<tree> pset;
> > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > -
> > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > +  fold_flags flags;
> > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > >     };
> > > > > >       static tree
> > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >           break;
> > > > > >         }
> > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > >         if (data->pset.add (stmt))
> > > > > >         {
> > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > invisible
> > > > > >     	 reference transition.  */
> > > > > >         case INIT_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > >           break;
> > > > > >           case TARGET_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > TARGET_EXPR;
> > > > > > in
> > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >     void
> > > > > >     cp_fold_function (tree fndecl)
> > > > > >     {
> > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > +  cp_fold_data data (ff_genericize);
> > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > NULL);
> > > > > >     }
> > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > >     {
> > > > > >       while (true)
> > > > > >         {
> > > > > > -      x = cp_fold (x);
> > > > > > +      x = cp_fold (x, ff_none);
> > > > > >           if (rval)
> > > > > >     	x = mark_rvalue_use (x);
> > > > > >           if (rval && DECL_P (x)
> > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > >       if (processing_template_decl)
> > > > > >         return x;
> > > > > >       x = cp_fully_fold (x);
> > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > +  cp_fold_data data (ff_none);
> > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > >       return x;
> > > > > >     }
> > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > >         Function returns X or its folded variant.  */
> > > > > >       static tree
> > > > > > -cp_fold (tree x)
> > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > >     {
> > > > > >       tree op0, op1, op2, op3;
> > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > >       if (fold_cache == NULL)
> > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > -    return *cached;
> > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > +
> > > > > > +  if (cache_p)
> > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > +      return *cached;
> > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > the
> > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > cp_fold_r
> > > > > >     	     it will modify it in place.  */
> > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >     	  if (op0 == error_mark_node)
> > > > > >     	    x = error_mark_node;
> > > > > >     	  break;
> > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > >     	{
> > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > >     	  if (p != x)
> > > > > > -	    return cp_fold (p);
> > > > > > +	    return cp_fold (p, flags);
> > > > > >     	}
> > > > > >           goto unary;
> > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > >         case COND_EXPR:
> > > > > >           loc = EXPR_LOCATION (x);
> > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > >     	{
> > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > >     	      {
> > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > -		x = cp_fold (r);
> > > > > > +		x = cp_fold (r, flags);
> > > > > >     		break;
> > > > > >     	      }
> > > > > >     	  }
> > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > >     	int m = call_expr_nargs (x);
> > > > > >     	for (int i = 0; i < m; i++)
> > > > > >     	  {
> > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > >     	      {
> > > > > >     		if (r == error_mark_node)
> > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > >     	  {
> > > > > > -	    x = cp_fold (r);
> > > > > > +	    x = cp_fold (r, flags);
> > > > > >     	    break;
> > > > > >     	  }
> > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > >     	    && !flag_no_inline)
> > > > > > -	  r = maybe_constant_value (x);
> > > > > > +	  {
> > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > +	    if (flags & ff_genericize)
> > > > > > +	      /* At genericization time it's safe to fold
> > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > +				      manifestly_const_eval);
> > > > > > +	  }
> > > > > >     	optimize = sv;
> > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > >     	  {
> > > > > > -	    tree op = cp_fold (p->value);
> > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > >     	    if (op != p->value)
> > > > > >     	      {
> > > > > >     		if (op == error_mark_node)
> > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > >       	for (int i = 0; i < n; i++)
> > > > > >     	  {
> > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > >     	      {
> > > > > >     		if (!changed)
> > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > >         case ARRAY_RANGE_REF:
> > > > > >             loc = EXPR_LOCATION (x);
> > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > which,
> > > > > > after
> > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > wrap
> > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >           if (tree_invariant_p (r))
> > > > > >     	x = r;
> > > > > >           break;
> > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > >           copy_warning (x, org_x);
> > > > > >         }
> > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > >         {
> > > > > >           fold_cache->put (org_x, x);
> > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > again.
> > > > > > */
> > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > new file mode 100644
> > > > > > index 00000000000..4c45dbba13c
> > > > > > --- /dev/null
> > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > @@ -0,0 +1,29 @@
> > > > > > +// PR c++/108243
> > > > > > +// { dg-do compile { target c++11 } }
> > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > +
> > > > > > +constexpr int foo() {
> > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > +}
> > > > > > +
> > > > > > +#if __cpp_if_consteval
> > > > > > +constexpr int bar() {
> > > > > > +  if consteval {
> > > > > > +    return 5;
> > > > > > +  } else {
> > > > > > +    return 4;
> > > > > > +  }
> > > > > > +}
> > > > > > +#endif
> > > > > > +
> > > > > > +int p, q;
> > > > > > +
> > > > > > +int main() {
> > > > > > +  p = foo();
> > > > > > +#if __cpp_if_consteval
> > > > > > +  q = bar();
> > > > > > +#endif
> > > > > > +}
> > > > > > +
> > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > 
> > > > > Let's also test a static initializer that can't be fully
> > > > > constant-evaluated.
> > > > 
> > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > we need to make cp_fold when called from the latter entry point to also
> > > > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > > > flag has additional effects in cp_fold_r, so it seems we need another
> > > > flag that that only affects the manifestly constant-eval stuff; I called
> > > > it ff_mce_false.  How does the following look?
> > > > 
> > > > -- >8 --
> > > > 
> > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > is_constant_evaluated
> > > >    [PR108243]
> > > > 
> > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > since we don't want to prematurely fold the builtin to false if the
> > > > expression in question would be later manifestly constant evaluated (in
> > > > which case it must be folded to true).
> > > > 
> > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > since at these points we're sure we're done with manifestly constant
> > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > controls whether we pass mce_false or mce_unknown to
> > > > maybe_constant_value
> > > > when folding a CALL_EXPR.
> > > > 
> > > > 	PR c++/108243
> > > > 	PR c++/97553
> > > > 
> > > > gcc/cp/ChangeLog:
> > > > 
> > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > 	(cp_fold_data::fold_flags): ... this.
> > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > 	(cp_fold_function): Likewise.
> > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > 	(cp_fully_fold_init): Likewise.
> > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > 	isn't empty.
> > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > 	maybe_constant_value.
> > > > 
> > > > gcc/testsuite/ChangeLog:
> > > > 
> > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > ---
> > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > ++++++++++++-------
> > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > 
> > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > index 9929d29981a..590ed787997 100644
> > > > --- a/gcc/cp/cp-gimplify.cc
> > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > >    #include "omp-general.h"
> > > >    #include "opts.h"
> > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > +
> > > > +enum fold_flags {
> > > > +  ff_none = 0,
> > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > +  ff_genericize = 1 << 0,
> > > > +  /* Whether we're folding late enough that we could assume
> > > > +     we're definitely not in a manifestly constant-evaluated
> > > > +     context.  */
> > > 
> > > It's not necessarily a matter of late enough; we could fold sooner and
> > > still
> > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > full-expression points, but we don't because we want to delay folding as
> > > much
> > > as possible.  So let's say "folding at a point where we know we're..."
> > > 
> > > > +  ff_mce_false = 1 << 1,
> > > > +};
> > > > +
> > > > +using fold_flags_t = int;
> > > > +
> > > >    /* Forward declarations.  */
> > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > >    static tree cp_fold_r (tree *, int *, void *);
> > > >    static void cp_genericize_tree (tree*, bool);
> > > > -static tree cp_fold (tree);
> > > > +static tree cp_fold (tree, fold_flags_t);
> > > >      /* Genericize a TRY_BLOCK.  */
> > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > >    struct cp_fold_data
> > > >    {
> > > >      hash_set<tree> pset;
> > > > -  bool genericize; // called from cp_fold_function?
> > > > -
> > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > +  fold_flags_t flags;
> > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > >    };
> > > >      static tree
> > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >          break;
> > > >        }
> > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > >        if (data->pset.add (stmt))
> > > >        {
> > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > void
> > > > *data_)
> > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > invisible
> > > >    	 reference transition.  */
> > > >        case INIT_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_init_expr (stmt_p);
> > > >          break;
> > > >          case TARGET_EXPR:
> > > > -      if (data->genericize)
> > > > +      if (data->flags & ff_genericize)
> > > >    	cp_genericize_target_expr (stmt_p);
> > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > in
> > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >    void
> > > >    cp_fold_function (tree fndecl)
> > > >    {
> > > > -  cp_fold_data data (/*genericize*/true);
> > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > 
> > > Here would be a good place for a comment about passing mce_false because
> > > all
> > > manifestly-constant-evaluated expressions will have been
> > > constant-evaluated
> > > already if possible.
> > > 
> > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > >    }
> > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > >    {
> > > >      while (true)
> > > >        {
> > > > -      x = cp_fold (x);
> > > > +      x = cp_fold (x, ff_none);
> > > >          if (rval)
> > > >    	x = mark_rvalue_use (x);
> > > >          if (rval && DECL_P (x)
> > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > >      if (processing_template_decl)
> > > >        return x;
> > > >      x = cp_fully_fold (x);
> > > > -  cp_fold_data data (/*genericize*/false);
> > > > +  cp_fold_data data (ff_mce_false);
> > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > >      return x;
> > > >    }
> > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > >        Function returns X or its folded variant.  */
> > > >      static tree
> > > > -cp_fold (tree x)
> > > > +cp_fold (tree x, fold_flags_t flags)
> > > >    {
> > > >      tree op0, op1, op2, op3;
> > > >      tree org_x = x, r = NULL_TREE;
> > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > >      if (fold_cache == NULL)
> > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > >    -  if (tree *cached = fold_cache->get (x))
> > > > -    return *cached;
> > > > +  bool cache_p = (flags == ff_none);
> > > > +
> > > > +  if (cache_p)
> > > > +    if (tree *cached = fold_cache->get (x))
> > > > +      return *cached;
> > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > the
> > > >    	     folding of the operand should be in the caches and if in
> > > > cp_fold_r
> > > >    	     it will modify it in place.  */
> > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > >    	  if (op0 == error_mark_node)
> > > >    	    x = error_mark_node;
> > > >    	  break;
> > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > >    	{
> > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > >    	  if (p != x)
> > > > -	    return cp_fold (p);
> > > > +	    return cp_fold (p, flags);
> > > >    	}
> > > >          goto unary;
> > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > >        case COND_EXPR:
> > > >          loc = EXPR_LOCATION (x);
> > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > >    	{
> > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > >    	      {
> > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > -		x = cp_fold (r);
> > > > +		x = cp_fold (r, flags);
> > > >    		break;
> > > >    	      }
> > > >    	  }
> > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > >    	  {
> > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > >    	      {
> > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > 
> > > s/can assume/know/
> > > 
> > > OK with those comment changes.
> > 
> > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > that only just occurred to me -- disabling the cp_fold cache when the
> > flag ff_mce_false is set effectively makes cp_fold_function and
> > cp_fully_fold_init quadratic in the size of the expression (since
> > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > disabled will end up fully walking each subtree).  Note that the reason
> > we must disable the cache is because cp_fold with ff_mce_false might
> > give a different folded result than without that flag if the expression
> > contains a suitable CALL_EXPR subexpression.
> 
> Good point.
> 
> > One approach to fix this complexity issue would be to parameterize the
> > cache according to the flags that were passed to cp_fold, which would
> > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > to this approach is that the size of the cp_fold cache would essentially
> > double since for each tree we'd now have two cache entries, one for
> > flags=ff_none and another for flags=ff_mce_false.
> 
> We could also clear the cache before cp_fold_function since the two folds
> shouldn't overlap (much).

Makes sense, but IIUC we'd also have to clear it before (and after)
cp_fully_fold_init too, which unlike cp_fold_function may get called
in the middle of a function body.

> 
> > Another approach would be to split out the trial constexpr evaluation
> > part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> > directly from cp_fold_r.  With this approach we wouldn't perform as much
> > folding, e.g.
> > 
> >    int n = 41 + !std::is_constant_evaluated();
> > 
> > would get folded to 1 + 41 rather than 42.  But I suspect this would
> > give us 95% of the reapable benefits of the above approach.
> > 
> > I think I'm leaning towards this second approach, which the below patch
> > implements instead.  What do you think?  Bootstrapped and regtested on
> > x86_64-pc-linux-gnu.
> 
> That sounds reasonable, but...
> 
> > -- >8 --
> > 
> > Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
> >   [PR108243]
> > 
> > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > as an optimization barrier for our speculative constexpr evaluation,
> > since we don't want to prematurely fold the builtin to false before the
> > expression in question undergoes constant evaluation as in a manifestly
> > constant-evaluated context (in which case the builtin must instead be
> > folded to true).
> > 
> > This patch fixes this by permitting __builtin_is_constant_evaluated
> > to get folded to false from cp_fold_r, where we know we're done with
> > proper constant evaluation (of manifestly constant-evaluated contexts).
> > 
> > 	PR c++/108243
> > 	PR c++/97553
> > 
> > gcc/cp/ChangeLog:
> > 
> > 	* cp-gimplify.cc
> > 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> > 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> > 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> > 	into ...
> > 	(maybe_fold_constexpr_call): ... here.
> > 
> > gcc/testsuite/ChangeLog:
> > 
> > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > ---
> >   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
> >   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
> >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
> >   3 files changed, 95 insertions(+), 12 deletions(-)
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index 9929d29981a..dca55056b2c 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
> >   static tree cp_fold_r (tree *, int *, void *);
> >   static void cp_genericize_tree (tree*, bool);
> >   static tree cp_fold (tree);
> > +static tree maybe_fold_constexpr_call (tree, mce_value);
> >     /* Genericize a TRY_BLOCK.  */
> >   @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	    error_at (PTRMEM_CST_LOCATION (stmt),
> >   		      "taking address of an immediate function %qD",
> >   		      PTRMEM_CST_MEMBER (stmt));
> > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> >   	  break;
> >   	}
> >         break;
> > @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >   	  error_at (EXPR_LOCATION (stmt),
> >   		    "taking address of an immediate function %qD",
> >   		    TREE_OPERAND (stmt, 0));
> > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> >   	  break;
> >   	}
> >         break;
> > @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > *data_)
> >         break;
> >       }
> >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > +  stmt = cp_fold (stmt);
> > +
> > +  if (TREE_CODE (stmt) == CALL_EXPR)
> > +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> > +       and cp_fully_fold_init) only after the overall expression has been
> > +       considered for constant-evaluation, we can by now safely fold any
> > +       remaining __builtin_is_constant_evaluated calls to false, so try
> > +       constexpr expansion with mce_false.  */
> > +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> > +
> > +  *stmt_p = stmt;
> >       if (data->pset.add (stmt))
> >       {
> > @@ -2952,15 +2963,10 @@ cp_fold (tree x)
> >   	  }
> >     	optimize = nw;
> > -
> > -	/* Invoke maybe_constant_value for functions declared
> > -	   constexpr and not called with AGGR_INIT_EXPRs.
> > -	   TODO:
> > -	   Do constexpr expansion of expressions where the call itself is not
> > -	   constant, but the call followed by an INDIRECT_REF is.  */
> > -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > -	    && !flag_no_inline)
> > -	  r = maybe_constant_value (x);
> > +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> > +	   since we don't know if we're in a manifestly constant-evaluated
> > +	   context that hasn't yet been constant-evaluated.  */
> > +	r = maybe_fold_constexpr_call (x, mce_unknown);
> 
> It seems unfortunate to try to fold both here and in cp_fold_r.

Yes, though I'm afraid some duplication of work is pretty much
unavoidable.  Even if in cp_fold_r we did something like

  if (TREE_CODE (stmt) == CALL_EXPR)
    /* cp_fold_call_expr is the entire CALL_EXPR case of cp_fold.  */
    stmt = cp_fold_call_expr (stmt, mce_false);
  else
    stmt = cp_fold (stmt);

instead of

  stmt = cp_fold (stmt);
  if (TREE_CODE (stmt) == CALL_EXPR)
    stmt = maybe_fold_constexpr_call (stmt, mce_false);

we would still end up doing cp_fold on the CALL_EXPR if it's a
subexpression of some larger expression (since cp_fold is recursive).

And even if we went with the original approach of parameterizing cp_fold
according to manifestly_const_eval totally, we would still end up trying
to fold some constexpr calls twice, first with flags=ff_none (during
some on-the-spot folding) and again with flags=ff_mce_false (during
cp_fold_function), I think.

> 
> Does this patch still fold __builtin_is_constant_evaluated() even though it no
> longer touches the cp_fold builtin handling?

Indeed it doesn't, instead __builtin_is_constant_evaluated() will
continue to get folded during gimplification.  I thought folding it
might not be benefical with this approach, but on second thought we
should do it for consistency at least.  The following incremental
patch seems to work:

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index dca55056b2c..250147bde0f 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -3124,6 +3124,14 @@ maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
       if (TREE_CODE (r) != CALL_EXPR)
 	return r;
     }
+  if (fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
+			 BUILT_IN_FRONTEND))
+    {
+      if (manifestly_const_eval == mce_true)
+	return boolean_true_node;
+      else if (manifestly_const_eval == mce_false)
+	return boolean_false_node;
+    }
   return x;
 }
 

> 
> >   	optimize = sv;
> >             if (TREE_CODE (r) != CALL_EXPR)
> > @@ -3096,6 +3102,31 @@ cp_fold (tree x)
> >     return x;
> >   }
> >   +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> > +   constexpr evaluation.  Returns the expanded result or X if constexpr
> > +   evaluation wasn't possible.
> > +
> > +   TODO: Do constexpr expansion of expressions where the call itself
> > +   is not constant, but the call followed by an INDIRECT_REF is.  */
> > +
> > +static tree
> > +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> > +{
> > +  if (flag_no_inline)
> > +    return x;
> > +  tree callee = get_callee_fndecl (x);
> > +  if (!callee)
> > +    return x;
> > +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> > +    {
> > +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > +				     manifestly_const_eval);
> > +      if (TREE_CODE (r) != CALL_EXPR)
> > +	return r;
> > +    }
> > +  return x;
> > +}
> > +
> >   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.
> > */
> >     tree
> > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > new file mode 100644
> > index 00000000000..2123f20e3e5
> > --- /dev/null
> > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > @@ -0,0 +1,20 @@
> > +// PR c++/108243
> > +// { dg-do compile { target c++11 } }
> > +// { dg-additional-options "-O -fdump-tree-original" }
> > +
> > +struct A {
> > +  constexpr A(int n, int m) : n(n), m(m) { }
> > +  int n, m;
> > +};
> > +
> > +constexpr int foo(int n) {
> > +  return n + !__builtin_is_constant_evaluated();
> > +}
> > +
> > +A* f(int n) {
> > +  static A a = {n, foo(41)};
> > +  return &a;
> > +}
> > +
> > +// { dg-final { scan-tree-dump "42" "original" } }
> > +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > new file mode 100644
> > index 00000000000..ed964e20a7a
> > --- /dev/null
> > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > @@ -0,0 +1,32 @@
> > +// PR c++/97553
> > +// { dg-do compile { target c++11 } }
> > +// { dg-additional-options "-O -fdump-tree-original" }
> > +
> > +constexpr int foo() {
> > +  return __builtin_is_constant_evaluated() + 1;
> > +}
> > +
> > +#if __cpp_if_consteval
> > +constexpr int bar() {
> > +  if consteval {
> > +    return 5;
> > +  } else {
> > +    return 4;
> > +  }
> > +}
> > +#endif
> > +
> > +int p, q;
> > +
> > +int main() {
> > +  p = foo();
> > +#if __cpp_if_consteval
> > +  q = bar();
> > +#endif
> > +}
> > +
> > +// { dg-final { scan-tree-dump "p = 1" "original" } }
> > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > +
> > +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> > +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> 
> 


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-10  1:32             ` Patrick Palka
@ 2023-02-10 14:48               ` Patrick Palka
  2023-02-10 16:51                 ` Patrick Palka
  0 siblings, 1 reply; 15+ messages in thread
From: Patrick Palka @ 2023-02-10 14:48 UTC (permalink / raw)
  To: Patrick Palka; +Cc: Jason Merrill, gcc-patches

On Thu, 9 Feb 2023, Patrick Palka wrote:

> On Thu, 9 Feb 2023, Jason Merrill wrote:
> 
> > On 2/9/23 09:36, Patrick Palka wrote:
> > > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > > 
> > > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > > 
> > > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > > acts
> > > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > > expression in question would be later manifestly constant evaluated
> > > > > > > (in
> > > > > > > which case it must be folded to true).
> > > > > > > 
> > > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > > 
> > > > > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > > for
> > > > > > > trunk?
> > > > > > > 
> > > > > > > 	PR c++/108243
> > > > > > > 
> > > > > > > gcc/cp/ChangeLog:
> > > > > > > 
> > > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
> > > > > > > 	(cp_fold_function): Likewise.
> > > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > > 	isn't empty.
> > > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > > 	if ff_genericize is set.
> > > > > > > 
> > > > > > > gcc/testsuite/ChangeLog:
> > > > > > > 
> > > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > > ---
> > > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > > ++++++++++++++++++-----------
> > > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > 
> > > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > > see
> > > > > > >     #include "omp-general.h"
> > > > > > >     #include "opts.h"
> > > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > > +
> > > > > > > +enum fold_flags {
> > > > > > > +  ff_none = 0,
> > > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > > +  ff_genericize = 1 << 0,
> > > > > > > +};
> > > > > > > +
> > > > > > >     /* Forward declarations.  */
> > > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > > -static tree cp_fold (tree);
> > > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > > >     struct cp_fold_data
> > > > > > >     {
> > > > > > >       hash_set<tree> pset;
> > > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > > -
> > > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > > +  fold_flags flags;
> > > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > > >     };
> > > > > > >       static tree
> > > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >           break;
> > > > > > >         }
> > > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > > >         if (data->pset.add (stmt))
> > > > > > >         {
> > > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > > invisible
> > > > > > >     	 reference transition.  */
> > > > > > >         case INIT_EXPR:
> > > > > > > -      if (data->genericize)
> > > > > > > +      if (data->flags & ff_genericize)
> > > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > > >           break;
> > > > > > >           case TARGET_EXPR:
> > > > > > > -      if (data->genericize)
> > > > > > > +      if (data->flags & ff_genericize)
> > > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > > TARGET_EXPR;
> > > > > > > in
> > > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > void
> > > > > > > *data_)
> > > > > > >     void
> > > > > > >     cp_fold_function (tree fndecl)
> > > > > > >     {
> > > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > > +  cp_fold_data data (ff_genericize);
> > > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > > NULL);
> > > > > > >     }
> > > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > > >     {
> > > > > > >       while (true)
> > > > > > >         {
> > > > > > > -      x = cp_fold (x);
> > > > > > > +      x = cp_fold (x, ff_none);
> > > > > > >           if (rval)
> > > > > > >     	x = mark_rvalue_use (x);
> > > > > > >           if (rval && DECL_P (x)
> > > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > > >       if (processing_template_decl)
> > > > > > >         return x;
> > > > > > >       x = cp_fully_fold (x);
> > > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > > +  cp_fold_data data (ff_none);
> > > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > > >       return x;
> > > > > > >     }
> > > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > > >         Function returns X or its folded variant.  */
> > > > > > >       static tree
> > > > > > > -cp_fold (tree x)
> > > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > > >     {
> > > > > > >       tree op0, op1, op2, op3;
> > > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > > >       if (fold_cache == NULL)
> > > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > > -    return *cached;
> > > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > > +
> > > > > > > +  if (cache_p)
> > > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > > +      return *cached;
> > > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > > the
> > > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > > cp_fold_r
> > > > > > >     	     it will modify it in place.  */
> > > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > >     	  if (op0 == error_mark_node)
> > > > > > >     	    x = error_mark_node;
> > > > > > >     	  break;
> > > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > > >     	{
> > > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > > >     	  if (p != x)
> > > > > > > -	    return cp_fold (p);
> > > > > > > +	    return cp_fold (p, flags);
> > > > > > >     	}
> > > > > > >           goto unary;
> > > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > > >         case COND_EXPR:
> > > > > > >           loc = EXPR_LOCATION (x);
> > > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > > >     	{
> > > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > > >     	      {
> > > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > > -		x = cp_fold (r);
> > > > > > > +		x = cp_fold (r, flags);
> > > > > > >     		break;
> > > > > > >     	      }
> > > > > > >     	  }
> > > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > > >     	int m = call_expr_nargs (x);
> > > > > > >     	for (int i = 0; i < m; i++)
> > > > > > >     	  {
> > > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > > >     	      {
> > > > > > >     		if (r == error_mark_node)
> > > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > > >     	  {
> > > > > > > -	    x = cp_fold (r);
> > > > > > > +	    x = cp_fold (r, flags);
> > > > > > >     	    break;
> > > > > > >     	  }
> > > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > > >     	    && !flag_no_inline)
> > > > > > > -	  r = maybe_constant_value (x);
> > > > > > > +	  {
> > > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > > +	    if (flags & ff_genericize)
> > > > > > > +	      /* At genericization time it's safe to fold
> > > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > > +				      manifestly_const_eval);
> > > > > > > +	  }
> > > > > > >     	optimize = sv;
> > > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > > >     	  {
> > > > > > > -	    tree op = cp_fold (p->value);
> > > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > > >     	    if (op != p->value)
> > > > > > >     	      {
> > > > > > >     		if (op == error_mark_node)
> > > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > > >       	for (int i = 0; i < n; i++)
> > > > > > >     	  {
> > > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > > >     	      {
> > > > > > >     		if (!changed)
> > > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > > >         case ARRAY_RANGE_REF:
> > > > > > >             loc = EXPR_LOCATION (x);
> > > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > > which,
> > > > > > > after
> > > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > > wrap
> > > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > >           if (tree_invariant_p (r))
> > > > > > >     	x = r;
> > > > > > >           break;
> > > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > > >           copy_warning (x, org_x);
> > > > > > >         }
> > > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > > >         {
> > > > > > >           fold_cache->put (org_x, x);
> > > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > > again.
> > > > > > > */
> > > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > new file mode 100644
> > > > > > > index 00000000000..4c45dbba13c
> > > > > > > --- /dev/null
> > > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > @@ -0,0 +1,29 @@
> > > > > > > +// PR c++/108243
> > > > > > > +// { dg-do compile { target c++11 } }
> > > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > > +
> > > > > > > +constexpr int foo() {
> > > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > > +}
> > > > > > > +
> > > > > > > +#if __cpp_if_consteval
> > > > > > > +constexpr int bar() {
> > > > > > > +  if consteval {
> > > > > > > +    return 5;
> > > > > > > +  } else {
> > > > > > > +    return 4;
> > > > > > > +  }
> > > > > > > +}
> > > > > > > +#endif
> > > > > > > +
> > > > > > > +int p, q;
> > > > > > > +
> > > > > > > +int main() {
> > > > > > > +  p = foo();
> > > > > > > +#if __cpp_if_consteval
> > > > > > > +  q = bar();
> > > > > > > +#endif
> > > > > > > +}
> > > > > > > +
> > > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > > 
> > > > > > Let's also test a static initializer that can't be fully
> > > > > > constant-evaluated.
> > > > > 
> > > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > > we need to make cp_fold when called from the latter entry point to also
> > > > > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > > > > flag has additional effects in cp_fold_r, so it seems we need another
> > > > > flag that only affects the manifestly constant-eval stuff; I called
> > > > > it ff_mce_false.  How does the following look?
> > > > > 
> > > > > -- >8 --
> > > > > 
> > > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > > is_constant_evaluated
> > > > >    [PR108243]
> > > > > 
> > > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > expression in question would be later manifestly constant evaluated (in
> > > > > which case it must be folded to true).
> > > > > 
> > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > > since at these points we're sure we're done with manifestly constant
> > > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > > controls whether we pass mce_false or mce_unknown to
> > > > > maybe_constant_value
> > > > > when folding a CALL_EXPR.
> > > > > 
> > > > > 	PR c++/108243
> > > > > 	PR c++/97553
> > > > > 
> > > > > gcc/cp/ChangeLog:
> > > > > 
> > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > > 	(cp_fold_function): Likewise.
> > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > 	(cp_fully_fold_init): Likewise.
> > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > 	isn't empty.
> > > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > > 	maybe_constant_value.
> > > > > 
> > > > > gcc/testsuite/ChangeLog:
> > > > > 
> > > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > > ---
> > > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > > ++++++++++++-------
> > > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > > 
> > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > index 9929d29981a..590ed787997 100644
> > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > > >    #include "omp-general.h"
> > > > >    #include "opts.h"
> > > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > > +
> > > > > +enum fold_flags {
> > > > > +  ff_none = 0,
> > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > +  ff_genericize = 1 << 0,
> > > > > +  /* Whether we're folding late enough that we could assume
> > > > > +     we're definitely not in a manifestly constant-evaluated
> > > > > +     context.  */
> > > > 
> > > > It's not necessarily a matter of late enough; we could fold sooner and
> > > > still
> > > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > > full-expression points, but we don't because we want to delay folding as
> > > > much
> > > > as possible.  So let's say "folding at a point where we know we're..."
> > > > 
> > > > > +  ff_mce_false = 1 << 1,
> > > > > +};
> > > > > +
> > > > > +using fold_flags_t = int;
> > > > > +
> > > > >    /* Forward declarations.  */
> > > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > > >    static tree cp_fold_r (tree *, int *, void *);
> > > > >    static void cp_genericize_tree (tree*, bool);
> > > > > -static tree cp_fold (tree);
> > > > > +static tree cp_fold (tree, fold_flags_t);
> > > > >      /* Genericize a TRY_BLOCK.  */
> > > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > > >    struct cp_fold_data
> > > > >    {
> > > > >      hash_set<tree> pset;
> > > > > -  bool genericize; // called from cp_fold_function?
> > > > > -
> > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > +  fold_flags_t flags;
> > > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > > >    };
> > > > >      static tree
> > > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > *data_)
> > > > >          break;
> > > > >        }
> > > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > >        if (data->pset.add (stmt))
> > > > >        {
> > > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > void
> > > > > *data_)
> > > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > > invisible
> > > > >    	 reference transition.  */
> > > > >        case INIT_EXPR:
> > > > > -      if (data->genericize)
> > > > > +      if (data->flags & ff_genericize)
> > > > >    	cp_genericize_init_expr (stmt_p);
> > > > >          break;
> > > > >          case TARGET_EXPR:
> > > > > -      if (data->genericize)
> > > > > +      if (data->flags & ff_genericize)
> > > > >    	cp_genericize_target_expr (stmt_p);
> > > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > > in
> > > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > *data_)
> > > > >    void
> > > > >    cp_fold_function (tree fndecl)
> > > > >    {
> > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > > 
> > > > Here would be a good place for a comment about passing mce_false because
> > > > all
> > > > manifestly-constant-evaluated expressions will have been
> > > > constant-evaluated
> > > > already if possible.
> > > > 
> > > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > > >    }
> > > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > >    {
> > > > >      while (true)
> > > > >        {
> > > > > -      x = cp_fold (x);
> > > > > +      x = cp_fold (x, ff_none);
> > > > >          if (rval)
> > > > >    	x = mark_rvalue_use (x);
> > > > >          if (rval && DECL_P (x)
> > > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > > >      if (processing_template_decl)
> > > > >        return x;
> > > > >      x = cp_fully_fold (x);
> > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > +  cp_fold_data data (ff_mce_false);
> > > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > >      return x;
> > > > >    }
> > > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > > >        Function returns X or its folded variant.  */
> > > > >      static tree
> > > > > -cp_fold (tree x)
> > > > > +cp_fold (tree x, fold_flags_t flags)
> > > > >    {
> > > > >      tree op0, op1, op2, op3;
> > > > >      tree org_x = x, r = NULL_TREE;
> > > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > > >      if (fold_cache == NULL)
> > > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > >    -  if (tree *cached = fold_cache->get (x))
> > > > > -    return *cached;
> > > > > +  bool cache_p = (flags == ff_none);
> > > > > +
> > > > > +  if (cache_p)
> > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > +      return *cached;
> > > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > the
> > > > >    	     folding of the operand should be in the caches and if in
> > > > > cp_fold_r
> > > > >    	     it will modify it in place.  */
> > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > >    	  if (op0 == error_mark_node)
> > > > >    	    x = error_mark_node;
> > > > >    	  break;
> > > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > > >    	{
> > > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > > >    	  if (p != x)
> > > > > -	    return cp_fold (p);
> > > > > +	    return cp_fold (p, flags);
> > > > >    	}
> > > > >          goto unary;
> > > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > > >        case COND_EXPR:
> > > > >          loc = EXPR_LOCATION (x);
> > > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > >    	{
> > > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > > >    	      {
> > > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > > -		x = cp_fold (r);
> > > > > +		x = cp_fold (r, flags);
> > > > >    		break;
> > > > >    	      }
> > > > >    	  }
> > > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > > >    	  {
> > > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > > >    	      {
> > > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > > 
> > > > s/can assume/know/
> > > > 
> > > > OK with those comment changes.
> > > 
> > > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > > that only just occurred to me -- disabling the cp_fold cache when the
> > > flag ff_mce_false is set effectively makes cp_fold_function and
> > > cp_fully_fold_init quadratic in the size of the expression (since
> > > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > > disabled will end up fully walking each subtree).  Note that the reason
> > > we must disable the cache is because cp_fold with ff_mce_false might
> > > give a different folded result than without that flag if the expression
> > > contains a suitable CALL_EXPR subexpression.
> > 
> > Good point.
> > 
> > > One approach to fix this complexity issue would be to parameterize the
> > > cache according to the flags that were passed to cp_fold, which would
> > > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > > to this approach is that the size of the cp_fold cache would essentially
> > > double since for each tree we'd now have two cache entries, one for
> > > flags=ff_none and another for flags=ff_mce_false.
> > 
> > We could also clear the cache before cp_fold_function since the two folds
> > shouldn't overlap (much).
> 
> Makes sense, but IIUC we'd also have to clear it before (and after)
> cp_fully_fold_init too, which unlike cp_fold_function may get called
> in the middle of a function body.

Ah sorry, I think I misunderstood your idea.  Clearing the cache between
cp_fold_function would definitely help with controlling the size of the
cache, and indeed there shouldn't be much overlap because there isn't
much sharing of expression trees across function bodies.

However, I was curious about how big the fold_cache gets in practice,
and it turns out it doesn't get very big at all since we regularly clear
the fold_cache via clear_cv_and_fold_caches anyway.  According to my
experiments it doesn't get larger than about ~10k elements.  So a
doubling of that is pretty much insignificant.

So ISTM parameterizing the cache is the way to go.  How does the
following look?

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(fold_flags_t): Declare.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Likewise.
	(cp_fully_fold_init): Likewise.
	(fold_cache): Replace with ...
	(fold_caches): ... this 2-element array of caches.
	(get_fold_cache): Define.
	(clear_fold_cache): Adjust.
	(cp_fold): Add flags parameter.  Call get_fold_cache.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
 .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
 .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
 3 files changed, 120 insertions(+), 30 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..01e624bc9de 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
+  /* Whether we're folding a point where we know we're
+     definitely not in a manifestly constant-evaluated
+     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  /* By now all manifestly-constant-evaluated expressions will have
+     been constant-evaluated already if possible, so we can safely
+     pass ff_mce_false.  */
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, ff_none);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
   return cp_fold_maybe_rvalue (x, !lval);
 }
 
-static GTY((deletable)) hash_map<tree, tree> *fold_cache;
+static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
+
+/* Subroutine of cp_fold.  Returns which fold cache to use according
+   to the given flags.  We need multiple caches since the result of
+   folding may depend on which flags are used.  */
+
+static hash_map<tree, tree> *&
+get_fold_cache (fold_flags_t flags)
+{
+  if (flags & ff_mce_false)
+    return fold_caches[1];
+  else
+    return fold_caches[0];
+}
 
 /* Dispose of the whole FOLD_CACHE.  */
 
 void
 clear_fold_cache (void)
 {
-  if (fold_cache != NULL)
-    fold_cache->empty ();
+  for (auto& fold_cache : fold_caches)
+    if (fold_cache != NULL)
+      fold_cache->empty ();
 }
 
 /*  This function tries to fold an expression X.
@@ -2485,7 +2515,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2503,6 +2533,7 @@ cp_fold (tree x)
   if (DECL_P (x) || CONSTANT_CLASS_P (x))
     return x;
 
+  auto& fold_cache = get_fold_cache (flags);
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
@@ -2542,7 +2573,7 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
@@ -2587,7 +2618,7 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
@@ -2779,8 +2810,8 @@ cp_fold (tree x)
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2901,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2921,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we know this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2959,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2982,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +2995,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3030,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3061,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3078,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3109,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..983410b9e83
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,15 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+A* f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
-- 
2.39.1.433.g23c56f7bd5


> 
> > 
> > > Another approach would be to split out the trial constexpr evaluation
> > > part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> > > directly from cp_fold_r.  With this approach we wouldn't perform as much
> > > folding, e.g.
> > > 
> > >    int n = 41 + !std::is_constant_evaluated();
> > > 
> > > would get folded to 1 + 41 rather than 42.  But I suspect this would
> > > give us 95% of the reapable benefits of the above approach.
> > > 
> > > I think I'm leaning towards this second approach, which the below patch
> > > implements instead.  What do you think?  Bootstrapped and regtested on
> > > x86_64-pc-linux-gnu.
> > 
> > That sounds reasonable, but...
> > 
> > > -- >8 --
> > > 
> > > Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
> > >   [PR108243]
> > > 
> > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > as an optimization barrier for our speculative constexpr evaluation,
> > > since we don't want to prematurely fold the builtin to false before the
> > > expression in question undergoes constant evaluation as in a manifestly
> > > constant-evaluated context (in which case the builtin must instead be
> > > folded to true).
> > > 
> > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > to get folded to false from cp_fold_r, where we know we're done with
> > > proper constant evaluation (of manifestly constant-evaluated contexts).
> > > 
> > > 	PR c++/108243
> > > 	PR c++/97553
> > > 
> > > gcc/cp/ChangeLog:
> > > 
> > > 	* cp-gimplify.cc
> > > 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> > > 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> > > 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> > > 	into ...
> > > 	(maybe_fold_constexpr_call): ... here.
> > > 
> > > gcc/testsuite/ChangeLog:
> > > 
> > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > ---
> > >   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
> > >   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
> > >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
> > >   3 files changed, 95 insertions(+), 12 deletions(-)
> > >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > 
> > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > index 9929d29981a..dca55056b2c 100644
> > > --- a/gcc/cp/cp-gimplify.cc
> > > +++ b/gcc/cp/cp-gimplify.cc
> > > @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
> > >   static tree cp_fold_r (tree *, int *, void *);
> > >   static void cp_genericize_tree (tree*, bool);
> > >   static tree cp_fold (tree);
> > > +static tree maybe_fold_constexpr_call (tree, mce_value);
> > >     /* Genericize a TRY_BLOCK.  */
> > >   @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >   	    error_at (PTRMEM_CST_LOCATION (stmt),
> > >   		      "taking address of an immediate function %qD",
> > >   		      PTRMEM_CST_MEMBER (stmt));
> > > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> > >   	  break;
> > >   	}
> > >         break;
> > > @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >   	  error_at (EXPR_LOCATION (stmt),
> > >   		    "taking address of an immediate function %qD",
> > >   		    TREE_OPERAND (stmt, 0));
> > > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> > >   	  break;
> > >   	}
> > >         break;
> > > @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > *data_)
> > >         break;
> > >       }
> > >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > +  stmt = cp_fold (stmt);
> > > +
> > > +  if (TREE_CODE (stmt) == CALL_EXPR)
> > > +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> > > +       and cp_fully_fold_init) only after the overall expression has been
> > > +       considered for constant-evaluation, we can by now safely fold any
> > > +       remaining __builtin_is_constant_evaluated calls to false, so try
> > > +       constexpr expansion with mce_false.  */
> > > +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> > > +
> > > +  *stmt_p = stmt;
> > >       if (data->pset.add (stmt))
> > >       {
> > > @@ -2952,15 +2963,10 @@ cp_fold (tree x)
> > >   	  }
> > >     	optimize = nw;
> > > -
> > > -	/* Invoke maybe_constant_value for functions declared
> > > -	   constexpr and not called with AGGR_INIT_EXPRs.
> > > -	   TODO:
> > > -	   Do constexpr expansion of expressions where the call itself is not
> > > -	   constant, but the call followed by an INDIRECT_REF is.  */
> > > -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > -	    && !flag_no_inline)
> > > -	  r = maybe_constant_value (x);
> > > +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> > > +	   since we don't know if we're in a manifestly constant-evaluated
> > > +	   context that hasn't yet been constant-evaluated.  */
> > > +	r = maybe_fold_constexpr_call (x, mce_unknown);
> > 
> > It seems unfortunate to try to fold both here and in cp_fold_r.
> 
> Yes, though I'm afraid some duplication of work is pretty much
> unavoidable.  Even if in cp_fold_r we did something like
> 
>   if (TREE_CODE (stmt) == CALL_EXPR)
>     /* cp_fold_call_expr is the entire CALL_EXPR case of cp_fold.  */
>     stmt = cp_fold_call_expr (stmt, mce_false);
>   else
>     stmt = cp_fold (stmt);
> 
> instead of
> 
>   stmt = cp_fold (stmt);
>   if (TREE_CODE (stmt) == CALL_EXPR)
>     stmt = maybe_fold_constexpr_call (stmt, mce_false);
> 
> we would still end up doing cp_fold on the CALL_EXPR if it's a
> subexpression of some larger expression (since cp_fold is recursive).
> 
> And even if we went with the original approach of parameterizing cp_fold
> according to manifestly_const_eval totally, we would still end up trying
> to fold some constexpr calls twice, first with flags=ff_none (during
> some on-the-spot folding) and again with flags=ff_mce_false (during
> cp_fold_function), I think.
> 
> > 
> > Does this patch still fold __builtin_is_constant_evaluated() even though it no
> > longer touches the cp_fold builtin handling?
> 
> Indeed it doesn't, instead __builtin_is_constant_evaluated() will
> continue to get folded during gimplification.  I thought folding it
> might not be benefical with this approach, but on second thought we
> should do it for consistency at least.  The following incremental
> patch seems to work:
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index dca55056b2c..250147bde0f 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -3124,6 +3124,14 @@ maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
>        if (TREE_CODE (r) != CALL_EXPR)
>  	return r;
>      }
> +  if (fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
> +			 BUILT_IN_FRONTEND))
> +    {
> +      if (manifestly_const_eval == mce_true)
> +	return boolean_true_node;
> +      else if (manifestly_const_eval == mce_false)
> +	return boolean_false_node;
> +    }
>    return x;
>  }
>  
> 
> > 
> > >   	optimize = sv;
> > >             if (TREE_CODE (r) != CALL_EXPR)
> > > @@ -3096,6 +3102,31 @@ cp_fold (tree x)
> > >     return x;
> > >   }
> > >   +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> > > +   constexpr evaluation.  Returns the expanded result or X if constexpr
> > > +   evaluation wasn't possible.
> > > +
> > > +   TODO: Do constexpr expansion of expressions where the call itself
> > > +   is not constant, but the call followed by an INDIRECT_REF is.  */
> > > +
> > > +static tree
> > > +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> > > +{
> > > +  if (flag_no_inline)
> > > +    return x;
> > > +  tree callee = get_callee_fndecl (x);
> > > +  if (!callee)
> > > +    return x;
> > > +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> > > +    {
> > > +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > +				     manifestly_const_eval);
> > > +      if (TREE_CODE (r) != CALL_EXPR)
> > > +	return r;
> > > +    }
> > > +  return x;
> > > +}
> > > +
> > >   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.
> > > */
> > >     tree
> > > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > new file mode 100644
> > > index 00000000000..2123f20e3e5
> > > --- /dev/null
> > > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > @@ -0,0 +1,20 @@
> > > +// PR c++/108243
> > > +// { dg-do compile { target c++11 } }
> > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > +
> > > +struct A {
> > > +  constexpr A(int n, int m) : n(n), m(m) { }
> > > +  int n, m;
> > > +};
> > > +
> > > +constexpr int foo(int n) {
> > > +  return n + !__builtin_is_constant_evaluated();
> > > +}
> > > +
> > > +A* f(int n) {
> > > +  static A a = {n, foo(41)};
> > > +  return &a;
> > > +}
> > > +
> > > +// { dg-final { scan-tree-dump "42" "original" } }
> > > +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> > > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > new file mode 100644
> > > index 00000000000..ed964e20a7a
> > > --- /dev/null
> > > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > @@ -0,0 +1,32 @@
> > > +// PR c++/97553
> > > +// { dg-do compile { target c++11 } }
> > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > +
> > > +constexpr int foo() {
> > > +  return __builtin_is_constant_evaluated() + 1;
> > > +}
> > > +
> > > +#if __cpp_if_consteval
> > > +constexpr int bar() {
> > > +  if consteval {
> > > +    return 5;
> > > +  } else {
> > > +    return 4;
> > > +  }
> > > +}
> > > +#endif
> > > +
> > > +int p, q;
> > > +
> > > +int main() {
> > > +  p = foo();
> > > +#if __cpp_if_consteval
> > > +  q = bar();
> > > +#endif
> > > +}
> > > +
> > > +// { dg-final { scan-tree-dump "p = 1" "original" } }
> > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > +
> > > +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> > > +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> > 
> > 
> 


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-10 14:48               ` Patrick Palka
@ 2023-02-10 16:51                 ` Patrick Palka
  2023-02-14 23:02                   ` Jason Merrill
  0 siblings, 1 reply; 15+ messages in thread
From: Patrick Palka @ 2023-02-10 16:51 UTC (permalink / raw)
  To: Patrick Palka; +Cc: Jason Merrill, gcc-patches

On Fri, 10 Feb 2023, Patrick Palka wrote:

> On Thu, 9 Feb 2023, Patrick Palka wrote:
> 
> > On Thu, 9 Feb 2023, Jason Merrill wrote:
> > 
> > > On 2/9/23 09:36, Patrick Palka wrote:
> > > > On Sun, 5 Feb 2023, Jason Merrill wrote:
> > > > 
> > > > > On 2/3/23 15:51, Patrick Palka wrote:
> > > > > > On Mon, 30 Jan 2023, Jason Merrill wrote:
> > > > > > 
> > > > > > > On 1/27/23 17:02, Patrick Palka wrote:
> > > > > > > > This PR illustrates that __builtin_is_constant_evaluated currently
> > > > > > > > acts
> > > > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > > > expression in question would be later manifestly constant evaluated
> > > > > > > > (in
> > > > > > > > which case it must be folded to true).
> > > > > > > > 
> > > > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > > > to get folded as false during cp_fold_function, since at that point
> > > > > > > > we're sure we're doing manifestly constant evaluation.  To that end
> > > > > > > > we add a flags parameter to cp_fold that controls what mce_value the
> > > > > > > > CALL_EXPR case passes to maybe_constant_value.
> > > > > > > > 
> > > > > > > > bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
> > > > > > > > for
> > > > > > > > trunk?
> > > > > > > > 
> > > > > > > > 	PR c++/108243
> > > > > > > > 
> > > > > > > > gcc/cp/ChangeLog:
> > > > > > > > 
> > > > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > > > 	(cp_fold_r): Adjust cp_fold_data use and cp_fold calls.
> > > > > > > > 	(cp_fold_function): Likewise.
> > > > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > > > 	isn't empty.
> > > > > > > > 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
> > > > > > > > 	if ff_genericize is set.
> > > > > > > > 
> > > > > > > > gcc/testsuite/ChangeLog:
> > > > > > > > 
> > > > > > > > 	* g++.dg/opt/pr108243.C: New test.
> > > > > > > > ---
> > > > > > > >     gcc/cp/cp-gimplify.cc               | 76
> > > > > > > > ++++++++++++++++++-----------
> > > > > > > >     gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
> > > > > > > >     2 files changed, 76 insertions(+), 29 deletions(-)
> > > > > > > >     create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > 
> > > > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > > > index a35cedd05cc..d023a63768f 100644
> > > > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > > > @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
> > > > > > > > see
> > > > > > > >     #include "omp-general.h"
> > > > > > > >     #include "opts.h"
> > > > > > > >     +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > > > +
> > > > > > > > +enum fold_flags {
> > > > > > > > +  ff_none = 0,
> > > > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > > > +  ff_genericize = 1 << 0,
> > > > > > > > +};
> > > > > > > > +
> > > > > > > >     /* Forward declarations.  */
> > > > > > > >       static tree cp_genericize_r (tree *, int *, void *);
> > > > > > > >     static tree cp_fold_r (tree *, int *, void *);
> > > > > > > >     static void cp_genericize_tree (tree*, bool);
> > > > > > > > -static tree cp_fold (tree);
> > > > > > > > +static tree cp_fold (tree, fold_flags);
> > > > > > > >       /* Genericize a TRY_BLOCK.  */
> > > > > > > >     @@ -996,9 +1004,8 @@ struct cp_genericize_data
> > > > > > > >     struct cp_fold_data
> > > > > > > >     {
> > > > > > > >       hash_set<tree> pset;
> > > > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > > > -
> > > > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > > > +  fold_flags flags;
> > > > > > > > +  cp_fold_data (fold_flags flags): flags (flags) {}
> > > > > > > >     };
> > > > > > > >       static tree
> > > > > > > > @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >           break;
> > > > > > > >         }
> > > > > > > >     -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > > > >         if (data->pset.add (stmt))
> > > > > > > >         {
> > > > > > > > @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >     	 here rather than in cp_genericize to avoid problems with the
> > > > > > > > invisible
> > > > > > > >     	 reference transition.  */
> > > > > > > >         case INIT_EXPR:
> > > > > > > > -      if (data->genericize)
> > > > > > > > +      if (data->flags & ff_genericize)
> > > > > > > >     	cp_genericize_init_expr (stmt_p);
> > > > > > > >           break;
> > > > > > > >           case TARGET_EXPR:
> > > > > > > > -      if (data->genericize)
> > > > > > > > +      if (data->flags & ff_genericize)
> > > > > > > >     	cp_genericize_target_expr (stmt_p);
> > > > > > > >             /* Folding might replace e.g. a COND_EXPR with a
> > > > > > > > TARGET_EXPR;
> > > > > > > > in
> > > > > > > > @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > > > void
> > > > > > > > *data_)
> > > > > > > >     void
> > > > > > > >     cp_fold_function (tree fndecl)
> > > > > > > >     {
> > > > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > > > +  cp_fold_data data (ff_genericize);
> > > > > > > >       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
> > > > > > > > NULL);
> > > > > > > >     }
> > > > > > > >     @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > > > >     {
> > > > > > > >       while (true)
> > > > > > > >         {
> > > > > > > > -      x = cp_fold (x);
> > > > > > > > +      x = cp_fold (x, ff_none);
> > > > > > > >           if (rval)
> > > > > > > >     	x = mark_rvalue_use (x);
> > > > > > > >           if (rval && DECL_P (x)
> > > > > > > > @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
> > > > > > > >       if (processing_template_decl)
> > > > > > > >         return x;
> > > > > > > >       x = cp_fully_fold (x);
> > > > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > > > +  cp_fold_data data (ff_none);
> > > > > > > >       cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > > > >       return x;
> > > > > > > >     }
> > > > > > > > @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
> > > > > > > >         Function returns X or its folded variant.  */
> > > > > > > >       static tree
> > > > > > > > -cp_fold (tree x)
> > > > > > > > +cp_fold (tree x, fold_flags flags)
> > > > > > > >     {
> > > > > > > >       tree op0, op1, op2, op3;
> > > > > > > >       tree org_x = x, r = NULL_TREE;
> > > > > > > > @@ -2490,8 +2497,11 @@ cp_fold (tree x)
> > > > > > > >       if (fold_cache == NULL)
> > > > > > > >         fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > > > >     -  if (tree *cached = fold_cache->get (x))
> > > > > > > > -    return *cached;
> > > > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > > > +
> > > > > > > > +  if (cache_p)
> > > > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > > > +      return *cached;
> > > > > > > >         uid_sensitive_constexpr_evaluation_checker c;
> > > > > > > >     @@ -2526,7 +2536,7 @@ cp_fold (tree x)
> > > > > > > >     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > > > the
> > > > > > > >     	     folding of the operand should be in the caches and if in
> > > > > > > > cp_fold_r
> > > > > > > >     	     it will modify it in place.  */
> > > > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > >     	  if (op0 == error_mark_node)
> > > > > > > >     	    x = error_mark_node;
> > > > > > > >     	  break;
> > > > > > > > @@ -2571,7 +2581,7 @@ cp_fold (tree x)
> > > > > > > >     	{
> > > > > > > >     	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > > > >     	  if (p != x)
> > > > > > > > -	    return cp_fold (p);
> > > > > > > > +	    return cp_fold (p, flags);
> > > > > > > >     	}
> > > > > > > >           goto unary;
> > > > > > > >     @@ -2763,8 +2773,8 @@ cp_fold (tree x)
> > > > > > > >         case COND_EXPR:
> > > > > > > >           loc = EXPR_LOCATION (x);
> > > > > > > >           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > >             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > > > >     	{
> > > > > > > > @@ -2854,7 +2864,7 @@ cp_fold (tree x)
> > > > > > > >     	      {
> > > > > > > >     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > > > >     		  r = build_nop (TREE_TYPE (x), r);
> > > > > > > > -		x = cp_fold (r);
> > > > > > > > +		x = cp_fold (r, flags);
> > > > > > > >     		break;
> > > > > > > >     	      }
> > > > > > > >     	  }
> > > > > > > > @@ -2908,7 +2918,7 @@ cp_fold (tree x)
> > > > > > > >     	int m = call_expr_nargs (x);
> > > > > > > >     	for (int i = 0; i < m; i++)
> > > > > > > >     	  {
> > > > > > > > -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> > > > > > > > +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
> > > > > > > >     	    if (r != CALL_EXPR_ARG (x, i))
> > > > > > > >     	      {
> > > > > > > >     		if (r == error_mark_node)
> > > > > > > > @@ -2931,7 +2941,7 @@ cp_fold (tree x)
> > > > > > > >       	if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > >     	  {
> > > > > > > > -	    x = cp_fold (r);
> > > > > > > > +	    x = cp_fold (r, flags);
> > > > > > > >     	    break;
> > > > > > > >     	  }
> > > > > > > >     @@ -2944,7 +2954,15 @@ cp_fold (tree x)
> > > > > > > >     	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > > > > >     	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > > > > >     	    && !flag_no_inline)
> > > > > > > > -	  r = maybe_constant_value (x);
> > > > > > > > +	  {
> > > > > > > > +	    mce_value manifestly_const_eval = mce_unknown;
> > > > > > > > +	    if (flags & ff_genericize)
> > > > > > > > +	      /* At genericization time it's safe to fold
> > > > > > > > +		 __builtin_is_constant_evaluated to false.  */
> > > > > > > > +	      manifestly_const_eval = mce_false;
> > > > > > > > +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > > > > > +				      manifestly_const_eval);
> > > > > > > > +	  }
> > > > > > > >     	optimize = sv;
> > > > > > > >               if (TREE_CODE (r) != CALL_EXPR)
> > > > > > > > @@ -2971,7 +2989,7 @@ cp_fold (tree x)
> > > > > > > >     	vec<constructor_elt, va_gc> *nelts = NULL;
> > > > > > > >     	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
> > > > > > > >     	  {
> > > > > > > > -	    tree op = cp_fold (p->value);
> > > > > > > > +	    tree op = cp_fold (p->value, flags);
> > > > > > > >     	    if (op != p->value)
> > > > > > > >     	      {
> > > > > > > >     		if (op == error_mark_node)
> > > > > > > > @@ -3002,7 +3020,7 @@ cp_fold (tree x)
> > > > > > > >       	for (int i = 0; i < n; i++)
> > > > > > > >     	  {
> > > > > > > > -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> > > > > > > > +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
> > > > > > > >     	    if (op != TREE_VEC_ELT (x, i))
> > > > > > > >     	      {
> > > > > > > >     		if (!changed)
> > > > > > > > @@ -3019,10 +3037,10 @@ cp_fold (tree x)
> > > > > > > >         case ARRAY_RANGE_REF:
> > > > > > > >             loc = EXPR_LOCATION (x);
> > > > > > > > -      op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > > > -      op3 = cp_fold (TREE_OPERAND (x, 3));
> > > > > > > > +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > > > > +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
> > > > > > > >             if (op0 != TREE_OPERAND (x, 0)
> > > > > > > >     	  || op1 != TREE_OPERAND (x, 1)
> > > > > > > > @@ -3050,7 +3068,7 @@ cp_fold (tree x)
> > > > > > > >           /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
> > > > > > > > which,
> > > > > > > > after
> > > > > > > >     	 folding, evaluates to an invariant.  In that case no need to
> > > > > > > > wrap
> > > > > > > >     	 this folded tree with a SAVE_EXPR.  */
> > > > > > > > -      r = cp_fold (TREE_OPERAND (x, 0));
> > > > > > > > +      r = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > > > >           if (tree_invariant_p (r))
> > > > > > > >     	x = r;
> > > > > > > >           break;
> > > > > > > > @@ -3069,7 +3087,7 @@ cp_fold (tree x)
> > > > > > > >           copy_warning (x, org_x);
> > > > > > > >         }
> > > > > > > >     -  if (!c.evaluation_restricted_p ())
> > > > > > > > +  if (cache_p && !c.evaluation_restricted_p ())
> > > > > > > >         {
> > > > > > > >           fold_cache->put (org_x, x);
> > > > > > > >           /* Prevent that we try to fold an already folded result
> > > > > > > > again.
> > > > > > > > */
> > > > > > > > diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > new file mode 100644
> > > > > > > > index 00000000000..4c45dbba13c
> > > > > > > > --- /dev/null
> > > > > > > > +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
> > > > > > > > @@ -0,0 +1,29 @@
> > > > > > > > +// PR c++/108243
> > > > > > > > +// { dg-do compile { target c++11 } }
> > > > > > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > > > > > +
> > > > > > > > +constexpr int foo() {
> > > > > > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > > > > > +}
> > > > > > > > +
> > > > > > > > +#if __cpp_if_consteval
> > > > > > > > +constexpr int bar() {
> > > > > > > > +  if consteval {
> > > > > > > > +    return 5;
> > > > > > > > +  } else {
> > > > > > > > +    return 4;
> > > > > > > > +  }
> > > > > > > > +}
> > > > > > > > +#endif
> > > > > > > > +
> > > > > > > > +int p, q;
> > > > > > > > +
> > > > > > > > +int main() {
> > > > > > > > +  p = foo();
> > > > > > > > +#if __cpp_if_consteval
> > > > > > > > +  q = bar();
> > > > > > > > +#endif
> > > > > > > > +}
> > > > > > > > +
> > > > > > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > > > > > +// { dg-final { scan-tree-dump-not "= bar" "original" } }
> > > > > > > 
> > > > > > > Let's also test a static initializer that can't be fully
> > > > > > > constant-evaluated.
> > > > > > 
> > > > > > D'oh, doing so revealed that cp_fold_function doesn't reach static
> > > > > > initializers; that's taken care of by cp_fully_fold_init.  So it seems
> > > > > > we need to make cp_fold when called from the latter entry point to also
> > > > > > assume m_c_e is false.  We can't re-use ff_genericize here because that
> > > > > > flag has additional effects in cp_fold_r, so it seems we need another
> > > > > > flag that only affects the manifestly constant-eval stuff; I called
> > > > > > it ff_mce_false.  How does the following look?
> > > > > > 
> > > > > > -- >8 --
> > > > > > 
> > > > > > Subject: [PATCH 2/2] c++: speculative constexpr and
> > > > > > is_constant_evaluated
> > > > > >    [PR108243]
> > > > > > 
> > > > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > > > since we don't want to prematurely fold the builtin to false if the
> > > > > > expression in question would be later manifestly constant evaluated (in
> > > > > > which case it must be folded to true).
> > > > > > 
> > > > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > > > to get folded as false during cp_fold_function and cp_fully_fold_init,
> > > > > > since at these points we're sure we're done with manifestly constant
> > > > > > evaluation.  To that end we add a flags parameter to cp_fold that
> > > > > > controls whether we pass mce_false or mce_unknown to
> > > > > > maybe_constant_value
> > > > > > when folding a CALL_EXPR.
> > > > > > 
> > > > > > 	PR c++/108243
> > > > > > 	PR c++/97553
> > > > > > 
> > > > > > gcc/cp/ChangeLog:
> > > > > > 
> > > > > > 	* cp-gimplify.cc (enum fold_flags): Define.
> > > > > > 	(cp_fold_data::genericize): Replace this data member with ...
> > > > > > 	(cp_fold_data::fold_flags): ... this.
> > > > > > 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> > > > > > 	(cp_fold_function): Likewise.
> > > > > > 	(cp_fold_maybe_rvalue): Likewise.
> > > > > > 	(cp_fully_fold_init): Likewise.
> > > > > > 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
> > > > > > 	isn't empty.
> > > > > > 	<case CALL_EXPR>: If ff_genericize is set, fold
> > > > > > 	__builtin_is_constant_evaluated to false and pass mce_false to
> > > > > > 	maybe_constant_value.
> > > > > > 
> > > > > > gcc/testsuite/ChangeLog:
> > > > > > 
> > > > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > > > ---
> > > > > >    gcc/cp/cp-gimplify.cc                         | 88
> > > > > > ++++++++++++-------
> > > > > >    .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
> > > > > >    .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
> > > > > >    3 files changed, 104 insertions(+), 30 deletions(-)
> > > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > > >    create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > > > 
> > > > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > > > index 9929d29981a..590ed787997 100644
> > > > > > --- a/gcc/cp/cp-gimplify.cc
> > > > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > > > @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
> > > > > >    #include "omp-general.h"
> > > > > >    #include "opts.h"
> > > > > >    +/* Flags for cp_fold and cp_fold_r.  */
> > > > > > +
> > > > > > +enum fold_flags {
> > > > > > +  ff_none = 0,
> > > > > > +  /* Whether we're being called from cp_fold_function.  */
> > > > > > +  ff_genericize = 1 << 0,
> > > > > > +  /* Whether we're folding late enough that we could assume
> > > > > > +     we're definitely not in a manifestly constant-evaluated
> > > > > > +     context.  */
> > > > > 
> > > > > It's not necessarily a matter of late enough; we could fold sooner and
> > > > > still
> > > > > know that, as in cp_fully_fold_init.  We could do the same at other
> > > > > full-expression points, but we don't because we want to delay folding as
> > > > > much
> > > > > as possible.  So let's say "folding at a point where we know we're..."
> > > > > 
> > > > > > +  ff_mce_false = 1 << 1,
> > > > > > +};
> > > > > > +
> > > > > > +using fold_flags_t = int;
> > > > > > +
> > > > > >    /* Forward declarations.  */
> > > > > >      static tree cp_genericize_r (tree *, int *, void *);
> > > > > >    static tree cp_fold_r (tree *, int *, void *);
> > > > > >    static void cp_genericize_tree (tree*, bool);
> > > > > > -static tree cp_fold (tree);
> > > > > > +static tree cp_fold (tree, fold_flags_t);
> > > > > >      /* Genericize a TRY_BLOCK.  */
> > > > > >    @@ -1012,9 +1026,8 @@ struct cp_genericize_data
> > > > > >    struct cp_fold_data
> > > > > >    {
> > > > > >      hash_set<tree> pset;
> > > > > > -  bool genericize; // called from cp_fold_function?
> > > > > > -
> > > > > > -  cp_fold_data (bool g): genericize (g) {}
> > > > > > +  fold_flags_t flags;
> > > > > > +  cp_fold_data (fold_flags_t flags): flags (flags) {}
> > > > > >    };
> > > > > >      static tree
> > > > > > @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > > *data_)
> > > > > >          break;
> > > > > >        }
> > > > > >    -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > > > +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
> > > > > >        if (data->pset.add (stmt))
> > > > > >        {
> > > > > > @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
> > > > > > void
> > > > > > *data_)
> > > > > >    	 here rather than in cp_genericize to avoid problems with the
> > > > > > invisible
> > > > > >    	 reference transition.  */
> > > > > >        case INIT_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >    	cp_genericize_init_expr (stmt_p);
> > > > > >          break;
> > > > > >          case TARGET_EXPR:
> > > > > > -      if (data->genericize)
> > > > > > +      if (data->flags & ff_genericize)
> > > > > >    	cp_genericize_target_expr (stmt_p);
> > > > > >            /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
> > > > > > in
> > > > > > @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > > > *data_)
> > > > > >    void
> > > > > >    cp_fold_function (tree fndecl)
> > > > > >    {
> > > > > > -  cp_fold_data data (/*genericize*/true);
> > > > > > +  cp_fold_data data (ff_genericize | ff_mce_false);
> > > > > 
> > > > > Here would be a good place for a comment about passing mce_false because
> > > > > all
> > > > > manifestly-constant-evaluated expressions will have been
> > > > > constant-evaluated
> > > > > already if possible.
> > > > > 
> > > > > >      cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
> > > > > >    }
> > > > > >    @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
> > > > > >    {
> > > > > >      while (true)
> > > > > >        {
> > > > > > -      x = cp_fold (x);
> > > > > > +      x = cp_fold (x, ff_none);
> > > > > >          if (rval)
> > > > > >    	x = mark_rvalue_use (x);
> > > > > >          if (rval && DECL_P (x)
> > > > > > @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
> > > > > >      if (processing_template_decl)
> > > > > >        return x;
> > > > > >      x = cp_fully_fold (x);
> > > > > > -  cp_fold_data data (/*genericize*/false);
> > > > > > +  cp_fold_data data (ff_mce_false);
> > > > > >      cp_walk_tree (&x, cp_fold_r, &data, NULL);
> > > > > >      return x;
> > > > > >    }
> > > > > > @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
> > > > > >        Function returns X or its folded variant.  */
> > > > > >      static tree
> > > > > > -cp_fold (tree x)
> > > > > > +cp_fold (tree x, fold_flags_t flags)
> > > > > >    {
> > > > > >      tree op0, op1, op2, op3;
> > > > > >      tree org_x = x, r = NULL_TREE;
> > > > > > @@ -2506,8 +2519,11 @@ cp_fold (tree x)
> > > > > >      if (fold_cache == NULL)
> > > > > >        fold_cache = hash_map<tree, tree>::create_ggc (101);
> > > > > >    -  if (tree *cached = fold_cache->get (x))
> > > > > > -    return *cached;
> > > > > > +  bool cache_p = (flags == ff_none);
> > > > > > +
> > > > > > +  if (cache_p)
> > > > > > +    if (tree *cached = fold_cache->get (x))
> > > > > > +      return *cached;
> > > > > >        uid_sensitive_constexpr_evaluation_checker c;
> > > > > >    @@ -2542,7 +2558,7 @@ cp_fold (tree x)
> > > > > >    	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
> > > > > > the
> > > > > >    	     folding of the operand should be in the caches and if in
> > > > > > cp_fold_r
> > > > > >    	     it will modify it in place.  */
> > > > > > -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> > > > > > +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> > > > > >    	  if (op0 == error_mark_node)
> > > > > >    	    x = error_mark_node;
> > > > > >    	  break;
> > > > > > @@ -2587,7 +2603,7 @@ cp_fold (tree x)
> > > > > >    	{
> > > > > >    	  tree p = maybe_undo_parenthesized_ref (x);
> > > > > >    	  if (p != x)
> > > > > > -	    return cp_fold (p);
> > > > > > +	    return cp_fold (p, flags);
> > > > > >    	}
> > > > > >          goto unary;
> > > > > >    @@ -2779,8 +2795,8 @@ cp_fold (tree x)
> > > > > >        case COND_EXPR:
> > > > > >          loc = EXPR_LOCATION (x);
> > > > > >          op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> > > > > > -      op1 = cp_fold (TREE_OPERAND (x, 1));
> > > > > > -      op2 = cp_fold (TREE_OPERAND (x, 2));
> > > > > > +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> > > > > > +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> > > > > >            if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
> > > > > >    	{
> > > > > > @@ -2870,7 +2886,7 @@ cp_fold (tree x)
> > > > > >    	      {
> > > > > >    		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
> > > > > >    		  r = build_nop (TREE_TYPE (x), r);
> > > > > > -		x = cp_fold (r);
> > > > > > +		x = cp_fold (r, flags);
> > > > > >    		break;
> > > > > >    	      }
> > > > > >    	  }
> > > > > > @@ -2890,8 +2906,12 @@ cp_fold (tree x)
> > > > > >    	  {
> > > > > >    	    switch (DECL_FE_FUNCTION_CODE (callee))
> > > > > >    	      {
> > > > > > -		/* Defer folding __builtin_is_constant_evaluated.  */
> > > > > >    	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> > > > > > +		/* Defer folding __builtin_is_constant_evaluated unless
> > > > > > +		   we can assume this isn't a manifestly constant-evaluated
> > > > > 
> > > > > s/can assume/know/
> > > > > 
> > > > > OK with those comment changes.
> > > > 
> > > > Thanks a lot.  Unfortunately I think the patch has a significant problem
> > > > that only just occurred to me -- disabling the cp_fold cache when the
> > > > flag ff_mce_false is set effectively makes cp_fold_function and
> > > > cp_fully_fold_init quadratic in the size of the expression (since
> > > > cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
> > > > disabled will end up fully walking each subtree).  Note that the reason
> > > > we must disable the cache is because cp_fold with ff_mce_false might
> > > > give a different folded result than without that flag if the expression
> > > > contains a suitable CALL_EXPR subexpression.
> > > 
> > > Good point.
> > > 
> > > > One approach to fix this complexity issue would be to parameterize the
> > > > cache according to the flags that were passed to cp_fold, which would
> > > > allow us to keep the cache enabled when ff_mce_false is set.  A downside
> > > > to this approach is that the size of the cp_fold cache would essentially
> > > > double since for each tree we'd now have two cache entries, one for
> > > > flags=ff_none and another for flags=ff_mce_false.
> > > 
> > > We could also clear the cache before cp_fold_function since the two folds
> > > shouldn't overlap (much).
> > 
> > Makes sense, but IIUC we'd also have to clear it before (and after)
> > cp_fully_fold_init too, which unlike cp_fold_function may get called
> > in the middle of a function body.
> 
> Ah sorry, I think I misunderstood your idea.  Clearing the cache between
> cp_fold_function calls would definitely help with controlling the size of the
> cache, and indeed there shouldn't be much overlap because there isn't
> much sharing of expression trees across function bodies.
> 
> However, I was curious about how big the fold_cache gets in practice,
> and it turns out it doesn't get very big at all since we regularly clear
> the fold_cache via clear_cv_and_fold_caches anyway.  According to my
> experiments it doesn't get larger than about ~10k elements.  So a
> doubling of that is pretty much insignificant.
> 
> So ISTM parameterizing the cache is the way to go.  How does the
> following look?
> 
> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>  [PR108243]
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(fold_flags_t): Declare.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Likewise.
> 	(cp_fully_fold_init): Likewise.
> 	(fold_cache): Replace with ...
> 	(fold_caches): ... this 2-element array of caches.
> 	(get_fold_cache): Define.
> 	(clear_fold_cache): Adjust.
> 	(cp_fold): Add flags parameter.  Call get_fold_cache.
> 	<case CALL_EXPR>: If ff_mce_false is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>  gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
>  .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
>  .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
>  3 files changed, 120 insertions(+), 30 deletions(-)
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>  create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..01e624bc9de 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>  #include "omp-general.h"
>  #include "opts.h"
>  
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding at a point where we know we're
> +     definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>  /* Forward declarations.  */
>  
>  static tree cp_genericize_r (tree *, int *, void *);
>  static tree cp_fold_r (tree *, int *, void *);
>  static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>  
>  /* Genericize a TRY_BLOCK.  */
>  
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>  struct cp_fold_data
>  {
>    hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>  };
>  
>  static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>        break;
>      }
>  
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>  
>    if (data->pset.add (stmt))
>      {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  	 here rather than in cp_genericize to avoid problems with the invisible
>  	 reference transition.  */
>      case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_init_expr (stmt_p);
>        break;
>  
>      case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>  	cp_genericize_target_expr (stmt_p);
>  
>        /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>  void
>  cp_fold_function (tree fndecl)
>  {
> -  cp_fold_data data (/*genericize*/true);
> +  /* By now all manifestly-constant-evaluated expressions will have
> +     been constant-evaluated already if possible, so we can safely
> +     pass ff_mce_false.  */
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>    cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>  }
>  
> @@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>  {
>    while (true)
>      {
> -      x = cp_fold (x);
> +      x = cp_fold (x, ff_none);
>        if (rval)
>  	x = mark_rvalue_use (x);
>        if (rval && DECL_P (x)
> @@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
>    if (processing_template_decl)
>      return x;
>    x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>    cp_walk_tree (&x, cp_fold_r, &data, NULL);
>    return x;
>  }
> @@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>    return cp_fold_maybe_rvalue (x, !lval);
>  }
>  
> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
> +
> +/* Subroutine of cp_fold.  Returns which fold cache to use according
> +   to the given flags.  We need multiple caches since the result of
> +   folding may depend on which flags are used.  */
> +
> +static hash_map<tree, tree> *&
> +get_fold_cache (fold_flags_t flags)
> +{
> +  if (flags & ff_mce_false)
> +    return fold_caches[1];
> +  else
> +    return fold_caches[0];
> +}
>  
>  /* Dispose of the whole FOLD_CACHE.  */
>  
>  void
>  clear_fold_cache (void)
>  {
> -  if (fold_cache != NULL)
> -    fold_cache->empty ();
> +  for (auto& fold_cache : fold_caches)
> +    if (fold_cache != NULL)
> +      fold_cache->empty ();
>  }
>  
>  /*  This function tries to fold an expression X.
> @@ -2485,7 +2515,7 @@ clear_fold_cache (void)
>      Function returns X or its folded variant.  */
>  
>  static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>  {
>    tree op0, op1, op2, op3;
>    tree org_x = x, r = NULL_TREE;
> @@ -2503,6 +2533,7 @@ cp_fold (tree x)
>    if (DECL_P (x) || CONSTANT_CLASS_P (x))
>      return x;
>  
> +  auto& fold_cache = get_fold_cache (flags);
>    if (fold_cache == NULL)
>      fold_cache = hash_map<tree, tree>::create_ggc (101);
>  
> @@ -2542,7 +2573,7 @@ cp_fold (tree x)
>  	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>  	     folding of the operand should be in the caches and if in cp_fold_r
>  	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>  	  if (op0 == error_mark_node)
>  	    x = error_mark_node;
>  	  break;
> @@ -2587,7 +2618,7 @@ cp_fold (tree x)
>  	{
>  	  tree p = maybe_undo_parenthesized_ref (x);
>  	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>  	}
>        goto unary;
>  
> @@ -2779,8 +2810,8 @@ cp_fold (tree x)
>      case COND_EXPR:
>        loc = EXPR_LOCATION (x);
>        op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));

Whoops, we should also propagate the flags through calls to
cp_fold_rvalue and cp_fold_maybe_rvalue from cp_fold.  The below
version fixes this by adding static overloads of these functions that
additionally take and propagate a fold_flags parameter.

-- >8 --

Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
 [PR108243]

	PR c++/108243
	PR c++/97553

gcc/cp/ChangeLog:

	* cp-gimplify.cc (enum fold_flags): Define.
	(fold_flags_t): Declare.
	(cp_fold_data::genericize): Replace this data member with ...
	(cp_fold_data::fold_flags): ... this.
	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
	(cp_fold_function): Likewise.
	(cp_fold_maybe_rvalue): Add a static overload that takes
	and propagates a fold_flags_t parameter, and define the existing
	public overload in terms of it.
	(cp_fold_rvalue): Likewise.
	(cp_fully_fold_init): Adjust use of cp_fold_data.
	(fold_cache): Replace with ...
	(fold_caches): ... this 2-element array of caches.
	(get_fold_cache): Define.
	(clear_fold_cache): Adjust.
	(cp_fold): Add fold_flags_t parameter.  Call get_fold_cache.
	Pass flags to cp_fold, cp_fold_rvalue and cp_fold_maybe_rvalue.
	<case CALL_EXPR>: If ff_mce_false is set, fold
	__builtin_is_constant_evaluated to false and pass mce_false to
	maybe_constant_value.

gcc/testsuite/ChangeLog:

	* g++.dg/opt/is_constant_evaluated1.C: New test.
	* g++.dg/opt/is_constant_evaluated2.C: New test.
---
 gcc/cp/cp-gimplify.cc                         | 139 ++++++++++++------
 .../g++.dg/opt/is_constant_evaluated1.C       |  15 ++
 .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++
 3 files changed, 144 insertions(+), 42 deletions(-)
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
 create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C

diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
index 9929d29981a..edece6b7a8a 100644
--- a/gcc/cp/cp-gimplify.cc
+++ b/gcc/cp/cp-gimplify.cc
@@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
 #include "omp-general.h"
 #include "opts.h"
 
+/* Flags for cp_fold and cp_fold_r.  */
+
+enum fold_flags {
+  ff_none = 0,
+  /* Whether we're being called from cp_fold_function.  */
+  ff_genericize = 1 << 0,
> +  /* Whether we're folding at a point where we know we're
> +     definitely not in a manifestly constant-evaluated
> +     context.  */
+  ff_mce_false = 1 << 1,
+};
+
+using fold_flags_t = int;
+
 /* Forward declarations.  */
 
 static tree cp_genericize_r (tree *, int *, void *);
 static tree cp_fold_r (tree *, int *, void *);
 static void cp_genericize_tree (tree*, bool);
-static tree cp_fold (tree);
+static tree cp_fold (tree, fold_flags_t);
 
 /* Genericize a TRY_BLOCK.  */
 
@@ -1012,9 +1026,8 @@ struct cp_genericize_data
 struct cp_fold_data
 {
   hash_set<tree> pset;
-  bool genericize; // called from cp_fold_function?
-
-  cp_fold_data (bool g): genericize (g) {}
+  fold_flags_t flags;
+  cp_fold_data (fold_flags_t flags): flags (flags) {}
 };
 
 static tree
@@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
       break;
     }
 
-  *stmt_p = stmt = cp_fold (*stmt_p);
+  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
 
   if (data->pset.add (stmt))
     {
@@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 	 here rather than in cp_genericize to avoid problems with the invisible
 	 reference transition.  */
     case INIT_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_init_expr (stmt_p);
       break;
 
     case TARGET_EXPR:
-      if (data->genericize)
+      if (data->flags & ff_genericize)
 	cp_genericize_target_expr (stmt_p);
 
       /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
@@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
 void
 cp_fold_function (tree fndecl)
 {
-  cp_fold_data data (/*genericize*/true);
+  /* By now all manifestly-constant-evaluated expressions will have
+     been constant-evaluated already if possible, so we can safely
+     pass ff_mce_false.  */
+  cp_fold_data data (ff_genericize | ff_mce_false);
   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
 }
 
@@ -2386,12 +2402,12 @@ cxx_omp_disregard_value_expr (tree decl, bool shared)
 
 /* Fold expression X which is used as an rvalue if RVAL is true.  */
 
-tree
-cp_fold_maybe_rvalue (tree x, bool rval)
+static tree
+cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
 {
   while (true)
     {
-      x = cp_fold (x);
+      x = cp_fold (x, flags);
       if (rval)
 	x = mark_rvalue_use (x);
       if (rval && DECL_P (x)
@@ -2409,12 +2425,24 @@ cp_fold_maybe_rvalue (tree x, bool rval)
   return x;
 }
 
+tree
+cp_fold_maybe_rvalue (tree x, bool rval)
+{
+  return cp_fold_maybe_rvalue (x, rval, ff_none);
+}
+
 /* Fold expression X which is used as an rvalue.  */
 
+static tree
+cp_fold_rvalue (tree x, fold_flags_t flags)
+{
+  return cp_fold_maybe_rvalue (x, true, flags);
+}
+
 tree
 cp_fold_rvalue (tree x)
 {
-  return cp_fold_maybe_rvalue (x, true);
+  return cp_fold_rvalue (x, ff_none);
 }
 
 /* Perform folding on expression X.  */
@@ -2450,7 +2478,7 @@ cp_fully_fold_init (tree x)
   if (processing_template_decl)
     return x;
   x = cp_fully_fold (x);
-  cp_fold_data data (/*genericize*/false);
+  cp_fold_data data (ff_mce_false);
   cp_walk_tree (&x, cp_fold_r, &data, NULL);
   return x;
 }
@@ -2466,15 +2494,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
   return cp_fold_maybe_rvalue (x, !lval);
 }
 
-static GTY((deletable)) hash_map<tree, tree> *fold_cache;
+static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
+
+/* Subroutine of cp_fold.  Returns which fold cache to use according
+   to the given flags.  We need multiple caches since the result of
+   folding may depend on which flags are used.  */
+
+static hash_map<tree, tree> *&
+get_fold_cache (fold_flags_t flags)
+{
+  if (flags & ff_mce_false)
+    return fold_caches[1];
+  else
+    return fold_caches[0];
+}
 
 /* Dispose of the whole FOLD_CACHE.  */
 
 void
 clear_fold_cache (void)
 {
-  if (fold_cache != NULL)
-    fold_cache->empty ();
+  for (auto& fold_cache : fold_caches)
+    if (fold_cache != NULL)
+      fold_cache->empty ();
 }
 
 /*  This function tries to fold an expression X.
@@ -2485,7 +2527,7 @@ clear_fold_cache (void)
     Function returns X or its folded variant.  */
 
 static tree
-cp_fold (tree x)
+cp_fold (tree x, fold_flags_t flags)
 {
   tree op0, op1, op2, op3;
   tree org_x = x, r = NULL_TREE;
@@ -2503,6 +2545,7 @@ cp_fold (tree x)
   if (DECL_P (x) || CONSTANT_CLASS_P (x))
     return x;
 
+  auto& fold_cache = get_fold_cache (flags);
   if (fold_cache == NULL)
     fold_cache = hash_map<tree, tree>::create_ggc (101);
 
@@ -2517,7 +2560,7 @@ cp_fold (tree x)
     case CLEANUP_POINT_EXPR:
       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
 	 effects.  */
-      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
+      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
       if (!TREE_SIDE_EFFECTS (r))
 	x = r;
       break;
@@ -2542,14 +2585,14 @@ cp_fold (tree x)
 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
 	     folding of the operand should be in the caches and if in cp_fold_r
 	     it will modify it in place.  */
-	  op0 = cp_fold (TREE_OPERAND (x, 0));
+	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
 	  if (op0 == error_mark_node)
 	    x = error_mark_node;
 	  break;
 	}
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
 
       if (code == CONVERT_EXPR
 	  && SCALAR_TYPE_P (TREE_TYPE (x))
@@ -2577,7 +2620,7 @@ cp_fold (tree x)
       break;
 
     case EXCESS_PRECISION_EXPR:
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
       x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
       break;
 
@@ -2587,13 +2630,13 @@ cp_fold (tree x)
 	{
 	  tree p = maybe_undo_parenthesized_ref (x);
 	  if (p != x)
-	    return cp_fold (p);
+	    return cp_fold (p, flags);
 	}
       goto unary;
 
     case ADDR_EXPR:
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
 
       /* Cope with user tricks that amount to offsetof.  */
       if (op0 != error_mark_node
@@ -2630,7 +2673,7 @@ cp_fold (tree x)
     unary:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
 
     finish_unary:
       if (op0 != TREE_OPERAND (x, 0))
@@ -2657,7 +2700,7 @@ cp_fold (tree x)
       break;
 
     case UNARY_PLUS_EXPR:
-      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
+      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
       if (op0 == error_mark_node)
 	x = error_mark_node;
       else
@@ -2711,8 +2754,8 @@ cp_fold (tree x)
     case RANGE_EXPR: case COMPLEX_EXPR:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
-      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
+      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
+      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
 
       /* decltype(nullptr) has only one value, so optimize away all comparisons
 	 with that type right away, keeping them in the IL causes troubles for
@@ -2778,9 +2821,9 @@ cp_fold (tree x)
     case VEC_COND_EXPR:
     case COND_EXPR:
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
+      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
 
       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
 	{
@@ -2870,7 +2913,7 @@ cp_fold (tree x)
 	      {
 		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
 		  r = build_nop (TREE_TYPE (x), r);
-		x = cp_fold (r);
+		x = cp_fold (r, flags);
 		break;
 	      }
 	  }
@@ -2890,8 +2933,12 @@ cp_fold (tree x)
 	  {
 	    switch (DECL_FE_FUNCTION_CODE (callee))
 	      {
-		/* Defer folding __builtin_is_constant_evaluated.  */
 	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
+		/* Defer folding __builtin_is_constant_evaluated unless
+		   we know this isn't a manifestly constant-evaluated
+		   context.  */
+		if (flags & ff_mce_false)
+		  x = boolean_false_node;
 		break;
 	      case CP_BUILT_IN_SOURCE_LOCATION:
 		x = fold_builtin_source_location (x);
@@ -2924,7 +2971,7 @@ cp_fold (tree x)
 	int m = call_expr_nargs (x);
 	for (int i = 0; i < m; i++)
 	  {
-	    r = cp_fold (CALL_EXPR_ARG (x, i));
+	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
 	    if (r != CALL_EXPR_ARG (x, i))
 	      {
 		if (r == error_mark_node)
@@ -2947,7 +2994,7 @@ cp_fold (tree x)
 
 	if (TREE_CODE (r) != CALL_EXPR)
 	  {
-	    x = cp_fold (r);
+	    x = cp_fold (r, flags);
 	    break;
 	  }
 
@@ -2960,7 +3007,15 @@ cp_fold (tree x)
 	   constant, but the call followed by an INDIRECT_REF is.  */
 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
 	    && !flag_no_inline)
-	  r = maybe_constant_value (x);
+	  {
+	    mce_value manifestly_const_eval = mce_unknown;
+	    if (flags & ff_mce_false)
+	      /* Allow folding __builtin_is_constant_evaluated to false during
+		 constexpr evaluation of this call.  */
+	      manifestly_const_eval = mce_false;
+	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
+				      manifestly_const_eval);
+	  }
 	optimize = sv;
 
         if (TREE_CODE (r) != CALL_EXPR)
@@ -2987,7 +3042,7 @@ cp_fold (tree x)
 	vec<constructor_elt, va_gc> *nelts = NULL;
 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
 	  {
-	    tree op = cp_fold (p->value);
+	    tree op = cp_fold (p->value, flags);
 	    if (op != p->value)
 	      {
 		if (op == error_mark_node)
@@ -3018,7 +3073,7 @@ cp_fold (tree x)
 
 	for (int i = 0; i < n; i++)
 	  {
-	    tree op = cp_fold (TREE_VEC_ELT (x, i));
+	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
 	    if (op != TREE_VEC_ELT (x, i))
 	      {
 		if (!changed)
@@ -3035,10 +3090,10 @@ cp_fold (tree x)
     case ARRAY_RANGE_REF:
 
       loc = EXPR_LOCATION (x);
-      op0 = cp_fold (TREE_OPERAND (x, 0));
-      op1 = cp_fold (TREE_OPERAND (x, 1));
-      op2 = cp_fold (TREE_OPERAND (x, 2));
-      op3 = cp_fold (TREE_OPERAND (x, 3));
+      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
+      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
+      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
+      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
 
       if (op0 != TREE_OPERAND (x, 0)
 	  || op1 != TREE_OPERAND (x, 1)
@@ -3066,7 +3121,7 @@ cp_fold (tree x)
       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
 	 folding, evaluates to an invariant.  In that case no need to wrap
 	 this folded tree with a SAVE_EXPR.  */
-      r = cp_fold (TREE_OPERAND (x, 0));
+      r = cp_fold (TREE_OPERAND (x, 0), flags);
       if (tree_invariant_p (r))
 	x = r;
       break;
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
new file mode 100644
index 00000000000..983410b9e83
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
@@ -0,0 +1,15 @@
+// PR c++/108243
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+struct A {
+  constexpr A(int n, int m) : n(n), m(m) { }
+  int n, m;
+};
+
+A* f(int n) {
+  static A a = {n, __builtin_is_constant_evaluated()};
+  return &a;
+}
+
+// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
new file mode 100644
index 00000000000..ed964e20a7a
--- /dev/null
+++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
@@ -0,0 +1,32 @@
+// PR c++/97553
+// { dg-do compile { target c++11 } }
+// { dg-additional-options "-O -fdump-tree-original" }
+
+constexpr int foo() {
+  return __builtin_is_constant_evaluated() + 1;
+}
+
+#if __cpp_if_consteval
+constexpr int bar() {
+  if consteval {
+    return 5;
+  } else {
+    return 4;
+  }
+}
+#endif
+
+int p, q;
+
+int main() {
+  p = foo();
+#if __cpp_if_consteval
+  q = bar();
+#endif
+}
+
+// { dg-final { scan-tree-dump "p = 1" "original" } }
+// { dg-final { scan-tree-dump-not "= foo" "original" } }
+
+// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
+// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
-- 
2.39.1.433.g23c56f7bd5



> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>  
>        if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>  	{
> @@ -2870,7 +2901,7 @@ cp_fold (tree x)
>  	      {
>  		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>  		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>  		break;
>  	      }
>  	  }
> @@ -2890,8 +2921,12 @@ cp_fold (tree x)
>  	  {
>  	    switch (DECL_FE_FUNCTION_CODE (callee))
>  	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>  	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we know this isn't a manifestly constant-evaluated
> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>  		break;
>  	      case CP_BUILT_IN_SOURCE_LOCATION:
>  		x = fold_builtin_source_location (x);
> @@ -2924,7 +2959,7 @@ cp_fold (tree x)
>  	int m = call_expr_nargs (x);
>  	for (int i = 0; i < m; i++)
>  	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>  	    if (r != CALL_EXPR_ARG (x, i))
>  	      {
>  		if (r == error_mark_node)
> @@ -2947,7 +2982,7 @@ cp_fold (tree x)
>  
>  	if (TREE_CODE (r) != CALL_EXPR)
>  	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>  	    break;
>  	  }
>  
> @@ -2960,7 +2995,15 @@ cp_fold (tree x)
>  	   constant, but the call followed by an INDIRECT_REF is.  */
>  	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>  	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>  	optimize = sv;
>  
>          if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3030,7 @@ cp_fold (tree x)
>  	vec<constructor_elt, va_gc> *nelts = NULL;
>  	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>  	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>  	    if (op != p->value)
>  	      {
>  		if (op == error_mark_node)
> @@ -3018,7 +3061,7 @@ cp_fold (tree x)
>  
>  	for (int i = 0; i < n; i++)
>  	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>  	    if (op != TREE_VEC_ELT (x, i))
>  	      {
>  		if (!changed)
> @@ -3035,10 +3078,10 @@ cp_fold (tree x)
>      case ARRAY_RANGE_REF:
>  
>        loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>  
>        if (op0 != TREE_OPERAND (x, 0)
>  	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3109,7 @@ cp_fold (tree x)
>        /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>  	 folding, evaluates to an invariant.  In that case no need to wrap
>  	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>        if (tree_invariant_p (r))
>  	x = r;
>        break;
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..983410b9e83
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,15 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +A* f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +  return &a;
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> -- 
> 2.39.1.433.g23c56f7bd5
> 
> 
> > 
> > > 
> > > > Another approach would be to split out the trial constexpr evaluation
> > > > part of cp_fold's CALL_EXPR handling, parameterize that, and call it
> > > > directly from cp_fold_r.  With this approach we wouldn't perform as much
> > > > folding, e.g.
> > > > 
> > > >    int n = 41 + !std::is_constant_evaluated();
> > > > 
> > > > would get folded to 1 + 41 rather than 42.  But I suspect this would
> > > > give us 95% of the reapable benefits of the above approach.
> > > > 
> > > > I think I'm leaning towards this second approach, which the below patch
> > > > implements instead.  What do you think?  Bootstrapped and regtested on
> > > > x86_64-pc-linux-gnu.
> > > 
> > > That sounds reasonable, but...
> > > 
> > > > -- >8 --
> > > > 
> > > > Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
> > > >   [PR108243]
> > > > 
> > > > This PR illustrates that __builtin_is_constant_evaluated currently acts
> > > > as an optimization barrier for our speculative constexpr evaluation,
> > > > since we don't want to prematurely fold the builtin to false before the
> > > > expression in question undergoes constant evaluation as in a manifestly
> > > > constant-evaluated context (in which case the builtin must instead be
> > > > folded to true).
> > > > 
> > > > This patch fixes this by permitting __builtin_is_constant_evaluated
> > > > to get folded to false from cp_fold_r, where we know we're done with
> > > > proper constant evaluation (of manifestly constant-evaluated contexts).
> > > > 
> > > > 	PR c++/108243
> > > > 	PR c++/97553
> > > > 
> > > > gcc/cp/ChangeLog:
> > > > 
> > > > 	* cp-gimplify.cc
> > > > 	(cp_fold_r): Remove redundant *stmt_p assignments.  After
> > > > 	calling cp_fold, call maybe_fold_constexpr_call with mce_false.
> > > > 	(cp_fold) <case CALL_EXPR>: Split out trial constexpr evaluation
> > > > 	into ...
> > > > 	(maybe_fold_constexpr_call): ... here.
> > > > 
> > > > gcc/testsuite/ChangeLog:
> > > > 
> > > > 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> > > > 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> > > > ---
> > > >   gcc/cp/cp-gimplify.cc                         | 55 +++++++++++++++----
> > > >   .../g++.dg/opt/is_constant_evaluated1.C       | 20 +++++++
> > > >   .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++++++
> > > >   3 files changed, 95 insertions(+), 12 deletions(-)
> > > >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > >   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > 
> > > > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > > > index 9929d29981a..dca55056b2c 100644
> > > > --- a/gcc/cp/cp-gimplify.cc
> > > > +++ b/gcc/cp/cp-gimplify.cc
> > > > @@ -49,6 +49,7 @@ static tree cp_genericize_r (tree *, int *, void *);
> > > >   static tree cp_fold_r (tree *, int *, void *);
> > > >   static void cp_genericize_tree (tree*, bool);
> > > >   static tree cp_fold (tree);
> > > > +static tree maybe_fold_constexpr_call (tree, mce_value);
> > > >     /* Genericize a TRY_BLOCK.  */
> > > >   @@ -1034,7 +1035,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >   	    error_at (PTRMEM_CST_LOCATION (stmt),
> > > >   		      "taking address of an immediate function %qD",
> > > >   		      PTRMEM_CST_MEMBER (stmt));
> > > > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > > > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> > > >   	  break;
> > > >   	}
> > > >         break;
> > > > @@ -1046,7 +1047,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >   	  error_at (EXPR_LOCATION (stmt),
> > > >   		    "taking address of an immediate function %qD",
> > > >   		    TREE_OPERAND (stmt, 0));
> > > > -	  stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
> > > > +	  stmt = build_zero_cst (TREE_TYPE (stmt));
> > > >   	  break;
> > > >   	}
> > > >         break;
> > > > @@ -1055,7 +1056,17 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
> > > > *data_)
> > > >         break;
> > > >       }
> > > >   -  *stmt_p = stmt = cp_fold (*stmt_p);
> > > > +  stmt = cp_fold (stmt);
> > > > +
> > > > +  if (TREE_CODE (stmt) == CALL_EXPR)
> > > > +    /* Since cp_fold_r is called (from cp_genericize, cp_fold_function
> > > > +       and cp_fully_fold_init) only after the overall expression has been
> > > > +       considered for constant-evaluation, we can by now safely fold any
> > > > +       remaining __builtin_is_constant_evaluated calls to false, so try
> > > > +       constexpr expansion with mce_false.  */
> > > > +    stmt = maybe_fold_constexpr_call (stmt, mce_false);
> > > > +
> > > > +  *stmt_p = stmt;
> > > >       if (data->pset.add (stmt))
> > > >       {
> > > > @@ -2952,15 +2963,10 @@ cp_fold (tree x)
> > > >   	  }
> > > >     	optimize = nw;
> > > > -
> > > > -	/* Invoke maybe_constant_value for functions declared
> > > > -	   constexpr and not called with AGGR_INIT_EXPRs.
> > > > -	   TODO:
> > > > -	   Do constexpr expansion of expressions where the call itself is not
> > > > -	   constant, but the call followed by an INDIRECT_REF is.  */
> > > > -	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
> > > > -	    && !flag_no_inline)
> > > > -	  r = maybe_constant_value (x);
> > > > +	/* Pass mce_unknown to defer folding __builtin_is_constant_evaluated
> > > > +	   since we don't know if we're in a manifestly constant-evaluated
> > > > +	   context that hasn't yet been constant-evaluated.  */
> > > > +	r = maybe_fold_constexpr_call (x, mce_unknown);
> > > 
> > > It seems unfortunate to try to fold both here and in cp_fold_r.
> > 
> > Yes, though I'm afraid some duplication of work is pretty much
> > unavoidable.  Even if in cp_fold_r we did something like
> > 
> >   if (TREE_CODE (stmt) == CALL_EXPR)
> >     /* cp_fold_call_expr is the entire CALL_EXPR case of cp_fold.  */
> >     stmt = cp_fold_call_expr (stmt, mce_false);
> >   else
> >     stmt = cp_fold (stmt);
> > 
> > instead of
> > 
> >   stmt = cp_fold (stmt);
> >   if (TREE_CODE (stmt) == CALL_EXPR)
> >     stmt = maybe_fold_constexpr_call (stmt, mce_false);
> > 
> > we would still end up doing cp_fold on the CALL_EXPR if it's a
> > subexpression of some larger expression (since cp_fold is recursive).
> > 
> > And even if we went with the original approach of parameterizing cp_fold
> > according to manifestly_const_eval totally, we would still end up trying
> > to fold some constexpr calls twice, first with flags=ff_none (during
> > some on-the-spot folding) and again with flags=ff_mce_false (during
> > cp_fold_function), I think.
> > 
> > > 
> > > Does this patch still fold __builtin_is_constant_evaluated() even though it no
> > > longer touches the cp_fold builtin handling?
> > 
> > Indeed it doesn't, instead __builtin_is_constant_evaluated() will
> > continue to get folded during gimplification.  I thought folding it
> > might not be benefical with this approach, but on second thought we
> > should do it for consistency at least.  The following incremental
> > patch seems to work:
> > 
> > diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> > index dca55056b2c..250147bde0f 100644
> > --- a/gcc/cp/cp-gimplify.cc
> > +++ b/gcc/cp/cp-gimplify.cc
> > @@ -3124,6 +3124,14 @@ maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> >        if (TREE_CODE (r) != CALL_EXPR)
> >  	return r;
> >      }
> > +  if (fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
> > +			 BUILT_IN_FRONTEND))
> > +    {
> > +      if (manifestly_const_eval == mce_true)
> > +	return boolean_true_node;
> > +      else if (manifestly_const_eval == mce_false)
> > +	return boolean_false_node;
> > +    }
> >    return x;
> >  }
> >  
> > 
> > > 
> > > >   	optimize = sv;
> > > >             if (TREE_CODE (r) != CALL_EXPR)
> > > > @@ -3096,6 +3102,31 @@ cp_fold (tree x)
> > > >     return x;
> > > >   }
> > > >   +/* If the CALL_EXPR X calls a constexpr function, try expanding it via
> > > > +   constexpr evaluation.  Returns the expanded result or X if constexpr
> > > > +   evaluation wasn't possible.
> > > > +
> > > > +   TODO: Do constexpr expansion of expressions where the call itself
> > > > +   is not constant, but the call followed by an INDIRECT_REF is.  */
> > > > +
> > > > +static tree
> > > > +maybe_fold_constexpr_call (tree x, mce_value manifestly_const_eval)
> > > > +{
> > > > +  if (flag_no_inline)
> > > > +    return x;
> > > > +  tree callee = get_callee_fndecl (x);
> > > > +  if (!callee)
> > > > +    return x;
> > > > +  if (DECL_DECLARED_CONSTEXPR_P (callee))
> > > > +    {
> > > > +      tree r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> > > > +				     manifestly_const_eval);
> > > > +      if (TREE_CODE (r) != CALL_EXPR)
> > > > +	return r;
> > > > +    }
> > > > +  return x;
> > > > +}
> > > > +
> > > >   /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.
> > > > */
> > > >     tree
> > > > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > new file mode 100644
> > > > index 00000000000..2123f20e3e5
> > > > --- /dev/null
> > > > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> > > > @@ -0,0 +1,20 @@
> > > > +// PR c++/108243
> > > > +// { dg-do compile { target c++11 } }
> > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > +
> > > > +struct A {
> > > > +  constexpr A(int n, int m) : n(n), m(m) { }
> > > > +  int n, m;
> > > > +};
> > > > +
> > > > +constexpr int foo(int n) {
> > > > +  return n + !__builtin_is_constant_evaluated();
> > > > +}
> > > > +
> > > > +A* f(int n) {
> > > > +  static A a = {n, foo(41)};
> > > > +  return &a;
> > > > +}
> > > > +
> > > > +// { dg-final { scan-tree-dump "42" "original" } }
> > > > +// { dg-final { scan-tree-dump-not "foo \\(41\\)" "original" } }
> > > > diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > new file mode 100644
> > > > index 00000000000..ed964e20a7a
> > > > --- /dev/null
> > > > +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> > > > @@ -0,0 +1,32 @@
> > > > +// PR c++/97553
> > > > +// { dg-do compile { target c++11 } }
> > > > +// { dg-additional-options "-O -fdump-tree-original" }
> > > > +
> > > > +constexpr int foo() {
> > > > +  return __builtin_is_constant_evaluated() + 1;
> > > > +}
> > > > +
> > > > +#if __cpp_if_consteval
> > > > +constexpr int bar() {
> > > > +  if consteval {
> > > > +    return 5;
> > > > +  } else {
> > > > +    return 4;
> > > > +  }
> > > > +}
> > > > +#endif
> > > > +
> > > > +int p, q;
> > > > +
> > > > +int main() {
> > > > +  p = foo();
> > > > +#if __cpp_if_consteval
> > > > +  q = bar();
> > > > +#endif
> > > > +}
> > > > +
> > > > +// { dg-final { scan-tree-dump "p = 1" "original" } }
> > > > +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> > > > +
> > > > +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> > > > +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }
> > > 
> > > 
> > 
> 


^ permalink raw reply	[flat|nested] 15+ messages in thread

* Re: [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243]
  2023-02-10 16:51                 ` Patrick Palka
@ 2023-02-14 23:02                   ` Jason Merrill
  0 siblings, 0 replies; 15+ messages in thread
From: Jason Merrill @ 2023-02-14 23:02 UTC (permalink / raw)
  To: Patrick Palka; +Cc: gcc-patches

On 2/10/23 08:51, Patrick Palka wrote:
> On Fri, 10 Feb 2023, Patrick Palka wrote:
> 
>> On Thu, 9 Feb 2023, Patrick Palka wrote:
>>
>>> On Thu, 9 Feb 2023, Jason Merrill wrote:
>>>
>>>> On 2/9/23 09:36, Patrick Palka wrote:
>>>>> On Sun, 5 Feb 2023, Jason Merrill wrote:
>>>>>
>>>>>> On 2/3/23 15:51, Patrick Palka wrote:
>>>>>>> On Mon, 30 Jan 2023, Jason Merrill wrote:
>>>>>>>
>>>>>>>> On 1/27/23 17:02, Patrick Palka wrote:
>>>>>>>>> This PR illustrates that __builtin_is_constant_evaluated currently
>>>>>>>>> acts
>>>>>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>>>>>> expression in question would be later manifestly constant evaluated
>>>>>>>>> (in
>>>>>>>>> which case it must be folded to true).
>>>>>>>>>
>>>>>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>>>>>> to get folded as false during cp_fold_function, since at that point
>>>>>>>>> we're sure we're doing manifestly constant evaluation.  To that end
>>>>>>>>> we add a flags parameter to cp_fold that controls what mce_value the
>>>>>>>>> CALL_EXPR case passes to maybe_constant_value.
>>>>>>>>>
>>>>>>>>> bootstrapped and regtested on x86_64-pc-linux-gnu, does this look OK
>>>>>>>>> for
>>>>>>>>> trunk?
>>>>>>>>>
>>>>>>>>> 	PR c++/108243
>>>>>>>>>
>>>>>>>>> gcc/cp/ChangeLog:
>>>>>>>>>
>>>>>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>>>>>> 	(cp_fold_r): Adjust cp_fold_data use and cp_fold_calls.
>>>>>>>>> 	(cp_fold_function): Likewise.
>>>>>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>>>>>> 	(cp_fully_fold_init): Likewise.
>>>>>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>>>>>> 	isn't empty.
>>>>>>>>> 	<case CALL_EXPR>: Pass mce_false to maybe_constant_value
>>>>>>>>> 	if ff_genericize is set.
>>>>>>>>>
>>>>>>>>> gcc/testsuite/ChangeLog:
>>>>>>>>>
>>>>>>>>> 	* g++.dg/opt/pr108243.C: New test.
>>>>>>>>> ---
>>>>>>>>>      gcc/cp/cp-gimplify.cc               | 76
>>>>>>>>> ++++++++++++++++++-----------
>>>>>>>>>      gcc/testsuite/g++.dg/opt/pr108243.C | 29 +++++++++++
>>>>>>>>>      2 files changed, 76 insertions(+), 29 deletions(-)
>>>>>>>>>      create mode 100644 gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>>
>>>>>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>>>>>> index a35cedd05cc..d023a63768f 100644
>>>>>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>>>>>> @@ -43,12 +43,20 @@ along with GCC; see the file COPYING3.  If not
>>>>>>>>> see
>>>>>>>>>      #include "omp-general.h"
>>>>>>>>>      #include "opts.h"
>>>>>>>>>      +/* Flags for cp_fold and cp_fold_r.  */
>>>>>>>>> +
>>>>>>>>> +enum fold_flags {
>>>>>>>>> +  ff_none = 0,
>>>>>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>>>>>> +  ff_genericize = 1 << 0,
>>>>>>>>> +};
>>>>>>>>> +
>>>>>>>>>      /* Forward declarations.  */
>>>>>>>>>        static tree cp_genericize_r (tree *, int *, void *);
>>>>>>>>>      static tree cp_fold_r (tree *, int *, void *);
>>>>>>>>>      static void cp_genericize_tree (tree*, bool);
>>>>>>>>> -static tree cp_fold (tree);
>>>>>>>>> +static tree cp_fold (tree, fold_flags);
>>>>>>>>>        /* Genericize a TRY_BLOCK.  */
>>>>>>>>>      @@ -996,9 +1004,8 @@ struct cp_genericize_data
>>>>>>>>>      struct cp_fold_data
>>>>>>>>>      {
>>>>>>>>>        hash_set<tree> pset;
>>>>>>>>> -  bool genericize; // called from cp_fold_function?
>>>>>>>>> -
>>>>>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>>>>>> +  fold_flags flags;
>>>>>>>>> +  cp_fold_data (fold_flags flags): flags (flags) {}
>>>>>>>>>      };
>>>>>>>>>        static tree
>>>>>>>>> @@ -1039,7 +1046,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>            break;
>>>>>>>>>          }
>>>>>>>>>      -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>>>>>          if (data->pset.add (stmt))
>>>>>>>>>          {
>>>>>>>>> @@ -1119,12 +1126,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>      	 here rather than in cp_genericize to avoid problems with the
>>>>>>>>> invisible
>>>>>>>>>      	 reference transition.  */
>>>>>>>>>          case INIT_EXPR:
>>>>>>>>> -      if (data->genericize)
>>>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>>>      	cp_genericize_init_expr (stmt_p);
>>>>>>>>>            break;
>>>>>>>>>            case TARGET_EXPR:
>>>>>>>>> -      if (data->genericize)
>>>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>>>      	cp_genericize_target_expr (stmt_p);
>>>>>>>>>              /* Folding might replace e.g. a COND_EXPR with a
>>>>>>>>> TARGET_EXPR;
>>>>>>>>> in
>>>>>>>>> @@ -1157,7 +1164,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>>>> void
>>>>>>>>> *data_)
>>>>>>>>>      void
>>>>>>>>>      cp_fold_function (tree fndecl)
>>>>>>>>>      {
>>>>>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>>>>>> +  cp_fold_data data (ff_genericize);
>>>>>>>>>        cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data,
>>>>>>>>> NULL);
>>>>>>>>>      }
>>>>>>>>>      @@ -2375,7 +2382,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>>>>>      {
>>>>>>>>>        while (true)
>>>>>>>>>          {
>>>>>>>>> -      x = cp_fold (x);
>>>>>>>>> +      x = cp_fold (x, ff_none);
>>>>>>>>>            if (rval)
>>>>>>>>>      	x = mark_rvalue_use (x);
>>>>>>>>>            if (rval && DECL_P (x)
>>>>>>>>> @@ -2434,7 +2441,7 @@ cp_fully_fold_init (tree x)
>>>>>>>>>        if (processing_template_decl)
>>>>>>>>>          return x;
>>>>>>>>>        x = cp_fully_fold (x);
>>>>>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>>>>>> +  cp_fold_data data (ff_none);
>>>>>>>>>        cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>>>>>        return x;
>>>>>>>>>      }
>>>>>>>>> @@ -2469,7 +2476,7 @@ clear_fold_cache (void)
>>>>>>>>>          Function returns X or its folded variant.  */
>>>>>>>>>        static tree
>>>>>>>>> -cp_fold (tree x)
>>>>>>>>> +cp_fold (tree x, fold_flags flags)
>>>>>>>>>      {
>>>>>>>>>        tree op0, op1, op2, op3;
>>>>>>>>>        tree org_x = x, r = NULL_TREE;
>>>>>>>>> @@ -2490,8 +2497,11 @@ cp_fold (tree x)
>>>>>>>>>        if (fold_cache == NULL)
>>>>>>>>>          fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>>>>>      -  if (tree *cached = fold_cache->get (x))
>>>>>>>>> -    return *cached;
>>>>>>>>> +  bool cache_p = (flags == ff_none);
>>>>>>>>> +
>>>>>>>>> +  if (cache_p)
>>>>>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>>>>>> +      return *cached;
>>>>>>>>>          uid_sensitive_constexpr_evaluation_checker c;
>>>>>>>>>      @@ -2526,7 +2536,7 @@ cp_fold (tree x)
>>>>>>>>>      	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
>>>>>>>>> the
>>>>>>>>>      	     folding of the operand should be in the caches and if in
>>>>>>>>> cp_fold_r
>>>>>>>>>      	     it will modify it in place.  */
>>>>>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>>      	  if (op0 == error_mark_node)
>>>>>>>>>      	    x = error_mark_node;
>>>>>>>>>      	  break;
>>>>>>>>> @@ -2571,7 +2581,7 @@ cp_fold (tree x)
>>>>>>>>>      	{
>>>>>>>>>      	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>>>>>      	  if (p != x)
>>>>>>>>> -	    return cp_fold (p);
>>>>>>>>> +	    return cp_fold (p, flags);
>>>>>>>>>      	}
>>>>>>>>>            goto unary;
>>>>>>>>>      @@ -2763,8 +2773,8 @@ cp_fold (tree x)
>>>>>>>>>          case COND_EXPR:
>>>>>>>>>            loc = EXPR_LOCATION (x);
>>>>>>>>>            op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>>>              if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>>>>>      	{
>>>>>>>>> @@ -2854,7 +2864,7 @@ cp_fold (tree x)
>>>>>>>>>      	      {
>>>>>>>>>      		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>>>>>      		  r = build_nop (TREE_TYPE (x), r);
>>>>>>>>> -		x = cp_fold (r);
>>>>>>>>> +		x = cp_fold (r, flags);
>>>>>>>>>      		break;
>>>>>>>>>      	      }
>>>>>>>>>      	  }
>>>>>>>>> @@ -2908,7 +2918,7 @@ cp_fold (tree x)
>>>>>>>>>      	int m = call_expr_nargs (x);
>>>>>>>>>      	for (int i = 0; i < m; i++)
>>>>>>>>>      	  {
>>>>>>>>> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
>>>>>>>>> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>>>>>>>>>      	    if (r != CALL_EXPR_ARG (x, i))
>>>>>>>>>      	      {
>>>>>>>>>      		if (r == error_mark_node)
>>>>>>>>> @@ -2931,7 +2941,7 @@ cp_fold (tree x)
>>>>>>>>>        	if (TREE_CODE (r) != CALL_EXPR)
>>>>>>>>>      	  {
>>>>>>>>> -	    x = cp_fold (r);
>>>>>>>>> +	    x = cp_fold (r, flags);
>>>>>>>>>      	    break;
>>>>>>>>>      	  }
>>>>>>>>>      @@ -2944,7 +2954,15 @@ cp_fold (tree x)
>>>>>>>>>      	   constant, but the call followed by an INDIRECT_REF is.  */
>>>>>>>>>      	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>>>>>>>>>      	    && !flag_no_inline)
>>>>>>>>> -	  r = maybe_constant_value (x);
>>>>>>>>> +	  {
>>>>>>>>> +	    mce_value manifestly_const_eval = mce_unknown;
>>>>>>>>> +	    if (flags & ff_genericize)
>>>>>>>>> +	      /* At genericization time it's safe to fold
>>>>>>>>> +		 __builtin_is_constant_evaluated to false.  */
>>>>>>>>> +	      manifestly_const_eval = mce_false;
>>>>>>>>> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
>>>>>>>>> +				      manifestly_const_eval);
>>>>>>>>> +	  }
>>>>>>>>>      	optimize = sv;
>>>>>>>>>                if (TREE_CODE (r) != CALL_EXPR)
>>>>>>>>> @@ -2971,7 +2989,7 @@ cp_fold (tree x)
>>>>>>>>>      	vec<constructor_elt, va_gc> *nelts = NULL;
>>>>>>>>>      	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>>>>>>>>>      	  {
>>>>>>>>> -	    tree op = cp_fold (p->value);
>>>>>>>>> +	    tree op = cp_fold (p->value, flags);
>>>>>>>>>      	    if (op != p->value)
>>>>>>>>>      	      {
>>>>>>>>>      		if (op == error_mark_node)
>>>>>>>>> @@ -3002,7 +3020,7 @@ cp_fold (tree x)
>>>>>>>>>        	for (int i = 0; i < n; i++)
>>>>>>>>>      	  {
>>>>>>>>> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
>>>>>>>>> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>>>>>>>>>      	    if (op != TREE_VEC_ELT (x, i))
>>>>>>>>>      	      {
>>>>>>>>>      		if (!changed)
>>>>>>>>> @@ -3019,10 +3037,10 @@ cp_fold (tree x)
>>>>>>>>>          case ARRAY_RANGE_REF:
>>>>>>>>>              loc = EXPR_LOCATION (x);
>>>>>>>>> -      op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>>>> -      op3 = cp_fold (TREE_OPERAND (x, 3));
>>>>>>>>> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>>> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>>>>>>>>>              if (op0 != TREE_OPERAND (x, 0)
>>>>>>>>>      	  || op1 != TREE_OPERAND (x, 1)
>>>>>>>>> @@ -3050,7 +3068,7 @@ cp_fold (tree x)
>>>>>>>>>            /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j),
>>>>>>>>> which,
>>>>>>>>> after
>>>>>>>>>      	 folding, evaluates to an invariant.  In that case no need to
>>>>>>>>> wrap
>>>>>>>>>      	 this folded tree with a SAVE_EXPR.  */
>>>>>>>>> -      r = cp_fold (TREE_OPERAND (x, 0));
>>>>>>>>> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>>>            if (tree_invariant_p (r))
>>>>>>>>>      	x = r;
>>>>>>>>>            break;
>>>>>>>>> @@ -3069,7 +3087,7 @@ cp_fold (tree x)
>>>>>>>>>            copy_warning (x, org_x);
>>>>>>>>>          }
>>>>>>>>>      -  if (!c.evaluation_restricted_p ())
>>>>>>>>> +  if (cache_p && !c.evaluation_restricted_p ())
>>>>>>>>>          {
>>>>>>>>>            fold_cache->put (org_x, x);
>>>>>>>>>            /* Prevent that we try to fold an already folded result
>>>>>>>>> again.
>>>>>>>>> */
>>>>>>>>> diff --git a/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>> b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>> new file mode 100644
>>>>>>>>> index 00000000000..4c45dbba13c
>>>>>>>>> --- /dev/null
>>>>>>>>> +++ b/gcc/testsuite/g++.dg/opt/pr108243.C
>>>>>>>>> @@ -0,0 +1,29 @@
>>>>>>>>> +// PR c++/108243
>>>>>>>>> +// { dg-do compile { target c++11 } }
>>>>>>>>> +// { dg-additional-options "-O -fdump-tree-original" }
>>>>>>>>> +
>>>>>>>>> +constexpr int foo() {
>>>>>>>>> +  return __builtin_is_constant_evaluated() + 1;
>>>>>>>>> +}
>>>>>>>>> +
>>>>>>>>> +#if __cpp_if_consteval
>>>>>>>>> +constexpr int bar() {
>>>>>>>>> +  if consteval {
>>>>>>>>> +    return 5;
>>>>>>>>> +  } else {
>>>>>>>>> +    return 4;
>>>>>>>>> +  }
>>>>>>>>> +}
>>>>>>>>> +#endif
>>>>>>>>> +
>>>>>>>>> +int p, q;
>>>>>>>>> +
>>>>>>>>> +int main() {
>>>>>>>>> +  p = foo();
>>>>>>>>> +#if __cpp_if_consteval
>>>>>>>>> +  q = bar();
>>>>>>>>> +#endif
>>>>>>>>> +}
>>>>>>>>> +
>>>>>>>>> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
>>>>>>>>> +// { dg-final { scan-tree-dump-not "= bar" "original" } }
>>>>>>>>
>>>>>>>> Let's also test a static initializer that can't be fully
>>>>>>>> constant-evaluated.
>>>>>>>
>>>>>>> D'oh, doing so revealed that cp_fold_function doesn't reach static
>>>>>>> initializers; that's taken care of by cp_fully_fold_init.  So it seems
>>>>>>> we need to make cp_fold when called from the latter entry point to also
>>>>>>> assume m_c_e is false.  We can't re-use ff_genericize here because that
>>>>>>> flag has additional effects in cp_fold_r, so it seems we need another
>>>>>>> flag that only affects the manifestly constant-eval stuff; I called
>>>>>>> it ff_mce_false.  How does the following look?
>>>>>>>
>>>>>>> -- >8 --
>>>>>>>
>>>>>>> Subject: [PATCH 2/2] c++: speculative constexpr and
>>>>>>> is_constant_evaluated
>>>>>>>     [PR108243]
>>>>>>>
>>>>>>> This PR illustrates that __builtin_is_constant_evaluated currently acts
>>>>>>> as an optimization barrier for our speculative constexpr evaluation,
>>>>>>> since we don't want to prematurely fold the builtin to false if the
>>>>>>> expression in question would be later manifestly constant evaluated (in
>>>>>>> which case it must be folded to true).
>>>>>>>
>>>>>>> This patch fixes this by permitting __builtin_is_constant_evaluated
>>>>>>> to get folded as false during cp_fold_function and cp_fully_fold_init,
>>>>>>> since at these points we're sure we're done with manifestly constant
>>>>>>> evaluation.  To that end we add a flags parameter to cp_fold that
>>>>>>> controls whether we pass mce_false or mce_unknown to
>>>>>>> maybe_constant_value
>>>>>>> when folding a CALL_EXPR.
>>>>>>>
>>>>>>> 	PR c++/108243
>>>>>>> 	PR c++/97553
>>>>>>>
>>>>>>> gcc/cp/ChangeLog:
>>>>>>>
>>>>>>> 	* cp-gimplify.cc (enum fold_flags): Define.
>>>>>>> 	(cp_fold_data::genericize): Replace this data member with ...
>>>>>>> 	(cp_fold_data::fold_flags): ... this.
>>>>>>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>>>>>>> 	(cp_fold_function): Likewise.
>>>>>>> 	(cp_fold_maybe_rvalue): Likewise.
>>>>>>> 	(cp_fully_fold_init): Likewise.
>>>>>>> 	(cp_fold): Add fold_flags parameter.  Don't cache if flags
>>>>>>> 	isn't empty.
>>>>>>> 	<case CALL_EXPR>: If ff_genericize is set, fold
>>>>>>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>>>>>>> 	maybe_constant_value.
>>>>>>>
>>>>>>> gcc/testsuite/ChangeLog:
>>>>>>>
>>>>>>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>>>>>>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>>>>>>> ---
>>>>>>>     gcc/cp/cp-gimplify.cc                         | 88
>>>>>>> ++++++++++++-------
>>>>>>>     .../g++.dg/opt/is_constant_evaluated1.C       | 14 +++
>>>>>>>     .../g++.dg/opt/is_constant_evaluated2.C       | 32 +++++++
>>>>>>>     3 files changed, 104 insertions(+), 30 deletions(-)
>>>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>>>>>>     create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>>>>>>
>>>>>>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>>>>>>> index 9929d29981a..590ed787997 100644
>>>>>>> --- a/gcc/cp/cp-gimplify.cc
>>>>>>> +++ b/gcc/cp/cp-gimplify.cc
>>>>>>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>>>>>>     #include "omp-general.h"
>>>>>>>     #include "opts.h"
>>>>>>>     +/* Flags for cp_fold and cp_fold_r.  */
>>>>>>> +
>>>>>>> +enum fold_flags {
>>>>>>> +  ff_none = 0,
>>>>>>> +  /* Whether we're being called from cp_fold_function.  */
>>>>>>> +  ff_genericize = 1 << 0,
>>>>>>> +  /* Whether we're folding late enough that we could assume
>>>>>>> +     we're definitely not in a manifestly constant-evaluated
>>>>>>> +     context.  */
>>>>>>
>>>>>> It's not necessarily a matter of late enough; we could fold sooner and
>>>>>> still
>>>>>> know that, as in cp_fully_fold_init.  We could do the same at other
>>>>>> full-expression points, but we don't because we want to delay folding as
>>>>>> much
>>>>>> as possible.  So let's say "folding at a point where we know we're..."
>>>>>>
>>>>>>> +  ff_mce_false = 1 << 1,
>>>>>>> +};
>>>>>>> +
>>>>>>> +using fold_flags_t = int;
>>>>>>> +
>>>>>>>     /* Forward declarations.  */
>>>>>>>       static tree cp_genericize_r (tree *, int *, void *);
>>>>>>>     static tree cp_fold_r (tree *, int *, void *);
>>>>>>>     static void cp_genericize_tree (tree*, bool);
>>>>>>> -static tree cp_fold (tree);
>>>>>>> +static tree cp_fold (tree, fold_flags_t);
>>>>>>>       /* Genericize a TRY_BLOCK.  */
>>>>>>>     @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>>>>>>     struct cp_fold_data
>>>>>>>     {
>>>>>>>       hash_set<tree> pset;
>>>>>>> -  bool genericize; // called from cp_fold_function?
>>>>>>> -
>>>>>>> -  cp_fold_data (bool g): genericize (g) {}
>>>>>>> +  fold_flags_t flags;
>>>>>>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>>>>>>     };
>>>>>>>       static tree
>>>>>>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>>>> *data_)
>>>>>>>           break;
>>>>>>>         }
>>>>>>>     -  *stmt_p = stmt = cp_fold (*stmt_p);
>>>>>>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>>>>>>         if (data->pset.add (stmt))
>>>>>>>         {
>>>>>>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees,
>>>>>>> void
>>>>>>> *data_)
>>>>>>>     	 here rather than in cp_genericize to avoid problems with the
>>>>>>> invisible
>>>>>>>     	 reference transition.  */
>>>>>>>         case INIT_EXPR:
>>>>>>> -      if (data->genericize)
>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>     	cp_genericize_init_expr (stmt_p);
>>>>>>>           break;
>>>>>>>           case TARGET_EXPR:
>>>>>>> -      if (data->genericize)
>>>>>>> +      if (data->flags & ff_genericize)
>>>>>>>     	cp_genericize_target_expr (stmt_p);
>>>>>>>             /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR;
>>>>>>> in
>>>>>>> @@ -1173,7 +1186,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void
>>>>>>> *data_)
>>>>>>>     void
>>>>>>>     cp_fold_function (tree fndecl)
>>>>>>>     {
>>>>>>> -  cp_fold_data data (/*genericize*/true);
>>>>>>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>>>>>
>>>>>> Here would be a good place for a comment about passing mce_false because
>>>>>> all
>>>>>> manifestly-constant-evaluated expressions will have been
>>>>>> constant-evaluated
>>>>>> already if possible.
>>>>>>
>>>>>>>       cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>>>>>>     }
>>>>>>>     @@ -2391,7 +2404,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>>>>>>     {
>>>>>>>       while (true)
>>>>>>>         {
>>>>>>> -      x = cp_fold (x);
>>>>>>> +      x = cp_fold (x, ff_none);
>>>>>>>           if (rval)
>>>>>>>     	x = mark_rvalue_use (x);
>>>>>>>           if (rval && DECL_P (x)
>>>>>>> @@ -2450,7 +2463,7 @@ cp_fully_fold_init (tree x)
>>>>>>>       if (processing_template_decl)
>>>>>>>         return x;
>>>>>>>       x = cp_fully_fold (x);
>>>>>>> -  cp_fold_data data (/*genericize*/false);
>>>>>>> +  cp_fold_data data (ff_mce_false);
>>>>>>>       cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>>>>>>       return x;
>>>>>>>     }
>>>>>>> @@ -2485,7 +2498,7 @@ clear_fold_cache (void)
>>>>>>>         Function returns X or its folded variant.  */
>>>>>>>       static tree
>>>>>>> -cp_fold (tree x)
>>>>>>> +cp_fold (tree x, fold_flags_t flags)
>>>>>>>     {
>>>>>>>       tree op0, op1, op2, op3;
>>>>>>>       tree org_x = x, r = NULL_TREE;
>>>>>>> @@ -2506,8 +2519,11 @@ cp_fold (tree x)
>>>>>>>       if (fold_cache == NULL)
>>>>>>>         fold_cache = hash_map<tree, tree>::create_ggc (101);
>>>>>>>     -  if (tree *cached = fold_cache->get (x))
>>>>>>> -    return *cached;
>>>>>>> +  bool cache_p = (flags == ff_none);
>>>>>>> +
>>>>>>> +  if (cache_p)
>>>>>>> +    if (tree *cached = fold_cache->get (x))
>>>>>>> +      return *cached;
>>>>>>>         uid_sensitive_constexpr_evaluation_checker c;
>>>>>>>     @@ -2542,7 +2558,7 @@ cp_fold (tree x)
>>>>>>>     	     Don't create a new tree if op0 != TREE_OPERAND (x, 0),
>>>>>>> the
>>>>>>>     	     folding of the operand should be in the caches and if in
>>>>>>> cp_fold_r
>>>>>>>     	     it will modify it in place.  */
>>>>>>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>>>>>>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>>>>>>     	  if (op0 == error_mark_node)
>>>>>>>     	    x = error_mark_node;
>>>>>>>     	  break;
>>>>>>> @@ -2587,7 +2603,7 @@ cp_fold (tree x)
>>>>>>>     	{
>>>>>>>     	  tree p = maybe_undo_parenthesized_ref (x);
>>>>>>>     	  if (p != x)
>>>>>>> -	    return cp_fold (p);
>>>>>>> +	    return cp_fold (p, flags);
>>>>>>>     	}
>>>>>>>           goto unary;
>>>>>>>     @@ -2779,8 +2795,8 @@ cp_fold (tree x)
>>>>>>>         case COND_EXPR:
>>>>>>>           loc = EXPR_LOCATION (x);
>>>>>>>           op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
>>>>>>> -      op1 = cp_fold (TREE_OPERAND (x, 1));
>>>>>>> -      op2 = cp_fold (TREE_OPERAND (x, 2));
>>>>>>> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
>>>>>>> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>>>>>>>             if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>>>>>>>     	{
>>>>>>> @@ -2870,7 +2886,7 @@ cp_fold (tree x)
>>>>>>>     	      {
>>>>>>>     		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>>>>>>>     		  r = build_nop (TREE_TYPE (x), r);
>>>>>>> -		x = cp_fold (r);
>>>>>>> +		x = cp_fold (r, flags);
>>>>>>>     		break;
>>>>>>>     	      }
>>>>>>>     	  }
>>>>>>> @@ -2890,8 +2906,12 @@ cp_fold (tree x)
>>>>>>>     	  {
>>>>>>>     	    switch (DECL_FE_FUNCTION_CODE (callee))
>>>>>>>     	      {
>>>>>>> -		/* Defer folding __builtin_is_constant_evaluated.  */
>>>>>>>     	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
>>>>>>> +		/* Defer folding __builtin_is_constant_evaluated unless
>>>>>>> +		   we can assume this isn't a manifestly constant-evaluated
>>>>>>
>>>>>> s/can assume/know/
>>>>>>
>>>>>> OK with those comment changes.
>>>>>
>>>>> Thanks a lot.  Unfortunately I think the patch has a significant problem
>>>>> that only just occurred to me -- disabling the cp_fold cache when the
>>>>> flag ff_mce_false is set effectively makes cp_fold_function and
>>>>> cp_fully_fold_init quadratic in the size of the expression (since
>>>>> cp_fold_r calls cp_fold on each subtree, and cp_fold when the cache is
>>>>> disabled will end up fully walking each subtree).  Note that the reason
>>>>> we must disable the cache is because cp_fold with ff_mce_false might
>>>>> give a different folded result than without that flag if the expression
>>>>> contains a suitable CALL_EXPR subexpression.
>>>>
>>>> Good point.
>>>>
>>>>> One approach to fix this complexity issue would be to parameterize the
>>>>> cache according to the flags that were passed to cp_fold, which would
>>>>> allow us to keep the cache enabled when ff_mce_false is set.  A downside
>>>>> to this approach is that the size of the cp_fold cache would essentially
>>>>> double since for each tree we'd now have two cache entries, one for
>>>>> flags=ff_none and another for flags=ff_mce_false.
>>>>
>>>> We could also clear the cache before cp_fold_function since the two folds
>>>> shouldn't overlap (much).
>>>
>>> Makes sense, but IIUC we'd also have to clear it before (and after)
>>> cp_fully_fold_init too, which unlike cp_fold_function may get called
>>> in the middle of a function body.
>>
>> Ah sorry, I think I misunderstood your idea.  Clearing the cache between
>> cp_fold_function calls would definitely help with controlling the size of the
>> cache, and indeed there shouldn't be much overlap because there isn't
>> much sharing of expression trees across function bodies.
>>
>> However, I was curious about how big the fold_cache gets in practice,
>> and it turns out it doesn't get very big at all since we regularly clear
>> the fold_cache via clear_cv_and_fold_caches anyway.  According to my
>> experiments it doesn't get larger than about ~10k elements.  So a
>> doubling of that is pretty much insignificant.
>>
>> So ISTM parameterizing the cache is the way to go.  How does the
>> following look?
>>
>> -- >8 --
>>
>> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>>   [PR108243]
>>
>> 	PR c++/108243
>> 	PR c++/97553
>>
>> gcc/cp/ChangeLog:
>>
>> 	* cp-gimplify.cc (enum fold_flags): Define.
>> 	(fold_flags_t): Declare.
>> 	(cp_fold_data::genericize): Replace this data member with ...
>> 	(cp_fold_data::fold_flags): ... this.
>> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
>> 	(cp_fold_function): Likewise.
>> 	(cp_fold_maybe_rvalue): Likewise.
>> 	(cp_fully_fold_init): Likewise.
>> 	(fold_cache): Replace with ...
>> 	(fold_caches): ... this 2-element array of caches.
>> 	(get_fold_cache): Define.
>> 	(clear_fold_cache): Adjust.
>> 	(cp_fold): Add flags parameter.  Call get_fold_cache.
>> 	<case CALL_EXPR>: If ff_mce_false is set, fold
>> 	__builtin_is_constant_evaluated to false and pass mce_false to
>> 	maybe_constant_value.
>>
>> gcc/testsuite/ChangeLog:
>>
>> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
>> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
>> ---
>>   gcc/cp/cp-gimplify.cc                         | 103 +++++++++++++-----
>>   .../g++.dg/opt/is_constant_evaluated1.C       |  15 +++
>>   .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++++
>>   3 files changed, 120 insertions(+), 30 deletions(-)
>>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
>>
>> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
>> index 9929d29981a..01e624bc9de 100644
>> --- a/gcc/cp/cp-gimplify.cc
>> +++ b/gcc/cp/cp-gimplify.cc
>> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>>   #include "omp-general.h"
>>   #include "opts.h"
>>   
>> +/* Flags for cp_fold and cp_fold_r.  */
>> +
>> +enum fold_flags {
>> +  ff_none = 0,
>> +  /* Whether we're being called from cp_fold_function.  */
>> +  ff_genericize = 1 << 0,
>> +  /* Whether we're folding at a point where we know we're
>> +     definitely not in a manifestly constant-evaluated
>> +     context.  */
>> +  ff_mce_false = 1 << 1,
>> +};
>> +
>> +using fold_flags_t = int;
>> +
>>   /* Forward declarations.  */
>>   
>>   static tree cp_genericize_r (tree *, int *, void *);
>>   static tree cp_fold_r (tree *, int *, void *);
>>   static void cp_genericize_tree (tree*, bool);
>> -static tree cp_fold (tree);
>> +static tree cp_fold (tree, fold_flags_t);
>>   
>>   /* Genericize a TRY_BLOCK.  */
>>   
>> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>>   struct cp_fold_data
>>   {
>>     hash_set<tree> pset;
>> -  bool genericize; // called from cp_fold_function?
>> -
>> -  cp_fold_data (bool g): genericize (g) {}
>> +  fold_flags_t flags;
>> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>>   };
>>   
>>   static tree
>> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>         break;
>>       }
>>   
>> -  *stmt_p = stmt = cp_fold (*stmt_p);
>> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>>   
>>     if (data->pset.add (stmt))
>>       {
>> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>   	 here rather than in cp_genericize to avoid problems with the invisible
>>   	 reference transition.  */
>>       case INIT_EXPR:
>> -      if (data->genericize)
>> +      if (data->flags & ff_genericize)
>>   	cp_genericize_init_expr (stmt_p);
>>         break;
>>   
>>       case TARGET_EXPR:
>> -      if (data->genericize)
>> +      if (data->flags & ff_genericize)
>>   	cp_genericize_target_expr (stmt_p);
>>   
>>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
>> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>>   void
>>   cp_fold_function (tree fndecl)
>>   {
>> -  cp_fold_data data (/*genericize*/true);
>> +  /* By now all manifestly-constant-evaluated expressions will have
>> +     been constant-evaluated already if possible, so we can safely
>> +     pass ff_mce_false.  */
>> +  cp_fold_data data (ff_genericize | ff_mce_false);
>>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>>   }
>>   
>> @@ -2391,7 +2407,7 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>>   {
>>     while (true)
>>       {
>> -      x = cp_fold (x);
>> +      x = cp_fold (x, ff_none);
>>         if (rval)
>>   	x = mark_rvalue_use (x);
>>         if (rval && DECL_P (x)
>> @@ -2450,7 +2466,7 @@ cp_fully_fold_init (tree x)
>>     if (processing_template_decl)
>>       return x;
>>     x = cp_fully_fold (x);
>> -  cp_fold_data data (/*genericize*/false);
>> +  cp_fold_data data (ff_mce_false);
>>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>>     return x;
>>   }
>> @@ -2466,15 +2482,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>>     return cp_fold_maybe_rvalue (x, !lval);
>>   }
>>   
>> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
>> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
>> +
>> +/* Subroutine of cp_fold.  Returns which fold cache to use according
>> +   to the given flags.  We need multiple caches since the result of
>> +   folding may depend on which flags are used.  */
>> +
>> +static hash_map<tree, tree> *&
>> +get_fold_cache (fold_flags_t flags)
>> +{
>> +  if (flags & ff_mce_false)
>> +    return fold_caches[1];
>> +  else
>> +    return fold_caches[0];
>> +}
>>   
>>   /* Dispose of the whole FOLD_CACHE.  */
>>   
>>   void
>>   clear_fold_cache (void)
>>   {
>> -  if (fold_cache != NULL)
>> -    fold_cache->empty ();
>> +  for (auto& fold_cache : fold_caches)
>> +    if (fold_cache != NULL)
>> +      fold_cache->empty ();
>>   }
>>   
>>   /*  This function tries to fold an expression X.
>> @@ -2485,7 +2515,7 @@ clear_fold_cache (void)
>>       Function returns X or its folded variant.  */
>>   
>>   static tree
>> -cp_fold (tree x)
>> +cp_fold (tree x, fold_flags_t flags)
>>   {
>>     tree op0, op1, op2, op3;
>>     tree org_x = x, r = NULL_TREE;
>> @@ -2503,6 +2533,7 @@ cp_fold (tree x)
>>     if (DECL_P (x) || CONSTANT_CLASS_P (x))
>>       return x;
>>   
>> +  auto& fold_cache = get_fold_cache (flags);
>>     if (fold_cache == NULL)
>>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>>   
>> @@ -2542,7 +2573,7 @@ cp_fold (tree x)
>>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>>   	     folding of the operand should be in the caches and if in cp_fold_r
>>   	     it will modify it in place.  */
>> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
>> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>>   	  if (op0 == error_mark_node)
>>   	    x = error_mark_node;
>>   	  break;
>> @@ -2587,7 +2618,7 @@ cp_fold (tree x)
>>   	{
>>   	  tree p = maybe_undo_parenthesized_ref (x);
>>   	  if (p != x)
>> -	    return cp_fold (p);
>> +	    return cp_fold (p, flags);
>>   	}
>>         goto unary;
>>   
>> @@ -2779,8 +2810,8 @@ cp_fold (tree x)
>>       case COND_EXPR:
>>         loc = EXPR_LOCATION (x);
>>         op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> 
> Whoops, we should also propagate the flags through calls to
> cp_fold_rvalue and cp_fold_maybe_rvalue from cp_fold.  The below
> version fixes this by adding static overloads of these functions that
> additionally take and propagate a fold_flags parameter.

OK.

> -- >8 --
> 
> Subject: [PATCH] c++: speculative constexpr and is_constant_evaluated
>   [PR108243]
> 
> 	PR c++/108243
> 	PR c++/97553
> 
> gcc/cp/ChangeLog:
> 
> 	* cp-gimplify.cc (enum fold_flags): Define.
> 	(fold_flags_t): Declare.
> 	(cp_fold_data::genericize): Replace this data member with ...
> 	(cp_fold_data::fold_flags): ... this.
> 	(cp_fold_r): Adjust use of cp_fold_data and calls to cp_fold.
> 	(cp_fold_function): Likewise.
> 	(cp_fold_maybe_rvalue): Add a static overload that takes
> 	and propagates a fold_flags_t parameter, and define the existing
> 	public overload in terms of it.
> 	(cp_fold_rvalue): Likewise.
> 	(cp_fully_fold_init): Adjust use of cp_fold_data.
> 	(fold_cache): Replace with ...
> 	(fold_caches): ... this 2-element array of caches.
> 	(get_fold_cache): Define.
> 	(clear_fold_cache): Adjust.
> 	(cp_fold): Add fold_flags_t parameter.  Call get_fold_cache.
> 	Pass flags to cp_fold, cp_fold_rvalue and cp_fold_maybe_rvalue.
> 	<case CALL_EXPR>: If ff_mce_false is set, fold
> 	__builtin_is_constant_evaluated to false and pass mce_false to
> 	maybe_constant_value.
> 
> gcc/testsuite/ChangeLog:
> 
> 	* g++.dg/opt/is_constant_evaluated1.C: New test.
> 	* g++.dg/opt/is_constant_evaluated2.C: New test.
> ---
>   gcc/cp/cp-gimplify.cc                         | 139 ++++++++++++------
>   .../g++.dg/opt/is_constant_evaluated1.C       |  15 ++
>   .../g++.dg/opt/is_constant_evaluated2.C       |  32 ++++
>   3 files changed, 144 insertions(+), 42 deletions(-)
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
>   create mode 100644 gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> 
> diff --git a/gcc/cp/cp-gimplify.cc b/gcc/cp/cp-gimplify.cc
> index 9929d29981a..edece6b7a8a 100644
> --- a/gcc/cp/cp-gimplify.cc
> +++ b/gcc/cp/cp-gimplify.cc
> @@ -43,12 +43,26 @@ along with GCC; see the file COPYING3.  If not see
>   #include "omp-general.h"
>   #include "opts.h"
>   
> +/* Flags for cp_fold and cp_fold_r.  */
> +
> +enum fold_flags {
> +  ff_none = 0,
> +  /* Whether we're being called from cp_fold_function.  */
> +  ff_genericize = 1 << 0,
> +  /* Whether we're folding at a point where we know we're
> +     definitely not in a manifestly constant-evaluated
> +     context.  */
> +  ff_mce_false = 1 << 1,
> +};
> +
> +using fold_flags_t = int;
> +
>   /* Forward declarations.  */
>   
>   static tree cp_genericize_r (tree *, int *, void *);
>   static tree cp_fold_r (tree *, int *, void *);
>   static void cp_genericize_tree (tree*, bool);
> -static tree cp_fold (tree);
> +static tree cp_fold (tree, fold_flags_t);
>   
>   /* Genericize a TRY_BLOCK.  */
>   
> @@ -1012,9 +1026,8 @@ struct cp_genericize_data
>   struct cp_fold_data
>   {
>     hash_set<tree> pset;
> -  bool genericize; // called from cp_fold_function?
> -
> -  cp_fold_data (bool g): genericize (g) {}
> +  fold_flags_t flags;
> +  cp_fold_data (fold_flags_t flags): flags (flags) {}
>   };
>   
>   static tree
> @@ -1055,7 +1068,7 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>         break;
>       }
>   
> -  *stmt_p = stmt = cp_fold (*stmt_p);
> +  *stmt_p = stmt = cp_fold (*stmt_p, data->flags);
>   
>     if (data->pset.add (stmt))
>       {
> @@ -1135,12 +1148,12 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   	 here rather than in cp_genericize to avoid problems with the invisible
>   	 reference transition.  */
>       case INIT_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_init_expr (stmt_p);
>         break;
>   
>       case TARGET_EXPR:
> -      if (data->genericize)
> +      if (data->flags & ff_genericize)
>   	cp_genericize_target_expr (stmt_p);
>   
>         /* Folding might replace e.g. a COND_EXPR with a TARGET_EXPR; in
> @@ -1173,7 +1186,10 @@ cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
>   void
>   cp_fold_function (tree fndecl)
>   {
> -  cp_fold_data data (/*genericize*/true);
> +  /* By now all manifestly-constant-evaluated expressions will have
> +     been constant-evaluated already if possible, so we can safely
> +     pass ff_mce_false.  */
> +  cp_fold_data data (ff_genericize | ff_mce_false);
>     cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
>   }
>   
> @@ -2386,12 +2402,12 @@ cxx_omp_disregard_value_expr (tree decl, bool shared)
>   
>   /* Fold expression X which is used as an rvalue if RVAL is true.  */
>   
> -tree
> -cp_fold_maybe_rvalue (tree x, bool rval)
> +static tree
> +cp_fold_maybe_rvalue (tree x, bool rval, fold_flags_t flags)
>   {
>     while (true)
>       {
> -      x = cp_fold (x);
> +      x = cp_fold (x, flags);
>         if (rval)
>   	x = mark_rvalue_use (x);
>         if (rval && DECL_P (x)
> @@ -2409,12 +2425,24 @@ cp_fold_maybe_rvalue (tree x, bool rval)
>     return x;
>   }
>   
> +tree
> +cp_fold_maybe_rvalue (tree x, bool rval)
> +{
> +  return cp_fold_maybe_rvalue (x, rval, ff_none);
> +}
> +
>   /* Fold expression X which is used as an rvalue.  */
>   
> +static tree
> +cp_fold_rvalue (tree x, fold_flags_t flags)
> +{
> +  return cp_fold_maybe_rvalue (x, true, flags);
> +}
> +
>   tree
>   cp_fold_rvalue (tree x)
>   {
> -  return cp_fold_maybe_rvalue (x, true);
> +  return cp_fold_rvalue (x, ff_none);
>   }
>   
>   /* Perform folding on expression X.  */
> @@ -2450,7 +2478,7 @@ cp_fully_fold_init (tree x)
>     if (processing_template_decl)
>       return x;
>     x = cp_fully_fold (x);
> -  cp_fold_data data (/*genericize*/false);
> +  cp_fold_data data (ff_mce_false);
>     cp_walk_tree (&x, cp_fold_r, &data, NULL);
>     return x;
>   }
> @@ -2466,15 +2494,29 @@ c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
>     return cp_fold_maybe_rvalue (x, !lval);
>   }
>   
> -static GTY((deletable)) hash_map<tree, tree> *fold_cache;
> +static GTY((deletable)) hash_map<tree, tree> *fold_caches[2];
> +
> +/* Subroutine of cp_fold.  Returns which fold cache to use according
> +   to the given flags.  We need multiple caches since the result of
> +   folding may depend on which flags are used.  */
> +
> +static hash_map<tree, tree> *&
> +get_fold_cache (fold_flags_t flags)
> +{
> +  if (flags & ff_mce_false)
> +    return fold_caches[1];
> +  else
> +    return fold_caches[0];
> +}
>   
>   /* Dispose of the whole FOLD_CACHE.  */
>   
>   void
>   clear_fold_cache (void)
>   {
> -  if (fold_cache != NULL)
> -    fold_cache->empty ();
> +  for (auto& fold_cache : fold_caches)
> +    if (fold_cache != NULL)
> +      fold_cache->empty ();
>   }
>   
>   /*  This function tries to fold an expression X.
> @@ -2485,7 +2527,7 @@ clear_fold_cache (void)
>       Function returns X or its folded variant.  */
>   
>   static tree
> -cp_fold (tree x)
> +cp_fold (tree x, fold_flags_t flags)
>   {
>     tree op0, op1, op2, op3;
>     tree org_x = x, r = NULL_TREE;
> @@ -2503,6 +2545,7 @@ cp_fold (tree x)
>     if (DECL_P (x) || CONSTANT_CLASS_P (x))
>       return x;
>   
> +  auto& fold_cache = get_fold_cache (flags);
>     if (fold_cache == NULL)
>       fold_cache = hash_map<tree, tree>::create_ggc (101);
>   
> @@ -2517,7 +2560,7 @@ cp_fold (tree x)
>       case CLEANUP_POINT_EXPR:
>         /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
>   	 effects.  */
> -      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
> +      r = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
>         if (!TREE_SIDE_EFFECTS (r))
>   	x = r;
>         break;
> @@ -2542,14 +2585,14 @@ cp_fold (tree x)
>   	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
>   	     folding of the operand should be in the caches and if in cp_fold_r
>   	     it will modify it in place.  */
> -	  op0 = cp_fold (TREE_OPERAND (x, 0));
> +	  op0 = cp_fold (TREE_OPERAND (x, 0), flags);
>   	  if (op0 == error_mark_node)
>   	    x = error_mark_node;
>   	  break;
>   	}
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>   
>         if (code == CONVERT_EXPR
>   	  && SCALAR_TYPE_P (TREE_TYPE (x))
> @@ -2577,7 +2620,7 @@ cp_fold (tree x)
>         break;
>   
>       case EXCESS_PRECISION_EXPR:
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>         x = fold_convert_loc (EXPR_LOCATION (x), TREE_TYPE (x), op0);
>         break;
>   
> @@ -2587,13 +2630,13 @@ cp_fold (tree x)
>   	{
>   	  tree p = maybe_undo_parenthesized_ref (x);
>   	  if (p != x)
> -	    return cp_fold (p);
> +	    return cp_fold (p, flags);
>   	}
>         goto unary;
>   
>       case ADDR_EXPR:
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false, flags);
>   
>         /* Cope with user tricks that amount to offsetof.  */
>         if (op0 != error_mark_node
> @@ -2630,7 +2673,7 @@ cp_fold (tree x)
>       unary:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
>   
>       finish_unary:
>         if (op0 != TREE_OPERAND (x, 0))
> @@ -2657,7 +2700,7 @@ cp_fold (tree x)
>         break;
>   
>       case UNARY_PLUS_EXPR:
> -      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> +      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
>         if (op0 == error_mark_node)
>   	x = error_mark_node;
>         else
> @@ -2711,8 +2754,8 @@ cp_fold (tree x)
>       case RANGE_EXPR: case COMPLEX_EXPR:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
> -      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
> +      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops, flags);
> +      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1), flags);
>   
>         /* decltype(nullptr) has only one value, so optimize away all comparisons
>   	 with that type right away, keeping them in the IL causes troubles for
> @@ -2778,9 +2821,9 @@ cp_fold (tree x)
>       case VEC_COND_EXPR:
>       case COND_EXPR:
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> +      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
>   
>         if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
>   	{
> @@ -2870,7 +2913,7 @@ cp_fold (tree x)
>   	      {
>   		if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
>   		  r = build_nop (TREE_TYPE (x), r);
> -		x = cp_fold (r);
> +		x = cp_fold (r, flags);
>   		break;
>   	      }
>   	  }
> @@ -2890,8 +2933,12 @@ cp_fold (tree x)
>   	  {
>   	    switch (DECL_FE_FUNCTION_CODE (callee))
>   	      {
> -		/* Defer folding __builtin_is_constant_evaluated.  */
>   	      case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
> +		/* Defer folding __builtin_is_constant_evaluated unless
> +		   we know this isn't a manifestly constant-evaluated
> +		   context.  */
> +		if (flags & ff_mce_false)
> +		  x = boolean_false_node;
>   		break;
>   	      case CP_BUILT_IN_SOURCE_LOCATION:
>   		x = fold_builtin_source_location (x);
> @@ -2924,7 +2971,7 @@ cp_fold (tree x)
>   	int m = call_expr_nargs (x);
>   	for (int i = 0; i < m; i++)
>   	  {
> -	    r = cp_fold (CALL_EXPR_ARG (x, i));
> +	    r = cp_fold (CALL_EXPR_ARG (x, i), flags);
>   	    if (r != CALL_EXPR_ARG (x, i))
>   	      {
>   		if (r == error_mark_node)
> @@ -2947,7 +2994,7 @@ cp_fold (tree x)
>   
>   	if (TREE_CODE (r) != CALL_EXPR)
>   	  {
> -	    x = cp_fold (r);
> +	    x = cp_fold (r, flags);
>   	    break;
>   	  }
>   
> @@ -2960,7 +3007,15 @@ cp_fold (tree x)
>   	   constant, but the call followed by an INDIRECT_REF is.  */
>   	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
>   	    && !flag_no_inline)
> -	  r = maybe_constant_value (x);
> +	  {
> +	    mce_value manifestly_const_eval = mce_unknown;
> +	    if (flags & ff_mce_false)
> +	      /* Allow folding __builtin_is_constant_evaluated to false during
> +		 constexpr evaluation of this call.  */
> +	      manifestly_const_eval = mce_false;
> +	    r = maybe_constant_value (x, /*decl=*/NULL_TREE,
> +				      manifestly_const_eval);
> +	  }
>   	optimize = sv;
>   
>           if (TREE_CODE (r) != CALL_EXPR)
> @@ -2987,7 +3042,7 @@ cp_fold (tree x)
>   	vec<constructor_elt, va_gc> *nelts = NULL;
>   	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
>   	  {
> -	    tree op = cp_fold (p->value);
> +	    tree op = cp_fold (p->value, flags);
>   	    if (op != p->value)
>   	      {
>   		if (op == error_mark_node)
> @@ -3018,7 +3073,7 @@ cp_fold (tree x)
>   
>   	for (int i = 0; i < n; i++)
>   	  {
> -	    tree op = cp_fold (TREE_VEC_ELT (x, i));
> +	    tree op = cp_fold (TREE_VEC_ELT (x, i), flags);
>   	    if (op != TREE_VEC_ELT (x, i))
>   	      {
>   		if (!changed)
> @@ -3035,10 +3090,10 @@ cp_fold (tree x)
>       case ARRAY_RANGE_REF:
>   
>         loc = EXPR_LOCATION (x);
> -      op0 = cp_fold (TREE_OPERAND (x, 0));
> -      op1 = cp_fold (TREE_OPERAND (x, 1));
> -      op2 = cp_fold (TREE_OPERAND (x, 2));
> -      op3 = cp_fold (TREE_OPERAND (x, 3));
> +      op0 = cp_fold (TREE_OPERAND (x, 0), flags);
> +      op1 = cp_fold (TREE_OPERAND (x, 1), flags);
> +      op2 = cp_fold (TREE_OPERAND (x, 2), flags);
> +      op3 = cp_fold (TREE_OPERAND (x, 3), flags);
>   
>         if (op0 != TREE_OPERAND (x, 0)
>   	  || op1 != TREE_OPERAND (x, 1)
> @@ -3066,7 +3121,7 @@ cp_fold (tree x)
>         /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
>   	 folding, evaluates to an invariant.  In that case no need to wrap
>   	 this folded tree with a SAVE_EXPR.  */
> -      r = cp_fold (TREE_OPERAND (x, 0));
> +      r = cp_fold (TREE_OPERAND (x, 0), flags);
>         if (tree_invariant_p (r))
>   	x = r;
>         break;
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> new file mode 100644
> index 00000000000..983410b9e83
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated1.C
> @@ -0,0 +1,15 @@
> +// PR c++/108243
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +struct A {
> +  constexpr A(int n, int m) : n(n), m(m) { }
> +  int n, m;
> +};
> +
> +A* f(int n) {
> +  static A a = {n, __builtin_is_constant_evaluated()};
> +  return &a;
> +}
> +
> +// { dg-final { scan-tree-dump-not "is_constant_evaluated" "original" } }
> diff --git a/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> new file mode 100644
> index 00000000000..ed964e20a7a
> --- /dev/null
> +++ b/gcc/testsuite/g++.dg/opt/is_constant_evaluated2.C
> @@ -0,0 +1,32 @@
> +// PR c++/97553
> +// { dg-do compile { target c++11 } }
> +// { dg-additional-options "-O -fdump-tree-original" }
> +
> +constexpr int foo() {
> +  return __builtin_is_constant_evaluated() + 1;
> +}
> +
> +#if __cpp_if_consteval
> +constexpr int bar() {
> +  if consteval {
> +    return 5;
> +  } else {
> +    return 4;
> +  }
> +}
> +#endif
> +
> +int p, q;
> +
> +int main() {
> +  p = foo();
> +#if __cpp_if_consteval
> +  q = bar();
> +#endif
> +}
> +
> +// { dg-final { scan-tree-dump "p = 1" "original" } }
> +// { dg-final { scan-tree-dump-not "= foo" "original" } }
> +
> +// { dg-final { scan-tree-dump "q = 4" "original" { target c++23 } } }
> +// { dg-final { scan-tree-dump-not "= bar" "original" { target c++23 } } }


^ permalink raw reply	[flat|nested] 15+ messages in thread

end of thread, other threads:[~2023-02-14 23:02 UTC | newest]

Thread overview: 15+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2023-01-27 22:02 [PATCH 1/2] c++: make manifestly_const_eval tri-state Patrick Palka
2023-01-27 22:02 ` [PATCH 2/2] c++: speculative constexpr and is_constant_evaluated [PR108243] Patrick Palka
2023-01-27 22:05   ` Patrick Palka
2023-01-30 20:05   ` Jason Merrill
2023-02-03 20:51     ` Patrick Palka
2023-02-03 20:57       ` Patrick Palka
2023-02-05 20:11       ` Jason Merrill
2023-02-09 17:36         ` Patrick Palka
2023-02-09 23:36           ` Jason Merrill
2023-02-10  1:32             ` Patrick Palka
2023-02-10 14:48               ` Patrick Palka
2023-02-10 16:51                 ` Patrick Palka
2023-02-14 23:02                   ` Jason Merrill
2023-01-30 20:02 ` [PATCH 1/2] c++: make manifestly_const_eval tri-state Jason Merrill
2023-02-03 21:21   ` Patrick Palka

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).