* [tuples][patch] Convert pass_remove_useless_stmts and related constant folding infrastructure
@ 2008-02-26 21:40 Bill Maddox
0 siblings, 0 replies; 4+ messages in thread
From: Bill Maddox @ 2008-02-26 21:40 UTC (permalink / raw)
To: gcc-patches; +Cc: Diego Novillo
[-- Attachment #1: Type: text/plain, Size: 2255 bytes --]
This patch enables pass_remove_useless_stmts, and includes conversion of
much of the constant folding infrastructure in tree-ssa-ccp.c.
* tree-ssa-ccp.c (maybe_fold_stmt_addition):
Reinstated this function for tuples as-is.
(valid_gimple_rhs_p): New function. Mostly lifted from
valid_gimple_expression_p, which is likely obsolete.
(fold_stmt_r): Reinstated commented-out cases for
tuples. Replaced call to obsolete function set_rhs.
(get_maxval_strlen): Convert to tuples.
(ccp_fold_builtin): Partial conversion to tuples.
(fold_gimple_assign): New function.
(fold_gimple_cond): New function.
(fold_gimple_call): New function.
(fold_stmt): Convert to tuples.
(fold_stmt_inplace): Convert to tuples.
* tree-ssa-propagate.c (substitute_and_fold):
Update call to fold_stmt for revised argument signature.
* gimple-dummy.c (fold_stmt): Removed dummy definition.
* gimplify.c (gimplify_call_expr): Removed obsolete
manipulation of TREE_NOTHROW flag.
* cfgexpand.c (gimple_to_tree): Set nothrow flag
of call expression based on call statement flags.
Handle GIMPLE_NOP statement.
* tree-flow.h (notice_special_calls, fold_stmt):
Update prototypes for tuples.
* gimple.c (gimple_cond_set_condition_from_tree):
New function.
(gimple_seq_has_side_effects): New function.
* gimple.h (gimple_cond_set_condition_from_tree,
gimple_seq_has_side_effects): New prototypes.
(gimple_call_nothrow_p): New function.
(gsi_stmt_ptr): Add comment regarding usage of this
function vs. gsi_replace.
* tree-cfg.c (struct rus_data): Convert to tuples.
(remove_useless_stmts_1, remove_useless_stmts_warn_notreached,
remove_useless_stmts_cond, remove_useless_stmts_tf,
remove_useless_stmts_tc, remove_useless_stmts_goto,
remove_useless_stmts_label, notice_special_calls,
remove_useless_stmts): Convert to tuples.
(update_call_expr_flags): Removed.
* passes.c (init_optimization_passes): Enable
pass_remove_useless_stmts.
Committed to gimple-tuples-branch revision 132667.
Approved by dnovillo.
--Bill
[-- Attachment #2: patch-rus-02-25.txt --]
[-- Type: text/plain, Size: 62223 bytes --]
Index: gcc/tree-ssa-ccp.c
===================================================================
--- gcc/tree-ssa-ccp.c (revision 132666)
+++ gcc/tree-ssa-ccp.c (working copy)
@@ -1991,8 +1991,6 @@ maybe_fold_stmt_indirect (tree expr, tre
}
-/* FIXME tuples. */
-#if 0
/* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR.
A quaint feature extant in our address arithmetic is that there
@@ -2079,7 +2077,102 @@ maybe_fold_stmt_addition (tree expr)
return t;
}
-#endif
+
+/* Return true if EXPR is an acceptable right-hand-side for a
+ GIMPLE assignment. We validate the entire tree, not just
+ the root node, thus catching expressions that embed complex
+ operands that are not permitted in GIMPLE. This function
+ is needed because the folding routines in fold-const.c
+ may return such expressions in some cases, e.g., an array
+ access with an embedded index addition. It may make more
+ sense to have folding routines that are sensitive to the
+ constraints on GIMPLE operands, rather than abandoning any
+ any attempt to fold if the usual folding turns out to be too
+ aggressive. */
+
+static bool
+valid_gimple_rhs_p (tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+
+ switch (TREE_CODE_CLASS (code))
+ {
+ case tcc_declaration:
+ if (!is_gimple_variable (expr))
+ return false;
+ break;
+
+ case tcc_constant:
+ /* All constants are ok. */
+ break;
+
+ case tcc_binary:
+ case tcc_comparison:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0))
+ || !is_gimple_val (TREE_OPERAND (expr, 1)))
+ return false;
+ break;
+
+ case tcc_unary:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0)))
+ return false;
+ break;
+
+ case tcc_expression:
+ switch (code)
+ {
+ case ADDR_EXPR:
+ {
+ tree t = TREE_OPERAND (expr, 0);
+ while (handled_component_p (t))
+ {
+ /* ??? More checks needed, see the GIMPLE verifier. */
+ if ((TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF)
+ && !is_gimple_val (TREE_OPERAND (t, 1)))
+ return false;
+ t = TREE_OPERAND (t, 0);
+ }
+ if (!is_gimple_id (t))
+ return false;
+ }
+ break;
+
+ case TRUTH_NOT_EXPR:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0)))
+ return false;
+ break;
+
+ case TRUTH_AND_EXPR:
+ case TRUTH_XOR_EXPR:
+ case TRUTH_OR_EXPR:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0))
+ || !is_gimple_val (TREE_OPERAND (expr, 1)))
+ return false;
+ break;
+
+ case EXC_PTR_EXPR:
+ case FILTER_EXPR:
+ break;
+
+ default:
+ return false;
+ }
+ break;
+
+ case tcc_vl_exp:
+ return false;
+
+ case tcc_exceptional:
+ if (code != SSA_NAME)
+ return false;
+
+ default:
+ return false;
+ }
+
+ return true;
+}
/* For passing state through walk_tree into fold_stmt_r and its
children. */
@@ -2168,24 +2261,6 @@ fold_stmt_r (tree *expr_p, int *walk_sub
recompute_tree_invariant_for_addr_expr (expr);
return NULL_TREE;
- case POINTER_PLUS_EXPR:
- /* FIXME tuples. */
-#if 0
- t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
- if (t)
- return t;
- t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
- if (t)
- return t;
- *walk_subtrees = 0;
-
- t = maybe_fold_stmt_addition (expr);
-#else
- t = NULL;
- gimple_unreachable ();
-#endif
- break;
-
case COMPONENT_REF:
t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
if (t)
@@ -2211,9 +2286,21 @@ fold_stmt_r (tree *expr_p, int *walk_sub
t = maybe_fold_tmr (expr);
break;
+ case POINTER_PLUS_EXPR:
+ t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
+ if (t)
+ return t;
+ t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
+ if (t)
+ return t;
+ *walk_subtrees = 0;
+
+ t = maybe_fold_stmt_addition (expr);
+ break;
+
+ /* FIXME tuples. This case is likely redundant.
+ See a similar folding attempt in fold_gimple_assignment. */
case COND_EXPR:
- /* FIXME tuples. */
-#if 0
if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
{
tree op0 = TREE_OPERAND (expr, 0);
@@ -2224,18 +2311,18 @@ fold_stmt_r (tree *expr_p, int *walk_sub
tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
TREE_OPERAND (op0, 1));
- set = tem && set_rhs (expr_p, tem);
+ /* This is actually a conditional expression, not a GIMPLE
+ conditional statement, however, the valid_gimple_rhs_p
+ test still applies. */
+ set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
if (set)
{
- t = *expr_p;
+ COND_EXPR_COND (expr) = tem;
+ t = expr;
break;
}
}
-#else
- t = NULL;
- gimple_unreachable ();
-#endif
return NULL_TREE;
default:
@@ -2254,8 +2341,6 @@ fold_stmt_r (tree *expr_p, int *walk_sub
}
-/* FIXME tuples. */
-#if 0
/* Return the string length, maximum string length or maximum value of
ARG in LENGTH.
If ARG is an SSA name variable, follow its use-def chains. If LENGTH
@@ -2268,7 +2353,8 @@ fold_stmt_r (tree *expr_p, int *walk_sub
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
- tree var, def_stmt, val;
+ tree var, val;
+ gimple def_stmt;
if (TREE_CODE (arg) != SSA_NAME)
{
@@ -2316,71 +2402,74 @@ get_maxval_strlen (tree arg, tree *lengt
var = arg;
def_stmt = SSA_NAME_DEF_STMT (var);
- switch (TREE_CODE (def_stmt))
+ switch (gimple_code (def_stmt))
{
- case GIMPLE_MODIFY_STMT:
+ case GIMPLE_ASSIGN:
{
tree rhs;
/* The RHS of the statement defining VAR must either have a
constant length or come from another SSA_NAME with a constant
length. */
- rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
- STRIP_NOPS (rhs);
- return get_maxval_strlen (rhs, length, visited, type);
- }
+ if (gimple_num_ops (def_stmt) == 2)
+ {
+ rhs = gimple_assign_rhs1 (def_stmt);
+ STRIP_NOPS (rhs);
+ return get_maxval_strlen (rhs, length, visited, type);
+ }
+ }
+ return false;
- case PHI_NODE:
+ case GIMPLE_PHI:
{
/* All the arguments of the PHI node must have the same constant
length. */
- int i;
-
- for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
- {
- tree arg = PHI_ARG_DEF (def_stmt, i);
-
- /* If this PHI has itself as an argument, we cannot
- determine the string length of this argument. However,
- if we can find a constant string length for the other
- PHI args then we can still be sure that this is a
- constant string length. So be optimistic and just
- continue with the next argument. */
- if (arg == PHI_RESULT (def_stmt))
- continue;
-
- if (!get_maxval_strlen (arg, length, visited, type))
- return false;
- }
+ unsigned i;
- return true;
- }
+ for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+ {
+ tree arg = gimple_phi_arg (def_stmt, i)->def;
+
+ /* If this PHI has itself as an argument, we cannot
+ determine the string length of this argument. However,
+ if we can find a constant string length for the other
+ PHI args then we can still be sure that this is a
+ constant string length. So be optimistic and just
+ continue with the next argument. */
+ if (arg == gimple_phi_result (def_stmt))
+ continue;
+
+ if (!get_maxval_strlen (arg, length, visited, type))
+ return false;
+ }
+ }
+ return true;
default:
- break;
+ return false;
}
-
-
- return false;
}
-/* Fold builtin call FN in statement STMT. If it cannot be folded into a
+/* Fold builtin call in statement STMT. If it cannot be folded into a
constant, return NULL_TREE. Otherwise, return its constant value. */
static tree
-ccp_fold_builtin (tree stmt, tree fn)
+ccp_fold_builtin (gimple stmt)
{
tree result, val[3];
tree callee, a;
int arg_mask, i, type;
bitmap visited;
bool ignore;
- call_expr_arg_iterator iter;
int nargs;
- ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
+ gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+ ignore = (gimple_call_lhs (stmt) == NULL);
+
+ /* FIXME tuples. */
+#if 0
/* First try the generic builtin folder. If that succeeds, return the
result directly. */
result = fold_call_expr (fn, ignore);
@@ -2390,15 +2479,16 @@ ccp_fold_builtin (tree stmt, tree fn)
STRIP_NOPS (result);
return result;
}
+#endif
/* Ignore MD builtins. */
- callee = get_callee_fndecl (fn);
+ callee = gimple_call_fndecl (stmt);
if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
return NULL_TREE;
/* If the builtin could not be folded, and it has no argument list,
we're done. */
- nargs = call_expr_nargs (fn);
+ nargs = gimple_call_num_args (stmt);
if (nargs == 0)
return NULL_TREE;
@@ -2442,16 +2532,15 @@ ccp_fold_builtin (tree stmt, tree fn)
visited = BITMAP_ALLOC (NULL);
memset (val, 0, sizeof (val));
- init_call_expr_arg_iterator (fn, &iter);
- for (i = 0; arg_mask; i++, arg_mask >>= 1)
+ for (i = 0; i < nargs; i++)
{
- a = next_call_expr_arg (&iter);
- if (arg_mask & 1)
- {
- bitmap_clear (visited);
- if (!get_maxval_strlen (a, &val[i], visited, type))
- val[i] = NULL_TREE;
- }
+ if ((arg_mask >> i) & 1)
+ {
+ a = gimple_call_arg (stmt, i);
+ bitmap_clear (visited);
+ if (!get_maxval_strlen (a, &val[i], visited, type))
+ val[i] = NULL_TREE;
+ }
}
BITMAP_FREE (visited);
@@ -2462,7 +2551,8 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STRLEN:
if (val[0])
{
- tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
+ tree new_val =
+ fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
/* If the result is not a valid gimple value, or not a cast
of a valid gimple value, then we can not use the result. */
@@ -2476,32 +2566,30 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STRCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 2)
result = fold_builtin_strcpy (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
val[1]);
break;
case BUILT_IN_STRNCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 3)
result = fold_builtin_strncpy (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
val[1]);
break;
case BUILT_IN_FPUTS:
- result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
- val[0]);
+ result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ ignore, false, val[0]);
break;
case BUILT_IN_FPUTS_UNLOCKED:
- result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
- val[0]);
+ result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ ignore, true, val[0]);
break;
case BUILT_IN_MEMCPY_CHK:
@@ -2510,10 +2598,10 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_MEMSET_CHK:
if (val[2] && is_gimple_val (val[2]))
result = fold_builtin_memory_chk (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
- CALL_EXPR_ARG (fn, 3),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
+ gimple_call_arg (stmt, 3),
val[2], ignore,
DECL_FUNCTION_CODE (callee));
break;
@@ -2522,27 +2610,30 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STPCPY_CHK:
if (val[1] && is_gimple_val (val[1]))
result = fold_builtin_stxcpy_chk (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
val[1], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRNCPY_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
- CALL_EXPR_ARG (fn, 3),
+ result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
+ gimple_call_arg (stmt, 3),
val[2]);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
+ /* FIXME tuples. */
+#if 0
if (val[1] && is_gimple_val (val[1]))
result = fold_builtin_snprintf_chk (fn, val[1],
DECL_FUNCTION_CODE (callee));
+#endif
break;
default:
@@ -2554,105 +2645,251 @@ ccp_fold_builtin (tree stmt, tree fn)
return result;
}
+/* Attempt to fold an assignment statement. Return true if any changes were
+ made. The statement is modified in place, but its RHS class may change.
+ It is assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_assign (gimple stmt)
+{
+ enum tree_code subcode = gimple_assign_subcode (stmt);
+
+ tree result = NULL;
-/* Fold the statement pointed to by STMT_P. In some cases, this function may
+ switch (get_gimple_rhs_class (subcode))
+ {
+ case GIMPLE_SINGLE_RHS:
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+
+ /* Try to fold a conditional expression. */
+ if (TREE_CODE (rhs) == COND_EXPR)
+ {
+ tree temp = fold (COND_EXPR_COND (rhs));
+ if (temp != COND_EXPR_COND (rhs))
+ result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
+ COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+ }
+
+ /* If we couldn't fold the RHS, hand over to the generic
+ fold routines. */
+ if (result == NULL_TREE)
+ result = fold (rhs);
+
+ /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
+ that may have been added by fold, and "useless" type
+ conversions that might now be apparent due to propagation. */
+ STRIP_USELESS_TYPE_CONVERSION (result);
+
+ if (result != rhs && valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ else
+ /* It is possible that fold_stmt_r simplified the RHS.
+ Make sure that the subcode of this statement still
+ reflects the principal operator of the rhs operand. */
+ gimple_assign_set_rhs_from_tree (stmt, rhs);
+ }
+ break;
+
+ case GIMPLE_UNARY_RHS:
+ result = fold_unary (subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ }
+ break;
+
+ case GIMPLE_BINARY_RHS:
+ result = fold_binary (subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ }
+ break;
+
+ case GIMPLE_INVALID_RHS:
+ gcc_unreachable();
+ }
+
+ return false;
+}
+
+/* Attempt to fold a conditional statement. Return true if any changes were
+ made. We only attempt to fold the condition expression, and do not perform
+ any transformation that would require alteration of the cfg. It is
+ assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_cond (gimple stmt)
+{
+ tree result = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node,
+ gimple_cond_lhs (stmt),
+ gimple_cond_rhs (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
+ {
+ gimple_cond_set_condition_from_tree (stmt, result);
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/* Attempt to fold a call statement referenced by the statement iterator GSI.
+ The statement may be replaced by another statement, e.g., if the call
+ simplifies to a constant value. Return true if any changes were made.
+ It is assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_call (gimple_stmt_iterator *gsi)
+{
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree callee = gimple_call_fndecl (stmt);
+
+ /* Check for builtins that CCP can handle using information not
+ available in the generic fold routines. */
+ if (callee && DECL_BUILT_IN (callee))
+ {
+ tree result = ccp_fold_builtin (stmt);
+ if (result)
+ {
+ /* The call has simplified to a constant value, so
+ we can no longer represent it with a GIMPLE_CALL.
+ Introduce a new GIMPLE_ASSIGN statement. */
+ gimple new_stmt;
+ tree lhs = gimple_call_lhs (stmt);
+
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ new_stmt = gimple_build_assign (lhs, result);
+ gimple_set_locus (new_stmt, gimple_locus (stmt));
+ gsi_replace (gsi, new_stmt, false);
+ return true;
+ }
+ }
+ else
+ {
+ /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
+ here are when we've propagated the address of a decl into the
+ object slot. */
+ /* ??? Should perhaps do this in fold proper. However, doing it
+ there requires that we create a new CALL_EXPR, and that requires
+ copying EH region info to the new node. Easier to just do it
+ here where we can just smash the call operand. */
+ /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
+ callee = gimple_call_fn (stmt);
+ if (TREE_CODE (callee) == OBJ_TYPE_REF
+ && lang_hooks.fold_obj_type_ref
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND
+ (OBJ_TYPE_REF_OBJECT (callee), 0)))
+ {
+ tree t;
+
+ /* ??? Caution: Broken ADDR_EXPR semantics means that
+ looking at the type of the operand of the addr_expr
+ can yield an array type. See silly exception in
+ check_pointer_types_r. */
+ t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
+ t = lang_hooks.fold_obj_type_ref (callee, t);
+ if (t)
+ {
+ gimple_call_set_fn (stmt, t);
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+/* Fold the statement pointed to by GSI. In some cases, this function may
replace the whole statement with a new one. Returns true iff folding
makes any changes. */
bool
-fold_stmt (tree *stmt_p)
+fold_stmt (gimple_stmt_iterator *gsi)
{
- tree rhs, result, stmt;
+
struct fold_stmt_r_data fold_stmt_r_data;
+ struct walk_stmt_info wi;
+
bool changed = false;
bool inside_addr_expr = false;
- stmt = *stmt_p;
+ gimple stmt = gsi_stmt (*gsi);
fold_stmt_r_data.stmt = stmt;
fold_stmt_r_data.changed_p = &changed;
fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
- /* If we replaced constants and the statement makes pointer dereferences,
- then we may need to fold instances of *&VAR into VAR, etc. */
- if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
- {
- *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
- return true;
- }
+ memset (&wi, 0, sizeof (wi));
+ wi.info = &fold_stmt_r_data;
- rhs = get_rhs (stmt);
- if (!rhs)
- return changed;
- result = NULL_TREE;
+ /* Fold the individual operands.
+ For example, fold instances of *&VAR into VAR, etc. */
+ gcc_assert (!walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi));
- if (TREE_CODE (rhs) == CALL_EXPR)
+ /* Fold the main computation performed by the statement. */
+ switch (gimple_code (stmt))
{
- tree callee;
-
- /* Check for builtins that CCP can handle using information not
- available in the generic fold routines. */
- callee = get_callee_fndecl (rhs);
- if (callee && DECL_BUILT_IN (callee))
- result = ccp_fold_builtin (stmt, rhs);
- else
- {
- /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
- here are when we've propagated the address of a decl into the
- object slot. */
- /* ??? Should perhaps do this in fold proper. However, doing it
- there requires that we create a new CALL_EXPR, and that requires
- copying EH region info to the new node. Easier to just do it
- here where we can just smash the call operand. Also
- CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
- copied, fold_call_expr does not have not information. */
- callee = CALL_EXPR_FN (rhs);
- if (TREE_CODE (callee) == OBJ_TYPE_REF
- && lang_hooks.fold_obj_type_ref
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
- && DECL_P (TREE_OPERAND
- (OBJ_TYPE_REF_OBJECT (callee), 0)))
- {
- tree t;
+ case GIMPLE_ASSIGN:
+ changed |= fold_gimple_assign (stmt);
+ break;
+ case GIMPLE_COND:
+ changed |= fold_gimple_cond (stmt);
+ break;
+ case GIMPLE_CALL:
+ /* The entire statement may be replaced in this case. */
+ changed |= fold_gimple_call (gsi);
+ break;
- /* ??? Caution: Broken ADDR_EXPR semantics means that
- looking at the type of the operand of the addr_expr
- can yield an array type. See silly exception in
- check_pointer_types_r. */
-
- t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
- t = lang_hooks.fold_obj_type_ref (callee, t);
- if (t)
- {
- CALL_EXPR_FN (rhs) = t;
- changed = true;
- }
- }
- }
+ default:
+ return changed;
+ break;
}
- else if (TREE_CODE (rhs) == COND_EXPR)
+
+ /* FIXME tuples. This seems to be a reasonable optimization, but it breaks
+ some callers. Since we weren't doing this pre-tuples, it is best not to
+ pursue it now. */
+#if 0
+ stmt = gsi_stmt (*gsi);
+ if (!gimple_has_side_effects (stmt))
{
- tree temp = fold (COND_EXPR_COND (rhs));
- if (temp != COND_EXPR_COND (rhs))
- result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
- COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+ gimple new_stmt = gimple_build_nop ();
+ gimple_set_locus (new_stmt, gimple_locus (stmt));
+ gsi_replace (gsi, new_stmt, false);
+ return true;
}
-
- /* If we couldn't fold the RHS, hand over to the generic fold routines. */
- if (result == NULL_TREE)
- result = fold (rhs);
-
- /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
- may have been added by fold, and "useless" type conversions that might
- now be apparent due to propagation. */
- STRIP_USELESS_TYPE_CONVERSION (result);
-
- if (result != rhs)
- changed |= set_rhs (stmt_p, result);
+#endif
return changed;
}
-#endif
/* Perform the minimal folding on statement STMT. Only operations like
*&x created by constant propagation are handled. The statement cannot
@@ -2662,10 +2899,9 @@ fold_stmt (tree *stmt_p)
bool
fold_stmt_inplace (gimple stmt)
{
- enum gimple_code code;
- tree new_rhs;
struct fold_stmt_r_data fold_stmt_r_data;
struct walk_stmt_info wi;
+
bool changed = false;
bool inside_addr_expr = false;
@@ -2676,50 +2912,34 @@ fold_stmt_inplace (gimple stmt)
memset (&wi, 0, sizeof (wi));
wi.info = &fold_stmt_r_data;
- /* Fold the individual operands. */
- walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi);
+ /* Fold the individual operands.
+ For example, fold instances of *&VAR into VAR, etc.
+
+ It appears that, at one time, maybe_fold_stmt_indirect
+ would cause the walk to return non-null in order to
+ signal that the entire statement should be replaced with
+ a call to _builtin_trap. This functionality is currently
+ disabled, as noted in a FIXME, and cannot be supported here. */
+
+ gcc_assert (!walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi));
/* Fold the main computation performed by the statement. */
- code = gimple_code (stmt);
- if (code == GIMPLE_ASSIGN)
- {
- tree lhs = gimple_assign_lhs (stmt);
- tree rhs1 = gimple_assign_rhs1 (stmt);
- tree rhs2 = gimple_assign_rhs2 (stmt);
- tree type = TREE_TYPE (lhs);
- enum tree_code rhs_code;
- enum gimple_rhs_class rhs_class;
-
- rhs_code = gimple_assign_subcode (stmt);
- rhs_class = get_gimple_rhs_class (rhs_code);
- if (rhs_class == GIMPLE_BINARY_RHS)
- new_rhs = fold_binary (rhs_code, type, rhs1, rhs2);
- else if (rhs_class == GIMPLE_UNARY_RHS)
- new_rhs = fold_unary (rhs_code, type, rhs1);
- else if (rhs_class == GIMPLE_SINGLE_RHS)
- new_rhs = rhs1;
- else
- gcc_unreachable ();
- }
- else if (code == GIMPLE_COND)
- new_rhs = fold_binary (gimple_cond_code (stmt), boolean_type_node,
- gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
- else
+ switch (gimple_code (stmt))
{
- /* No other statement has a foldable expression, so folding its
- individual operands is enough. */
- return changed;
- }
+ case GIMPLE_ASSIGN:
+ changed |= fold_gimple_assign (stmt);
+ break;
+ case GIMPLE_COND:
+ changed |= fold_gimple_cond (stmt);
+ break;
- /* FIXME tuples. */
-#if 0
- changed |= set_rhs (&stmt, new_rhs);
-#else
- gimple_unreachable ();
-#endif
+ default:
+ break;
+ }
return changed;
}
+
/* FIXME tuples. */
#if 0
\f
Index: gcc/tree-ssa-propagate.c
===================================================================
--- gcc/tree-ssa-propagate.c (revision 132666)
+++ gcc/tree-ssa-propagate.c (working copy)
@@ -1292,7 +1292,7 @@ substitute_and_fold (prop_value_t *prop_
gimple old_stmt = stmt;
tree rhs;
- fold_stmt (gsi_stmt_ptr (&i));
+ fold_stmt (&i);
stmt = gsi_stmt (i);
/* If we cleaned up EH information from the statement,
Index: gcc/gimple-dummy.c
===================================================================
--- gcc/gimple-dummy.c (revision 132666)
+++ gcc/gimple-dummy.c (working copy)
@@ -28,7 +28,6 @@ DUMMY_FN (compute_data_dependences_for_l
DUMMY_FN (dump_ddrs)
DUMMY_FN (estimate_numbers_of_iterations)
DUMMY_FN (expr_invariant_in_loop_p)
-DUMMY_FN (fold_stmt)
DUMMY_FN (free_data_refs)
DUMMY_FN (free_dependence_relations)
DUMMY_FN (free_numbers_of_iterations_estimates)
Index: gcc/gimplify.c
===================================================================
--- gcc/gimplify.c (revision 132666)
+++ gcc/gimplify.c (working copy)
@@ -2284,7 +2284,6 @@ gimplify_call_expr (tree *expr_p, gimple
CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
CALL_CANNOT_INLINE_P (*expr_p)
= CALL_CANNOT_INLINE_P (call);
- TREE_NOTHROW (*expr_p) = TREE_NOTHROW (call);
SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
/* Set CALL_EXPR_VA_ARG_PACK. */
Index: gcc/cfgexpand.c
===================================================================
--- gcc/cfgexpand.c (revision 132666)
+++ gcc/cfgexpand.c (working copy)
@@ -179,6 +179,9 @@ gimple_to_tree (gimple stmt)
if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
TREE_SIDE_EFFECTS (t) = 1;
+ if (gimple_call_flags (stmt) & ECF_NOTHROW)
+ TREE_NOTHROW (t) = 1;
+
/* If the call has a LHS then create a MODIFY_EXPR to hold it. */
if (gimple_call_lhs (stmt))
t = build_gimple_modify_stmt (gimple_call_lhs (stmt), t);
@@ -203,6 +206,9 @@ gimple_to_tree (gimple stmt)
NULL, label_vec);
}
break;
+
+ case GIMPLE_NOP:
+ return build1 (NOP_EXPR, void_type_node, size_zero_node);
default:
error ("Unrecognized GIMPLE statement during RTL expansion");
Index: gcc/tree-flow.h
===================================================================
--- gcc/tree-flow.h (revision 132666)
+++ gcc/tree-flow.h (working copy)
@@ -717,7 +717,7 @@ extern gimple last_and_only_stmt (basic_
extern edge find_taken_edge (basic_block, tree);
extern basic_block label_to_block_fn (struct function *, tree);
#define label_to_block(t) (label_to_block_fn (cfun, t))
-extern void notice_special_calls (tree);
+extern void notice_special_calls (gimple);
extern void clear_special_calls (void);
extern void verify_stmts (void);
extern void verify_gimple (void);
@@ -888,7 +888,7 @@ tree get_current_def (tree);
void set_current_def (tree, tree);
/* In tree-ssa-ccp.c */
-bool fold_stmt (gimple *);
+bool fold_stmt (gimple_stmt_iterator *);
bool fold_stmt_inplace (gimple);
tree widen_bitfield (tree, tree, tree);
Index: gcc/gimple.c
===================================================================
--- gcc/gimple.c (revision 132666)
+++ gcc/gimple.c (working copy)
@@ -412,6 +412,20 @@ gimple_build_cond_from_tree (tree cond,
return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
+/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
+ boolean expression tree COND. */
+
+void
+gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
+{
+ enum tree_code code;
+ tree lhs, rhs;
+
+ gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
+ gimple_cond_set_code (stmt, code);
+ gimple_cond_set_lhs (stmt, lhs);
+ gimple_cond_set_rhs (stmt, rhs);
+}
/* Build a GIMPLE_LABEL statement for LABEL. */
@@ -1742,4 +1756,18 @@ gimple_has_side_effects (gimple s)
return false;
}
+/* Return true if any statement in STMTS has side effects. */
+
+bool
+gimple_seq_has_side_effects (gimple_seq stmts)
+{
+ gimple_stmt_iterator gsi;
+
+ for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
+ if (gimple_has_side_effects (gsi_stmt (gsi)))
+ return true;
+
+ return false;
+}
+
#include "gt-gimple.h"
Index: gcc/gimple.h
===================================================================
--- gcc/gimple.h (revision 132666)
+++ gcc/gimple.h (working copy)
@@ -566,7 +566,9 @@ bool is_gimple_operand (const_tree);
void gimple_set_modified (gimple, bool);
void gimple_cond_get_ops_from_tree (tree, enum tree_code *, tree *, tree *);
gimple gimple_build_cond_from_tree (tree, tree, tree);
+void gimple_cond_set_condition_from_tree (gimple, tree);
bool gimple_has_side_effects (gimple);
+bool gimple_seq_has_side_effects (gimple_seq);
/* In builtins.c */
extern bool validate_arglist (const_gimple, ...);
@@ -1354,6 +1356,18 @@ gimple_call_noreturn_p (gimple s)
return (gimple_call_flags (s) & ECF_NORETURN) != 0;
}
+
+/* Return true if S is a nothrow call. */
+
+static inline bool
+gimple_call_nothrow_p (gimple s)
+{
+ if (gimple_code (s) != GIMPLE_CALL)
+ return false;
+ return (gimple_call_flags (s) & ECF_NOTHROW) != 0;
+}
+
+
/* Return the code of the predicate computed by conditional statement GS. */
static inline enum tree_code
@@ -2939,7 +2953,11 @@ gsi_after_labels (basic_block bb)
return gsi;
}
-/* Return a pointer to the current stmt. */
+/* Return a pointer to the current stmt.
+
+ NOTE: You may want to use gsi_replace on the iterator itself,
+ as this performs additional bookkeeping that will not be done
+ if you simply assign through a pointer returned by gsi_stmt_ptr. */
static inline gimple *
gsi_stmt_ptr (gimple_stmt_iterator *i)
Index: gcc/tree-cfg.c
===================================================================
--- gcc/tree-cfg.c (revision 132666)
+++ gcc/tree-cfg.c (working copy)
@@ -1450,6 +1450,8 @@ single_noncomplex_succ (basic_block bb)
* Some unnecessary BIND_EXPRs are removed
+ * GOTO_EXPRs immediately preceding destination are removed.
+
Clearly more work could be done. The trick is doing the analysis
and removal fast enough to be a net improvement in compile times.
@@ -1459,213 +1461,204 @@ single_noncomplex_succ (basic_block bb)
struct rus_data
{
- tree *last_goto;
bool repeat;
bool may_throw;
bool may_branch;
bool has_label;
+ bool last_was_goto;
+ gimple_stmt_iterator last_goto_gsi;
};
-/* FIXME tuples. */
-#if 0
-static void remove_useless_stmts_1 (tree *, struct rus_data *);
-#endif
+
+static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
+
+/* Given a statement sequence, find the first executable statement with
+ location information, and warn that it is unreachable. When searching,
+ descend into containers in execution order. */
static bool
-remove_useless_stmts_warn_notreached (tree stmt)
+remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
- if (EXPR_HAS_LOCATION (stmt))
- {
- location_t loc = EXPR_LOCATION (stmt);
- if (LOCATION_LINE (loc) > 0)
- {
- warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
- return true;
- }
- }
+ gimple_stmt_iterator gsi;
- switch (TREE_CODE (stmt))
+ for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
{
- case STATEMENT_LIST:
- {
- tree_stmt_iterator i;
- for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
- if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
- return true;
- }
- break;
+ gimple stmt = gsi_stmt (gsi);
- case COND_EXPR:
- if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
- return true;
- if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
- return true;
- if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
- return true;
- break;
+ if (!gimple_locus_empty_p (stmt))
+ {
+ location_t loc = gimple_locus (stmt);
+ if (LOCATION_LINE (loc) > 0)
+ {
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
+ return true;
+ }
+ }
- case TRY_FINALLY_EXPR:
- case TRY_CATCH_EXPR:
- if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
- return true;
- if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
- return true;
- break;
+ switch (gimple_code (stmt))
+ {
+ /* Unfortunately, we need the CFG now to detect unreachable
+ branches in a conditional, so conditionals are not handled here. */
- case CATCH_EXPR:
- return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
- case EH_FILTER_EXPR:
- return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
- case BIND_EXPR:
- return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
+ case GIMPLE_TRY:
+ if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
+ return true;
+ if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
+ return true;
+ break;
+
+ case GIMPLE_CATCH:
+ return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
- default:
- /* Not a live container. */
- break;
- }
+ case GIMPLE_EH_FILTER:
+ return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
- return false;
-}
+ case GIMPLE_BIND:
+ return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
-/* FIXME tuples. */
+ /* FIXME tuples. The original code pre-tuplification did not
+ descend into the OMP constructs. */
#if 0
-static void
-remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
-{
- tree then_clause, else_clause, cond;
- bool save_has_label, then_has_label, else_has_label;
-
- save_has_label = data->has_label;
- data->has_label = false;
- data->last_goto = NULL;
-
- remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
+ case GIMPLE_OMP_FOR:
+ if (remove_useless_stmts_warn_notreached (gimple_omp_for_pre_body (stmt)))
+ return true;
+ /* FALLTHROUGH */
+ case GIMPLE_OMP_CRITICAL:
+ case GIMPLE_OMP_CONTINUE:
+ case GIMPLE_OMP_MASTER:
+ case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_SECTION:
+ case GIMPLE_OMP_PARALLEL:
+ case GIMPLE_OMP_SECTIONS:
+ case GIMPLE_OMP_SINGLE:
+ return remove_useless_stmts_warn_notreached (gimple_omp_body (stmt));
+#endif
- then_has_label = data->has_label;
- data->has_label = false;
- data->last_goto = NULL;
+ default:
+ break;
+ }
+ }
- remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
+ return false;
+}
- else_has_label = data->has_label;
- data->has_label = save_has_label | then_has_label | else_has_label;
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
- then_clause = COND_EXPR_THEN (*stmt_p);
- else_clause = COND_EXPR_ELSE (*stmt_p);
- cond = fold (COND_EXPR_COND (*stmt_p));
+static void
+remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
+{
+ tree cond;
+ gimple stmt = gsi_stmt (*gsi);
- /* If neither arm does anything at all, we can remove the whole IF. */
- if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
- {
- *stmt_p = build_empty_stmt ();
- data->repeat = true;
- }
+ /* The folded result must still be a conditional statement. */
+ fold_stmt_inplace (stmt);
- /* If there are no reachable statements in an arm, then we can
- zap the entire conditional. */
- else if (integer_nonzerop (cond) && !else_has_label)
- {
- if (warn_notreached)
- remove_useless_stmts_warn_notreached (else_clause);
- *stmt_p = then_clause;
+ /* Attempt to evaluate the condition at compile-time.
+ Because we are in GIMPLE here, only the most trivial
+ comparisons of a constant to a constant can be handled.
+ Calling fold_binary is thus a bit of overkill, as it
+ creates temporary tree nodes that are discarded immediately. */
+ cond = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node,
+ gimple_cond_lhs (stmt),
+ gimple_cond_rhs (stmt));
+
+ /* FIXME tuples. The optimization being applied to GIMPLE_GOTO
+ destinations could be performed for the true and false labels
+ of a GIMPLE_COND as well. This would require tracking a bit
+ more information. */
+
+ /* Replace trivial conditionals with gotos. */
+ if (cond && integer_nonzerop (cond))
+ {
+ /* Goto THEN label. */
+ tree then_label = gimple_cond_true_label (stmt);
+
+ gsi_replace(gsi, gimple_build_goto (then_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
- else if (integer_zerop (cond) && !then_has_label)
+ else if (cond && integer_zerop (cond))
{
- if (warn_notreached)
- remove_useless_stmts_warn_notreached (then_clause);
- *stmt_p = else_clause;
+ /* Goto ELSE label. */
+ tree else_label = gimple_cond_false_label (stmt);
+
+ gsi_replace(gsi, gimple_build_goto (else_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
-
- /* Check a couple of simple things on then/else with single stmts. */
else
{
- tree then_stmt = expr_only (then_clause);
- tree else_stmt = expr_only (else_clause);
-
- /* Notice branches to a common destination. */
- if (then_stmt && else_stmt
- && TREE_CODE (then_stmt) == GOTO_EXPR
- && TREE_CODE (else_stmt) == GOTO_EXPR
- && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
- {
- *stmt_p = then_stmt;
+ tree then_label = gimple_cond_true_label (stmt);
+ tree else_label = gimple_cond_false_label (stmt);
+
+ if (then_label == else_label)
+ {
+ /* Goto common destination. */
+ gsi_replace (gsi, gimple_build_goto (then_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
-
- /* If the THEN/ELSE clause merely assigns a value to a variable or
- parameter which is already known to contain that value, then
- remove the useless THEN/ELSE clause. */
- else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
- {
- if (else_stmt
- && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
- && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
- && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
- COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
- }
- else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
- && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
- || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
- && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
- {
- tree stmt = (TREE_CODE (cond) == EQ_EXPR
- ? then_stmt : else_stmt);
- tree *location = (TREE_CODE (cond) == EQ_EXPR
- ? &COND_EXPR_THEN (*stmt_p)
- : &COND_EXPR_ELSE (*stmt_p));
-
- if (stmt
- && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
- && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
- && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
- *location = alloc_stmt_list ();
- }
}
- /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
- would be re-introduced during lowering. */
- data->last_goto = NULL;
+ gsi_next (gsi);
+
+ data->last_was_goto = false;
}
+/* Helper for remove_useless_stmts_1.
+ Handle the try-finally case for GIMPLE_TRY statements. */
static void
-remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
bool save_may_branch, save_may_throw;
bool this_may_branch, this_may_throw;
+ gimple_seq eval_seq, cleanup_seq;
+ gimple_stmt_iterator eval_gsi, cleanup_gsi;
+
+ gimple stmt = gsi_stmt (*gsi);
+
/* Collect may_branch and may_throw information for the body only. */
save_may_branch = data->may_branch;
save_may_throw = data->may_throw;
data->may_branch = false;
data->may_throw = false;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+ eval_seq = gimple_try_eval (stmt);
+ eval_gsi = gsi_start (eval_seq);
+ remove_useless_stmts_1 (&eval_gsi, data);
this_may_branch = data->may_branch;
this_may_throw = data->may_throw;
data->may_branch |= save_may_branch;
data->may_throw |= save_may_throw;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+ cleanup_seq = gimple_try_cleanup (stmt);
+ cleanup_gsi = gsi_start (cleanup_seq);
+ remove_useless_stmts_1 (&cleanup_gsi, data);
/* If the body is empty, then we can emit the FINALLY block without
the enclosing TRY_FINALLY_EXPR. */
- if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
+ if (!gimple_seq_has_side_effects (eval_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 1);
+ gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
/* If the handler is empty, then we can emit the TRY block without
the enclosing TRY_FINALLY_EXPR. */
- else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+ else if (!gimple_seq_has_side_effects (cleanup_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
@@ -1673,37 +1666,51 @@ remove_useless_stmts_tf (tree *stmt_p, s
string the TRY and FINALLY blocks together. */
else if (!this_may_branch && !this_may_throw)
{
- tree stmt = *stmt_p;
- *stmt_p = TREE_OPERAND (stmt, 0);
- append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
}
+/* Helper for remove_useless_stmts_1.
+ Handle the try-catch case for GIMPLE_TRY statements. */
static void
-remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
bool save_may_throw, this_may_throw;
- tree_stmt_iterator i;
- tree stmt;
+
+ gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
+ gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
+
+ gimple stmt = gsi_stmt (*gsi);
/* Collect may_throw information for the body only. */
save_may_throw = data->may_throw;
data->may_throw = false;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+ eval_seq = gimple_try_eval (stmt);
+ eval_gsi = gsi_start (eval_seq);
+ remove_useless_stmts_1 (&eval_gsi, data);
this_may_throw = data->may_throw;
data->may_throw = save_may_throw;
+ cleanup_seq = gimple_try_cleanup (stmt);
+
/* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
if (!this_may_throw)
{
if (warn_notreached)
- remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ {
+ remove_useless_stmts_warn_notreached (cleanup_seq);
+ }
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
return;
}
@@ -1712,141 +1719,164 @@ remove_useless_stmts_tc (tree *stmt_p, s
no exceptions propagate past this point. */
this_may_throw = true;
- i = tsi_start (TREE_OPERAND (*stmt_p, 1));
- stmt = tsi_stmt (i);
- data->last_goto = NULL;
+ cleanup_gsi = gsi_start (cleanup_seq);
+ stmt = gsi_stmt (cleanup_gsi);
+ data->last_was_goto = false;
- switch (TREE_CODE (stmt))
+ switch (gimple_code (stmt))
{
- case CATCH_EXPR:
- for (; !tsi_end_p (i); tsi_next (&i))
- {
- stmt = tsi_stmt (i);
+ case GIMPLE_CATCH:
+ /* If the first element is a catch, they all must be. */
+ while (!gsi_end_p (cleanup_gsi))
+ {
+ stmt = gsi_stmt (cleanup_gsi);
/* If we catch all exceptions, then the body does not
propagate exceptions past this point. */
- if (CATCH_TYPES (stmt) == NULL)
+ if (gimple_catch_types (stmt) == NULL)
this_may_throw = false;
- data->last_goto = NULL;
- remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
+ data->last_was_goto = false;
+ handler_seq = gimple_catch_handler (stmt);
+ handler_gsi = gsi_start (handler_seq);
+ remove_useless_stmts_1 (&handler_gsi, data);
+ gsi_next (&cleanup_gsi);
}
+ gsi_next (gsi);
break;
- case EH_FILTER_EXPR:
- if (EH_FILTER_MUST_NOT_THROW (stmt))
+ case GIMPLE_EH_FILTER:
+ /* If the first element is an eh_filter, it should stand alone. */
+ if (gimple_eh_filter_must_not_throw (stmt))
this_may_throw = false;
- else if (EH_FILTER_TYPES (stmt) == NULL)
+ else if (gimple_eh_filter_types (stmt) == NULL)
this_may_throw = false;
- remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
+ failure_seq = gimple_eh_filter_failure (stmt);
+ failure_gsi = gsi_start (failure_seq);
+ remove_useless_stmts_1 (&failure_gsi, data);
+ gsi_next (gsi);
break;
default:
- /* Otherwise this is a cleanup. */
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+ /* Otherwise this is a list of cleanup statements. */
+ remove_useless_stmts_1 (&cleanup_gsi, data);
/* If the cleanup is empty, then we can emit the TRY block without
the enclosing TRY_CATCH_EXPR. */
- if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+ if (!gimple_seq_has_side_effects (cleanup_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove(gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
break;
}
+
data->may_throw |= this_may_throw;
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
static void
-remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
{
tree block;
+ gimple_seq body_seq, fn_body_seq;
+ gimple_stmt_iterator body_gsi;
- /* First remove anything underneath the BIND_EXPR. */
- remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
+ gimple stmt = gsi_stmt (*gsi);
- /* If the BIND_EXPR has no variables, then we can pull everything
- up one level and remove the BIND_EXPR, unless this is the toplevel
- BIND_EXPR for the current function or an inlined function.
+ /* First remove anything underneath the BIND_EXPR. */
+
+ body_seq = gimple_bind_body (stmt);
+ body_gsi = gsi_start (body_seq);
+ remove_useless_stmts_1 (&body_gsi, data);
+
+ /* If the GIMPLE_BIND has no variables, then we can pull everything
+ up one level and remove the GIMPLE_BIND, unless this is the toplevel
+ GIMPLE_BIND for the current function or an inlined function.
When this situation occurs we will want to apply this
optimization again. */
- block = BIND_EXPR_BLOCK (*stmt_p);
- if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
- && *stmt_p != gimple_body (current_function_decl)
+ block = gimple_bind_block (stmt);
+ fn_body_seq = gimple_body (current_function_decl);
+ if (gimple_bind_vars (stmt) == NULL_TREE
+ && (gimple_seq_empty_p (fn_body_seq)
+ || stmt != gimple_seq_first_stmt (fn_body_seq))
&& (! block
|| ! BLOCK_ABSTRACT_ORIGIN (block)
|| (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
!= FUNCTION_DECL)))
{
- *stmt_p = BIND_EXPR_BODY (*stmt_p);
+ gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
static void
-remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree dest = GOTO_DESTINATION (*stmt_p);
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree dest = gimple_goto_dest (stmt);
data->may_branch = true;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- /* Record the last goto expr, so that we can delete it if unnecessary. */
+ /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
if (TREE_CODE (dest) == LABEL_DECL)
- data->last_goto = stmt_p;
+ {
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
+ }
+
+ gsi_next(gsi);
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
static void
-remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree label = LABEL_EXPR_LABEL (*stmt_p);
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree label = gimple_label_label (stmt);
data->has_label = true;
/* We do want to jump across non-local label receiver code. */
if (DECL_NONLOCAL (label))
- data->last_goto = NULL;
+ data->last_was_goto = false;
- else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
+ else if (data->last_was_goto
+ && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
{
- *data->last_goto = build_empty_stmt ();
+ /* Replace the preceding GIMPLE_GOTO statement with
+ a GIMPLE_NOP, which will be subsequently removed.
+ In this way, we avoid invalidating other iterators
+ active on the statement sequence. */
+ gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
+ data->last_was_goto = false;
data->repeat = true;
}
/* ??? Add something here to delete unused labels. */
-}
-
-
-/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
- decl. This allows us to eliminate redundant or useless
- calls to "const" functions.
-
- Gimplifier already does the same operation, but we may notice functions
- being const and pure once their calls has been gimplified, so we need
- to update the flag. */
-static void
-update_call_expr_flags (tree call)
-{
- tree decl = get_callee_fndecl (call);
- if (!decl)
- return;
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
- TREE_SIDE_EFFECTS (call) = 0;
- if (TREE_NOTHROW (decl))
- TREE_NOTHROW (call) = 1;
+ gsi_next (gsi);
}
-#endif
/* T is CALL_EXPR. Set current_function_calls_* flags. */
void
-notice_special_calls (tree t)
+notice_special_calls (gimple call)
{
- int flags = call_expr_flags (t);
+ int flags = gimple_call_flags (call);
if (flags & ECF_MAY_BE_ALLOCA)
current_function_calls_alloca = true;
@@ -1865,128 +1895,148 @@ clear_special_calls (void)
current_function_calls_setjmp = false;
}
+/* Remove useless statements from a statement sequence, and perform
+ some preliminary simplifications. */
-/* FIXME tuples. */
-#if 0
static void
-remove_useless_stmts_1 (tree *tp, struct rus_data *data)
+remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree t = *tp, op;
-
- switch (TREE_CODE (t))
+ while (!gsi_end_p (*gsi))
{
- case COND_EXPR:
- remove_useless_stmts_cond (tp, data);
- break;
+ gimple stmt = gsi_stmt (*gsi);
- case TRY_FINALLY_EXPR:
- remove_useless_stmts_tf (tp, data);
- break;
-
- case TRY_CATCH_EXPR:
- remove_useless_stmts_tc (tp, data);
- break;
-
- case BIND_EXPR:
- remove_useless_stmts_bind (tp, data);
- break;
-
- case GOTO_EXPR:
- remove_useless_stmts_goto (tp, data);
- break;
-
- case LABEL_EXPR:
- remove_useless_stmts_label (tp, data);
- break;
-
- case RETURN_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- data->may_branch = true;
- break;
-
- case CALL_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- notice_special_calls (t);
- update_call_expr_flags (t);
- if (stmt_could_throw_p (t))
- data->may_throw = true;
- break;
+ /* FIXME tuples. We follow the pre-tuples code below and use
+ fold_stmt for simplification. Note that this may change the
+ statement type, which is an invitation to trouble. Perhaps
+ fold_stmt_inplace should be used, or folding should be done
+ prior to the switch statement. */
- case MODIFY_EXPR:
- gcc_unreachable ();
-
- case GIMPLE_MODIFY_STMT:
- data->last_goto = NULL;
- fold_stmt (tp);
- op = get_call_expr_in (t);
- if (op)
- {
- update_call_expr_flags (op);
- notice_special_calls (op);
- }
- if (stmt_could_throw_p (t))
- data->may_throw = true;
- break;
-
- case STATEMENT_LIST:
- {
- tree_stmt_iterator i = tsi_start (t);
- while (!tsi_end_p (i))
- {
- t = tsi_stmt (i);
- if (IS_EMPTY_STMT (t))
- {
- tsi_delink (&i);
- continue;
- }
-
- remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
-
- t = tsi_stmt (i);
- if (TREE_CODE (t) == STATEMENT_LIST)
- {
- tsi_link_before (&i, t, TSI_SAME_STMT);
- tsi_delink (&i);
- }
- else
- tsi_next (&i);
- }
- }
- break;
- case ASM_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- break;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_COND:
+ remove_useless_stmts_cond (gsi, data);
+ break;
+
+ case GIMPLE_GOTO:
+ remove_useless_stmts_goto (gsi, data);
+ break;
+
+ case GIMPLE_LABEL:
+ remove_useless_stmts_label (gsi, data);
+ break;
+
+ case GIMPLE_ASSIGN:
+ fold_stmt (gsi);
+ stmt = gsi_stmt (*gsi);
+ data->last_was_goto = false;
+ if (stmt_could_throw_p (stmt))
+ data->may_throw = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_ASM:
+ fold_stmt (gsi);
+ data->last_was_goto = false;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_CALL:
+ fold_stmt (gsi);
+ stmt = gsi_stmt (*gsi);
+ data->last_was_goto = false;
+ if (gimple_code (stmt) == GIMPLE_CALL)
+ notice_special_calls (stmt);
+ /* We used to call update_gimple_call_flags here,
+ which copied side-effects and nothrows status
+ from the function decl to the call. In the new
+ tuplified GIMPLE, the accessors for this information
+ always consult the function decl, so this copying
+ is no longer necessary. */
+ if (stmt_could_throw_p (stmt))
+ data->may_throw = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_RETURN:
+ fold_stmt (gsi);
+ data->last_was_goto = false;
+ data->may_branch = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_BIND:
+ remove_useless_stmts_bind (gsi, data);
+ break;
+
+ case GIMPLE_CATCH:
+ remove_useless_stmts_tc (gsi, data);
+ break;
+
+ case GIMPLE_TRY:
+ remove_useless_stmts_tf (gsi, data);
+ break;
+
+ case GIMPLE_NOP:
+ gsi_remove (gsi, false);
+ break;
+
+ /* FIXME tuples. The original pre-tuples code did not simplify OMP
+ statements, so I'd rather not enable this until we get everything
+ else working, and verify that such simplification is appropriate. */
+#if 0
+ case GIMPLE_OMP_FOR:
+ {
+ gimple_seq pre_body_seq = gimple_omp_body (stmt);
+ gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
+
+ remove_useless_stmts_1 (&pre_body_gsi, data);
+ }
+ /* FALLTHROUGH */
+ case GIMPLE_OMP_CRITICAL:
+ case GIMPLE_OMP_CONTINUE:
+ case GIMPLE_OMP_MASTER:
+ case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_SECTION:
+ case GIMPLE_OMP_PARALLEL:
+ case GIMPLE_OMP_SECTIONS:
+ case GIMPLE_OMP_SINGLE:
+ {
+ gimple_seq body_seq = gimple_omp_body (stmt);
+ gimple_stmt_iterator body_gsi = gsi_start (body_seq);
+
+ remove_useless_stmts_1 (&body_gsi, data);
+ }
+ break;
+#endif
- default:
- data->last_goto = NULL;
- break;
+ default:
+ data->last_was_goto = false;
+ gsi_next (gsi);
+ break;
+ }
}
}
-#endif
+
+/* Walk the function tree, removing useless statements and performing
+ some preliminary simplifications. */
static unsigned int
remove_useless_stmts (void)
{
- /* FIXME tuples. */
-#if 0
struct rus_data data;
clear_special_calls ();
do
{
+ gimple_stmt_iterator gsi;
+
+ gsi = gsi_start (gimple_body (current_function_decl));
memset (&data, 0, sizeof (data));
- remove_useless_stmts_1 (&gimple_body (current_function_decl), &data);
+ remove_useless_stmts_1 (&gsi, &data);
}
while (data.repeat);
return 0;
-#else
- gimple_unreachable ();
- return 0;
-#endif
}
Index: gcc/passes.c
===================================================================
--- gcc/passes.c (revision 132666)
+++ gcc/passes.c (working copy)
@@ -477,8 +477,8 @@ init_optimization_passes (void)
by these passes. */
p = &all_lowering_passes;
/* FIXME tuples. */
-#if 0
NEXT_PASS (pass_remove_useless_stmts);
+#if 0
NEXT_PASS (pass_mudflap_1);
NEXT_PASS (pass_lower_omp);
#endif
^ permalink raw reply [flat|nested] 4+ messages in thread
* [tuples][patch] Convert pass_remove_useless_stmts and related constant folding infrastructure
@ 2008-02-26 9:32 Bill Maddox
0 siblings, 0 replies; 4+ messages in thread
From: Bill Maddox @ 2008-02-26 9:32 UTC (permalink / raw)
To: gcc-patches; +Cc: Diego Novillo
This patch enables pass_remove_useless_stmts, and includes conversion of
much of the constant folding infrastructure in tree-ssa-ccp.c.
* tree-ssa-ccp.c (maybe_fold_stmt_addition):
Reinstated this function for tuples as-is.
(valid_gimple_rhs_p): New function. Mostly lifted from
valid_gimple_expression_p, which is likely obsolete.
(fold_stmt_r): Reinstated commented-out cases for
tuples. Replaced call to obsolete function set_rhs.
(get_maxval_strlen): Convert to tuples.
(ccp_fold_builtin): Partial conversion to tuples.
(fold_gimple_assign): New function.
(fold_gimple_cond): New function.
(fold_gimple_call): New function.
(fold_stmt): Convert to tuples.
(fold_stmt_inplace): Convert to tuples.
* tree-ssa-propagate.c (substitute_and_fold):
Update call to fold_stmt for revised argument signature.
* gimple-dummy.c (fold_stmt): Removed dummy definition.
* gimplify.c (gimplify_call_expr): Removed obsolete
manipulation of TREE_NOTHROW flag.
* cfgexpand.c (gimple_to_tree): Set nothrow flag
of call expression based on call statement flags.
Handle GIMPLE_NOP statement.
* tree-flow.h (notice_special_calls, fold_stmt):
Update prototypes for tuples.
* gimple.c (gimple_cond_set_condition_from_tree):
New function.
(gimple_seq_has_side_effects): New function.
* gimple.h (gimple_cond_set_condition_from_tree,
gimple_seq_has_side_effects): New prototypes.
(gimple_call_nothrow_p): New function.
(gsi_stmt_ptr): Add comment regarding usage of this
function vs. gsi_replace.
* tree-cfg.c (struct rus_data): Convert to tuples.
(remove_useless_stmts_1, remove_useless_stmts_warn_notreached,
remove_useless_stmts_cond, remove_useless_stmts_tf,
remove_useless_stmts_tc, remove_useless_stmts_goto,
remove_useless_stmts_label, notice_special_calls,
remove_useless_stmts): Convert to tuples.
(update_call_expr_flags): Removed.
* passes.c (init_optimization_passes): Enable
pass_remove_useless_stmts.
Committed to gimple-tuples-branch revision 132667.
Approved by dnovillo.
--Bill
^ permalink raw reply [flat|nested] 4+ messages in thread
* Re: [Tuples][Patch] Convert pass_remove_useless_stmts and related constant folding infrastructure
2008-02-21 21:08 [Tuples][Patch] " Bill Maddox
@ 2008-02-21 22:38 ` Diego Novillo
0 siblings, 0 replies; 4+ messages in thread
From: Diego Novillo @ 2008-02-21 22:38 UTC (permalink / raw)
To: Bill Maddox; +Cc: gcc-patches, lto-team
On Thu, Feb 21, 2008 at 12:25 PM, Bill Maddox <maddox@google.com> wrote:
> * tree-ssa-ccp.c (maybe_fold_stmt_addition):
> Reinstated this function for tuples as-is.
> (valid_gimple_rhs_p): New function. Mostly lifted from
> valid_gimple_expression_p, which is likely obsolete.
> (fold_stmt_r): Reinstated commented-out cases for
> tuples. Replaced call to obsolete function set_rhs.
> (get_maxval_strlen): Convert to tuples.
> (ccp_fold_builtin): Partial conversion to tuples.
> (fold_gimple_assign): New function.
> (fold_gimple_cond): New function.
> (fold_gimple_call): New function.
> (fold_stmt): Convert to tuples.
> (fold_stmt_inplace): Convert to tuples.
> * tree-ssa-propagate.c (substitute_and_fold):
> Update call to fold_stmt for revised argument signature.
> * gimple-dummy.c (fold_stmt): Removed dummy definition.
> * gimplify.c (gimplify_call_expr): Removed obsolete
> manipulation of TREE_NOTHROW flag.
> * cfgexpand.c (gimple_to_tree): Set nothrow flag
> of call expression based on call statement flags.
> Handle GIMPLE_NOP statement.
> * tree-flow.h (notice_special_calls, fold_stmt):
> Update prototypes for tuples.
> * gimple.c (gimple_cond_set_condition_from_tree):
> New function.
> (gimple_seq_has_side_effects): New function.
> * gimple.h (gimple_cond_set_condition_from_tree,
> gimple_seq_has_side_effects): New prototypes.
> (gimple_call_nothrow_p): New function.
> (gsi_stmt_ptr): Add comment regarding usage of this
> function vs. gsi_replace.
> * tree-cfg.c (struct rus_data): Convert to tuples.
> (remove_useless_stmts_1, remove_useless_stmts_warn_notreached,
> remove_useless_stmts_cond, remove_useless_stmts_tf,
> remove_useless_stmts_tc, remove_useless_stmts_goto,
> remove_useless_stmts_label, notice_special_calls,
> remove_useless_stmts): Convert to tuples.
> (update_call_expr_flags): Removed.
>
OK, but please wait until I unfreeze the branch. There are too many
new failures.
Diego.
^ permalink raw reply [flat|nested] 4+ messages in thread
* [Tuples][Patch] Convert pass_remove_useless_stmts and related constant folding infrastructure
@ 2008-02-21 21:08 Bill Maddox
2008-02-21 22:38 ` Diego Novillo
0 siblings, 1 reply; 4+ messages in thread
From: Bill Maddox @ 2008-02-21 21:08 UTC (permalink / raw)
To: gcc-patches, Diego Novillo, lto-team
[-- Attachment #1: Type: text/plain, Size: 1844 bytes --]
This patch converts pass_remove_useless_stmts and ports some of the
constant folding infrastructure used in ccp.
--Bill
* tree-ssa-ccp.c (maybe_fold_stmt_addition):
Reinstated this function for tuples as-is.
(valid_gimple_rhs_p): New function. Mostly lifted from
valid_gimple_epxression_p, which is likely obsolete.
(fold_stmt_r): Reinstated commented-out cases for
tuples. Replaced call to obsolete function set_rhs.
(get_maxval_strlen): Convert to tuples.
(ccp_fold_builtin): Partial conversion to tuples.
(fold_gimple_assign): New function.
(fold_gimple_cond): New function.
(fold_gimple_call): New function.
(fold_stmt): Convert to tuples.
(fold_stmt_inplace): Convert to tuples.
* tree-ssa-propagate.c (substitute_and_fold):
Update call to fold_stmt for revised argument signature.
* gimple-dummy.c (fold_stmt): Removed dummy definition.
* gimplify.c (gimplify_call_expr): Removed obsolete
manipulation of TREE_NOTHROW flag.
* cfgexpand.c (gimple_to_tree): Set nothrow flag
of call expression based on call statement flags.
Handle GIMPLE_NOP statement.
* tree-flow.h (notice_special_calls, fold_stmt):
Update prototypes for tuples.
* gimple.c (gimple_cond_set_condition_from_tree):
New function.
(gimple_seq_has_side_effects): New function.
* gimple.h (gimple_cond_set_condition_from_tree,
gimple_seq_has_side_effects): New prototypes.
(gimple_call_nothrow_p): New function.
(gsi_stmt_ptr): Add comment regarding usage of this
function vs. gsi_replace.
* tree-cfg.c (struct rus_data): Convert to tuples.
(remove_useless_stmts_1, remove_useless_stmts_warn_notreached,
remove_useless_stmts_cond, remove_useless_stmts_tf,
remove_useless_stmts_tc, remove_useless_stmts_goto,
remove_useless_stmts_label, notice_special_calls,
remove_useless_stmts): Convert to tuples.
(update_call_expr_flags): Removed.
[-- Attachment #2: tuples-rus-patch-02-20.txt --]
[-- Type: text/plain, Size: 61820 bytes --]
Index: gcc/tree-ssa-ccp.c
===================================================================
--- gcc/tree-ssa-ccp.c (revision 132513)
+++ gcc/tree-ssa-ccp.c (working copy)
@@ -1991,8 +1991,6 @@ maybe_fold_stmt_indirect (tree expr, tre
}
-/* FIXME tuples. */
-#if 0
/* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR.
A quaint feature extant in our address arithmetic is that there
@@ -2079,7 +2077,102 @@ maybe_fold_stmt_addition (tree expr)
return t;
}
-#endif
+
+/* Return true if EXPR is an acceptable right-hand-side for a
+ GIMPLE assignment. We validate the entire tree, not just
+ the root node, thus catching expressions that embed complex
+ operands that are not permitted in GIMPLE. This function
+ is needed because the folding routines in fold-const.c
+ may return such expressions in some cases, e.g., an array
+ access with an embedded index addition. It may make more
+ sense to have folding routines that are sensitive to the
+ constraints on GIMPLE operands, rather than abandoning any
+ attempt to fold if the usual folding turns out to be too
+ aggressive. */
+
+static bool
+valid_gimple_rhs_p (tree expr)
+{
+ enum tree_code code = TREE_CODE (expr);
+
+ switch (TREE_CODE_CLASS (code))
+ {
+ case tcc_declaration:
+ if (!is_gimple_variable (expr))
+ return false;
+ break;
+
+ case tcc_constant:
+ /* All constants are ok. */
+ break;
+
+ case tcc_binary:
+ case tcc_comparison:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0))
+ || !is_gimple_val (TREE_OPERAND (expr, 1)))
+ return false;
+ break;
+
+ case tcc_unary:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0)))
+ return false;
+ break;
+
+ case tcc_expression:
+ switch (code)
+ {
+ case ADDR_EXPR:
+ {
+ tree t = TREE_OPERAND (expr, 0);
+ while (handled_component_p (t))
+ {
+ /* ??? More checks needed, see the GIMPLE verifier. */
+ if ((TREE_CODE (t) == ARRAY_REF
+ || TREE_CODE (t) == ARRAY_RANGE_REF)
+ && !is_gimple_val (TREE_OPERAND (t, 1)))
+ return false;
+ t = TREE_OPERAND (t, 0);
+ }
+ if (!is_gimple_id (t))
+ return false;
+ }
+ break;
+
+ case TRUTH_NOT_EXPR:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0)))
+ return false;
+ break;
+
+ case TRUTH_AND_EXPR:
+ case TRUTH_XOR_EXPR:
+ case TRUTH_OR_EXPR:
+ if (!is_gimple_val (TREE_OPERAND (expr, 0))
+ || !is_gimple_val (TREE_OPERAND (expr, 1)))
+ return false;
+ break;
+
+ case EXC_PTR_EXPR:
+ case FILTER_EXPR:
+ break;
+
+ default:
+ return false;
+ }
+ break;
+
+ case tcc_vl_exp:
+ return false;
+
+ case tcc_exceptional:
+ if (code != SSA_NAME)
+ return false;
+ break;
+ default:
+ return false;
+ }
+
+ return true;
+}
/* For passing state through walk_tree into fold_stmt_r and its
children. */
@@ -2168,24 +2261,6 @@ fold_stmt_r (tree *expr_p, int *walk_sub
recompute_tree_invariant_for_addr_expr (expr);
return NULL_TREE;
- case POINTER_PLUS_EXPR:
- /* FIXME tuples. */
-#if 0
- t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
- if (t)
- return t;
- t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
- if (t)
- return t;
- *walk_subtrees = 0;
-
- t = maybe_fold_stmt_addition (expr);
-#else
- t = NULL;
- gimple_unreachable ();
-#endif
- break;
-
case COMPONENT_REF:
t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
if (t)
@@ -2211,9 +2286,21 @@ fold_stmt_r (tree *expr_p, int *walk_sub
t = maybe_fold_tmr (expr);
break;
+ case POINTER_PLUS_EXPR:
+ t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
+ if (t)
+ return t;
+ t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
+ if (t)
+ return t;
+ *walk_subtrees = 0;
+
+ t = maybe_fold_stmt_addition (expr);
+ break;
+
+ /* FIXME tuples. This case is likely redundant.
+ See a similar folding attempt in fold_gimple_assign. */
case COND_EXPR:
- /* FIXME tuples. */
-#if 0
if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
{
tree op0 = TREE_OPERAND (expr, 0);
@@ -2224,18 +2311,18 @@ fold_stmt_r (tree *expr_p, int *walk_sub
tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
TREE_OPERAND (op0, 0),
TREE_OPERAND (op0, 1));
- set = tem && set_rhs (expr_p, tem);
+ /* This is actually a conditional expression, not a GIMPLE
+ conditional statement, however, the valid_gimple_rhs_p
+ test still applies. */
+ set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
if (set)
{
- t = *expr_p;
+ COND_EXPR_COND (expr) = tem;
+ t = expr;
break;
}
}
-#else
- t = NULL;
- gimple_unreachable ();
-#endif
return NULL_TREE;
default:
@@ -2254,8 +2341,6 @@ fold_stmt_r (tree *expr_p, int *walk_sub
}
-/* FIXME tuples. */
-#if 0
/* Return the string length, maximum string length or maximum value of
ARG in LENGTH.
If ARG is an SSA name variable, follow its use-def chains. If LENGTH
@@ -2268,7 +2353,8 @@ fold_stmt_r (tree *expr_p, int *walk_sub
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
- tree var, def_stmt, val;
+ tree var, val;
+ gimple def_stmt;
if (TREE_CODE (arg) != SSA_NAME)
{
@@ -2316,71 +2402,74 @@ get_maxval_strlen (tree arg, tree *lengt
var = arg;
def_stmt = SSA_NAME_DEF_STMT (var);
- switch (TREE_CODE (def_stmt))
+ switch (gimple_code (def_stmt))
{
- case GIMPLE_MODIFY_STMT:
+ case GIMPLE_ASSIGN:
{
tree rhs;
/* The RHS of the statement defining VAR must either have a
constant length or come from another SSA_NAME with a constant
length. */
- rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
- STRIP_NOPS (rhs);
- return get_maxval_strlen (rhs, length, visited, type);
- }
+ if (gimple_num_ops (def_stmt) == 2)
+ {
+ rhs = gimple_assign_rhs1 (def_stmt);
+ STRIP_NOPS (rhs);
+ return get_maxval_strlen (rhs, length, visited, type);
+ }
+ }
+ return false;
- case PHI_NODE:
+ case GIMPLE_PHI:
{
/* All the arguments of the PHI node must have the same constant
length. */
- int i;
-
- for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
- {
- tree arg = PHI_ARG_DEF (def_stmt, i);
-
- /* If this PHI has itself as an argument, we cannot
- determine the string length of this argument. However,
- if we can find a constant string length for the other
- PHI args then we can still be sure that this is a
- constant string length. So be optimistic and just
- continue with the next argument. */
- if (arg == PHI_RESULT (def_stmt))
- continue;
-
- if (!get_maxval_strlen (arg, length, visited, type))
- return false;
- }
+ unsigned i;
- return true;
- }
+ for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+ {
+ tree arg = gimple_phi_arg (def_stmt, i)->def;
+
+ /* If this PHI has itself as an argument, we cannot
+ determine the string length of this argument. However,
+ if we can find a constant string length for the other
+ PHI args then we can still be sure that this is a
+ constant string length. So be optimistic and just
+ continue with the next argument. */
+ if (arg == gimple_phi_result (def_stmt))
+ continue;
+
+ if (!get_maxval_strlen (arg, length, visited, type))
+ return false;
+ }
+ }
+ return true;
default:
- break;
+ return false;
}
-
-
- return false;
}
-/* Fold builtin call FN in statement STMT. If it cannot be folded into a
+/* Fold builtin call in statement STMT. If it cannot be folded into a
constant, return NULL_TREE. Otherwise, return its constant value. */
static tree
-ccp_fold_builtin (tree stmt, tree fn)
+ccp_fold_builtin (gimple stmt)
{
tree result, val[3];
tree callee, a;
int arg_mask, i, type;
bitmap visited;
bool ignore;
- call_expr_arg_iterator iter;
int nargs;
- ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
+ gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+ ignore = (gimple_call_lhs (stmt) == NULL);
+ result = NULL_TREE;
+ /* FIXME tuples. */
+#if 0
/* First try the generic builtin folder. If that succeeds, return the
result directly. */
result = fold_call_expr (fn, ignore);
@@ -2390,15 +2479,16 @@ ccp_fold_builtin (tree stmt, tree fn)
STRIP_NOPS (result);
return result;
}
+#endif
/* Ignore MD builtins. */
- callee = get_callee_fndecl (fn);
+ callee = gimple_call_fndecl (stmt);
if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
return NULL_TREE;
/* If the builtin could not be folded, and it has no argument list,
we're done. */
- nargs = call_expr_nargs (fn);
+ nargs = gimple_call_num_args (stmt);
if (nargs == 0)
return NULL_TREE;
@@ -2442,16 +2532,15 @@ ccp_fold_builtin (tree stmt, tree fn)
visited = BITMAP_ALLOC (NULL);
memset (val, 0, sizeof (val));
- init_call_expr_arg_iterator (fn, &iter);
- for (i = 0; arg_mask; i++, arg_mask >>= 1)
+ for (i = 0; i < nargs; i++)
{
- a = next_call_expr_arg (&iter);
- if (arg_mask & 1)
- {
- bitmap_clear (visited);
- if (!get_maxval_strlen (a, &val[i], visited, type))
- val[i] = NULL_TREE;
- }
+ if ((arg_mask >> i) & 1)
+ {
+ a = gimple_call_arg (stmt, i);
+ bitmap_clear (visited);
+ if (!get_maxval_strlen (a, &val[i], visited, type))
+ val[i] = NULL_TREE;
+ }
}
BITMAP_FREE (visited);
@@ -2462,7 +2551,8 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STRLEN:
if (val[0])
{
- tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
+ tree new_val =
+ fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
/* If the result is not a valid gimple value, or not a cast
of a valid gimple value, then we can not use the result. */
@@ -2476,32 +2566,30 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STRCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 2)
result = fold_builtin_strcpy (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
val[1]);
break;
case BUILT_IN_STRNCPY:
if (val[1] && is_gimple_val (val[1]) && nargs == 3)
result = fold_builtin_strncpy (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
val[1]);
break;
case BUILT_IN_FPUTS:
- result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
- val[0]);
+ result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ ignore, false, val[0]);
break;
case BUILT_IN_FPUTS_UNLOCKED:
- result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
- val[0]);
+ result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ ignore, true, val[0]);
break;
case BUILT_IN_MEMCPY_CHK:
@@ -2510,10 +2598,10 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_MEMSET_CHK:
if (val[2] && is_gimple_val (val[2]))
result = fold_builtin_memory_chk (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
- CALL_EXPR_ARG (fn, 3),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
+ gimple_call_arg (stmt, 3),
val[2], ignore,
DECL_FUNCTION_CODE (callee));
break;
@@ -2522,27 +2610,30 @@ ccp_fold_builtin (tree stmt, tree fn)
case BUILT_IN_STPCPY_CHK:
if (val[1] && is_gimple_val (val[1]))
result = fold_builtin_stxcpy_chk (callee,
- CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
val[1], ignore,
DECL_FUNCTION_CODE (callee));
break;
case BUILT_IN_STRNCPY_CHK:
if (val[2] && is_gimple_val (val[2]))
- result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
- CALL_EXPR_ARG (fn, 1),
- CALL_EXPR_ARG (fn, 2),
- CALL_EXPR_ARG (fn, 3),
+ result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 1),
+ gimple_call_arg (stmt, 2),
+ gimple_call_arg (stmt, 3),
val[2]);
break;
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF_CHK:
+ /* FIXME tuples. */
+#if 0
if (val[1] && is_gimple_val (val[1]))
result = fold_builtin_snprintf_chk (fn, val[1],
DECL_FUNCTION_CODE (callee));
+#endif
break;
default:
@@ -2554,105 +2645,250 @@ ccp_fold_builtin (tree stmt, tree fn)
return result;
}
+/* Attempt to fold an assignment statement. Return true if any changes were
+ made. The statement is modified in place, but its RHS class may change.
+ It is assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_assign (gimple stmt)
+{
+ enum tree_code subcode = gimple_assign_subcode (stmt);
+
+ tree result = NULL;
-/* Fold the statement pointed to by STMT_P. In some cases, this function may
+ switch (get_gimple_rhs_class (subcode))
+ {
+ case GIMPLE_SINGLE_RHS:
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+
+ /* Try to fold a conditional expression. */
+ if (TREE_CODE (rhs) == COND_EXPR)
+ {
+ tree temp = fold (COND_EXPR_COND (rhs));
+ if (temp != COND_EXPR_COND (rhs))
+ result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
+ COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+ }
+
+ /* If we couldn't fold the RHS, hand over to the generic
+ fold routines. */
+ if (result == NULL_TREE)
+ result = fold (rhs);
+
+ /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
+ that may have been added by fold, and "useless" type
+ conversions that might now be apparent due to propagation. */
+ STRIP_USELESS_TYPE_CONVERSION (result);
+
+ if (result != rhs && valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ else
+ /* It is possible that fold_stmt_r simplified the RHS.
+ Make sure that the subcode of this statement still
+ reflects the principal operator of the rhs operand. */
+ gimple_assign_set_rhs_from_tree (stmt, rhs);
+ }
+ break;
+
+ case GIMPLE_UNARY_RHS:
+ result = fold_unary (subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ }
+ break;
+
+ case GIMPLE_BINARY_RHS:
+ result = fold_binary (subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (valid_gimple_rhs_p (result))
+ {
+ gimple_assign_set_rhs_from_tree (stmt, result);
+ return true;
+ }
+ }
+ break;
+
+ case GIMPLE_INVALID_RHS:
+ gcc_unreachable ();
+ }
+
+ return false;
+}
+
+/* Attempt to fold a conditional statement. Return true if any changes were
+ made. We only attempt to fold the condition expression, and do not perform
+ any transformation that would require alteration of the cfg. It is
+ assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_cond (gimple stmt)
+{
+ tree result = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node,
+ gimple_cond_lhs (stmt),
+ gimple_cond_rhs (stmt));
+
+ if (result)
+ {
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
+ {
+ gimple_cond_set_condition_from_tree (stmt, result);
+ return true;
+ }
+ }
+
+ return false;
+}
+
+/* Attempt to fold a call statement referenced by the statement iterator GSI.
+ The statement may be replaced by another statement, e.g., if the call
+ simplifies to a constant value. Return true if any changes were made.
+ It is assumed that the operands have been previously folded. */
+
+static bool
+fold_gimple_call (gimple_stmt_iterator *gsi)
+{
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree callee = gimple_call_fndecl (stmt);
+
+ /* Check for builtins that CCP can handle using information not
+ available in the generic fold routines. */
+ if (callee && DECL_BUILT_IN (callee))
+ {
+ tree result = ccp_fold_builtin (stmt);
+ if (result)
+ {
+ /* The call has simplified to a constant value, so
+ we can no longer represent it with a GIMPLE_CALL.
+ Introduce a new GIMPLE_ASSIGN statement. */
+ gimple new_stmt;
+ tree lhs = gimple_call_lhs (stmt);
+
+ STRIP_USELESS_TYPE_CONVERSION (result);
+ new_stmt = gimple_build_assign (lhs, result);
+ gimple_set_locus (new_stmt, gimple_locus (stmt));
+ gsi_replace (gsi, new_stmt, false);
+ return true;
+ }
+ }
+ else
+ {
+ /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
+ here are when we've propagated the address of a decl into the
+ object slot. */
+ /* ??? Should perhaps do this in fold proper. However, doing it
+ there requires that we create a new CALL_EXPR, and that requires
+ copying EH region info to the new node. Easier to just do it
+ here where we can just smash the call operand. */
+ /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
+ callee = gimple_call_fn (stmt);
+ if (TREE_CODE (callee) == OBJ_TYPE_REF
+ && lang_hooks.fold_obj_type_ref
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND
+ (OBJ_TYPE_REF_OBJECT (callee), 0)))
+ {
+ tree t;
+
+ /* ??? Caution: Broken ADDR_EXPR semantics means that
+ looking at the type of the operand of the addr_expr
+ can yield an array type. See silly exception in
+ check_pointer_types_r. */
+ t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
+ t = lang_hooks.fold_obj_type_ref (callee, t);
+ if (t)
+ {
+ gimple_call_set_fn (stmt, t);
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+/* Fold the statement pointed to by GSI. In some cases, this function may
replace the whole statement with a new one. Returns true iff folding
makes any changes. */
bool
-fold_stmt (tree *stmt_p)
+fold_stmt (gimple_stmt_iterator *gsi)
{
- tree rhs, result, stmt;
+
struct fold_stmt_r_data fold_stmt_r_data;
+ struct walk_stmt_info wi;
+
bool changed = false;
bool inside_addr_expr = false;
- stmt = *stmt_p;
+ gimple stmt = gsi_stmt (*gsi);
fold_stmt_r_data.stmt = stmt;
fold_stmt_r_data.changed_p = &changed;
fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
- /* If we replaced constants and the statement makes pointer dereferences,
- then we may need to fold instances of *&VAR into VAR, etc. */
- if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
- {
- *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
- return true;
- }
+ memset (&wi, 0, sizeof (wi));
+ wi.info = &fold_stmt_r_data;
- rhs = get_rhs (stmt);
- if (!rhs)
- return changed;
- result = NULL_TREE;
+ /* Fold the individual operands.
+ For example, fold instances of *&VAR into VAR, etc. */
+ if (walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi)) gcc_unreachable ();
- if (TREE_CODE (rhs) == CALL_EXPR)
+ /* Fold the main computation performed by the statement. */
+ switch (gimple_code (stmt))
{
- tree callee;
-
- /* Check for builtins that CCP can handle using information not
- available in the generic fold routines. */
- callee = get_callee_fndecl (rhs);
- if (callee && DECL_BUILT_IN (callee))
- result = ccp_fold_builtin (stmt, rhs);
- else
- {
- /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
- here are when we've propagated the address of a decl into the
- object slot. */
- /* ??? Should perhaps do this in fold proper. However, doing it
- there requires that we create a new CALL_EXPR, and that requires
- copying EH region info to the new node. Easier to just do it
- here where we can just smash the call operand. Also
- CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
- copied, fold_call_expr does not have not information. */
- callee = CALL_EXPR_FN (rhs);
- if (TREE_CODE (callee) == OBJ_TYPE_REF
- && lang_hooks.fold_obj_type_ref
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
- && DECL_P (TREE_OPERAND
- (OBJ_TYPE_REF_OBJECT (callee), 0)))
- {
- tree t;
+ case GIMPLE_ASSIGN:
+ changed |= fold_gimple_assign (stmt);
+ break;
+ case GIMPLE_COND:
+ changed |= fold_gimple_cond (stmt);
+ break;
+ case GIMPLE_CALL:
+ /* The entire statement may be replaced in this case. */
+ changed |= fold_gimple_call (gsi);
+ break;
- /* ??? Caution: Broken ADDR_EXPR semantics means that
- looking at the type of the operand of the addr_expr
- can yield an array type. See silly exception in
- check_pointer_types_r. */
-
- t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
- t = lang_hooks.fold_obj_type_ref (callee, t);
- if (t)
- {
- CALL_EXPR_FN (rhs) = t;
- changed = true;
- }
- }
- }
+ default:
+ return changed;
+ break;
}
- else if (TREE_CODE (rhs) == COND_EXPR)
+
+ /* FIXME tuples. This seems to be a reasonable optimization, but it breaks
+ some callers. Since we weren't doing this pre-tuples, it is best not to
+ pursue it now. */
+#if 0
+ if (!gimple_has_side_effects (stmt))
{
- tree temp = fold (COND_EXPR_COND (rhs));
- if (temp != COND_EXPR_COND (rhs))
- result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
- COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+ gimple new_stmt = gimple_build_nop ();
+ gimple_set_locus (new_stmt, gimple_locus (stmt));
+ gsi_replace (gsi, new_stmt, false);
+ return true;
}
-
- /* If we couldn't fold the RHS, hand over to the generic fold routines. */
- if (result == NULL_TREE)
- result = fold (rhs);
-
- /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
- may have been added by fold, and "useless" type conversions that might
- now be apparent due to propagation. */
- STRIP_USELESS_TYPE_CONVERSION (result);
-
- if (result != rhs)
- changed |= set_rhs (stmt_p, result);
+#endif
return changed;
}
-#endif
/* Perform the minimal folding on statement STMT. Only operations like
*&x created by constant propagation are handled. The statement cannot
@@ -2662,10 +2898,9 @@ fold_stmt (tree *stmt_p)
bool
fold_stmt_inplace (gimple stmt)
{
- enum gimple_code code;
- tree new_rhs;
struct fold_stmt_r_data fold_stmt_r_data;
struct walk_stmt_info wi;
+
bool changed = false;
bool inside_addr_expr = false;
@@ -2676,50 +2911,34 @@ fold_stmt_inplace (gimple stmt)
memset (&wi, 0, sizeof (wi));
wi.info = &fold_stmt_r_data;
- /* Fold the individual operands. */
- walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi);
+ /* Fold the individual operands.
+ For example, fold instances of *&VAR into VAR, etc.
+
+ It appears that, at one time, maybe_fold_stmt_indirect
+ would cause the walk to return non-null in order to
+ signal that the entire statement should be replaced with
+ a call to __builtin_trap. This functionality is currently
+ disabled, as noted in a FIXME, and cannot be supported here. */
+
+ if (walk_gimple_stmt (stmt, NULL, fold_stmt_r, &wi)) gcc_unreachable ();
/* Fold the main computation performed by the statement. */
- code = gimple_code (stmt);
- if (code == GIMPLE_ASSIGN)
- {
- tree lhs = gimple_assign_lhs (stmt);
- tree rhs1 = gimple_assign_rhs1 (stmt);
- tree rhs2 = gimple_assign_rhs2 (stmt);
- tree type = TREE_TYPE (lhs);
- enum tree_code rhs_code;
- enum gimple_rhs_class rhs_class;
-
- rhs_code = gimple_assign_subcode (stmt);
- rhs_class = get_gimple_rhs_class (rhs_code);
- if (rhs_class == GIMPLE_BINARY_RHS)
- new_rhs = fold_binary (rhs_code, type, rhs1, rhs2);
- else if (rhs_class == GIMPLE_UNARY_RHS)
- new_rhs = fold_unary (rhs_code, type, rhs1);
- else if (rhs_class == GIMPLE_SINGLE_RHS)
- new_rhs = rhs1;
- else
- gcc_unreachable ();
- }
- else if (code == GIMPLE_COND)
- new_rhs = fold_binary (gimple_cond_code (stmt), boolean_type_node,
- gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
- else
+ switch (gimple_code (stmt))
{
- /* No other statement has a foldable expression, so folding its
- individual operands is enough. */
- return changed;
- }
+ case GIMPLE_ASSIGN:
+ changed |= fold_gimple_assign (stmt);
+ break;
+ case GIMPLE_COND:
+ changed |= fold_gimple_cond (stmt);
+ break;
- /* FIXME tuples. */
-#if 0
- changed |= set_rhs (&stmt, new_rhs);
-#else
- gimple_unreachable ();
-#endif
+ default:
+ break;
+ }
return changed;
}
+
/* FIXME tuples. */
#if 0
\f
Index: gcc/tree-ssa-propagate.c
===================================================================
--- gcc/tree-ssa-propagate.c (revision 132513)
+++ gcc/tree-ssa-propagate.c (working copy)
@@ -1288,7 +1288,7 @@ substitute_and_fold (prop_value_t *prop_
gimple old_stmt = stmt;
tree rhs;
- fold_stmt (gsi_stmt_ptr (&i));
+ fold_stmt (&i);
stmt = gsi_stmt (i);
/* If we cleaned up EH information from the statement,
Index: gcc/gimple-dummy.c
===================================================================
--- gcc/gimple-dummy.c (revision 132513)
+++ gcc/gimple-dummy.c (working copy)
@@ -28,7 +28,6 @@ DUMMY_FN (compute_data_dependences_for_l
DUMMY_FN (dump_ddrs)
DUMMY_FN (estimate_numbers_of_iterations)
DUMMY_FN (expr_invariant_in_loop_p)
-DUMMY_FN (fold_stmt)
DUMMY_FN (free_data_refs)
DUMMY_FN (free_dependence_relations)
DUMMY_FN (free_numbers_of_iterations_estimates)
Index: gcc/gimplify.c
===================================================================
--- gcc/gimplify.c (revision 132513)
+++ gcc/gimplify.c (working copy)
@@ -2284,7 +2284,6 @@ gimplify_call_expr (tree *expr_p, gimple
CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
CALL_CANNOT_INLINE_P (*expr_p)
= CALL_CANNOT_INLINE_P (call);
- TREE_NOTHROW (*expr_p) = TREE_NOTHROW (call);
SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
/* Set CALL_EXPR_VA_ARG_PACK. */
Index: gcc/cfgexpand.c
===================================================================
--- gcc/cfgexpand.c (revision 132513)
+++ gcc/cfgexpand.c (working copy)
@@ -174,6 +174,9 @@ gimple_to_tree (gimple stmt)
if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
TREE_SIDE_EFFECTS (t) = 1;
+ if (gimple_call_flags (stmt) & ECF_NOTHROW)
+ TREE_NOTHROW (t) = 1;
+
/* If the call has a LHS then create a MODIFY_EXPR to hold it. */
if (gimple_call_lhs (stmt))
t = build_gimple_modify_stmt (gimple_call_lhs (stmt), t);
@@ -200,6 +203,9 @@ gimple_to_tree (gimple stmt)
return t;
}
+
+ case GIMPLE_NOP:
+ return build1 (NOP_EXPR, void_type_node, size_zero_node);
default:
error ("Unrecognized GIMPLE statement during RTL expansion");
Index: gcc/tree-flow.h
===================================================================
--- gcc/tree-flow.h (revision 132513)
+++ gcc/tree-flow.h (working copy)
@@ -713,7 +713,7 @@ extern gimple last_and_only_stmt (basic_
extern edge find_taken_edge (basic_block, tree);
extern basic_block label_to_block_fn (struct function *, tree);
#define label_to_block(t) (label_to_block_fn (cfun, t))
-extern void notice_special_calls (tree);
+extern void notice_special_calls (gimple);
extern void clear_special_calls (void);
extern void verify_stmts (void);
extern void verify_gimple (void);
@@ -884,7 +884,7 @@ tree get_current_def (tree);
void set_current_def (tree, tree);
/* In tree-ssa-ccp.c */
-bool fold_stmt (gimple *);
+bool fold_stmt (gimple_stmt_iterator *);
bool fold_stmt_inplace (gimple);
tree widen_bitfield (tree, tree, tree);
Index: gcc/gimple.c
===================================================================
--- gcc/gimple.c (revision 132513)
+++ gcc/gimple.c (working copy)
@@ -412,6 +412,20 @@ gimple_build_cond_from_tree (tree cond,
return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
+/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
+ boolean expression tree COND. */
+
+void
+gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
+{
+ enum tree_code code;
+ tree lhs, rhs;
+
+ gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
+ gimple_cond_set_code (stmt, code);
+ gimple_cond_set_lhs (stmt, lhs);
+ gimple_cond_set_rhs (stmt, rhs);
+}
/* Build a GIMPLE_LABEL statement for LABEL. */
@@ -1742,6 +1756,20 @@ gimple_has_side_effects (gimple s)
return false;
}
+/* Return true if any statement in STMTS has side effects. */
+
+bool
+gimple_seq_has_side_effects (gimple_seq stmts)
+{
+ gimple_stmt_iterator gsi;
+
+ for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
+ if (gimple_has_side_effects (gsi_stmt (gsi)))
+ return true;
+
+ return false;
+}
+
/* If STMT is a copy or no-op cast statement, returns its rhs operand.
Otherwise returns NULL. */
Index: gcc/gimple.h
===================================================================
--- gcc/gimple.h (revision 132513)
+++ gcc/gimple.h (working copy)
@@ -566,7 +566,9 @@ bool is_gimple_operand (const_tree);
void gimple_set_modified (gimple, bool);
void gimple_cond_get_ops_from_tree (tree, enum tree_code *, tree *, tree *);
gimple gimple_build_cond_from_tree (tree, tree, tree);
+void gimple_cond_set_condition_from_tree (gimple, tree);
bool gimple_has_side_effects (gimple);
+bool gimple_seq_has_side_effects (gimple_seq);
tree copy_or_nop_cast_stmt_rhs (gimple);
/* In builtins.c */
@@ -1336,6 +1338,18 @@ gimple_call_noreturn_p (gimple s)
return (gimple_call_flags (s) & ECF_NORETURN) != 0;
}
+
+/* Return true if S is a nothrow call. */
+
+static inline bool
+gimple_call_nothrow_p (gimple s)
+{
+ if (gimple_code (s) != GIMPLE_CALL)
+ return false;
+ return (gimple_call_flags (s) & ECF_NOTHROW) != 0;
+}
+
+
/* Return the code of the predicate computed by conditional statement GS. */
static inline enum tree_code
@@ -2855,7 +2869,11 @@ gsi_after_labels (basic_block bb)
return gsi;
}
-/* Return a pointer to the current stmt. */
+/* Return a pointer to the current stmt.
+
+ NOTE: You may want to use gsi_replace on the iterator itself,
+ as this performs additional bookkeeping that will not be done
+ if you simply assign through a pointer returned by gsi_stmt_ptr. */
static inline gimple *
gsi_stmt_ptr (gimple_stmt_iterator *i)
Index: gcc/tree-cfg.c
===================================================================
--- gcc/tree-cfg.c (revision 132513)
+++ gcc/tree-cfg.c (working copy)
@@ -1457,6 +1457,8 @@ single_noncomplex_succ (basic_block bb)
* Some unnecessary BIND_EXPRs are removed
+ * GOTO_EXPRs immediately preceding destination are removed.
+
Clearly more work could be done. The trick is doing the analysis
and removal fast enough to be a net improvement in compile times.
@@ -1466,213 +1468,204 @@ single_noncomplex_succ (basic_block bb)
struct rus_data
{
- tree *last_goto;
bool repeat;
bool may_throw;
bool may_branch;
bool has_label;
+ bool last_was_goto;
+ gimple_stmt_iterator last_goto_gsi;
};
-/* FIXME tuples. */
-#if 0
-static void remove_useless_stmts_1 (tree *, struct rus_data *);
-#endif
+
+static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
+
+/* Given a statement sequence, find the first executable statement with
+ location information, and warn that it is unreachable. When searching,
+ descend into containers in execution order. */
static bool
-remove_useless_stmts_warn_notreached (tree stmt)
+remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
- if (EXPR_HAS_LOCATION (stmt))
- {
- location_t loc = EXPR_LOCATION (stmt);
- if (LOCATION_LINE (loc) > 0)
- {
- warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
- return true;
- }
- }
+ gimple_stmt_iterator gsi;
- switch (TREE_CODE (stmt))
+ for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
{
- case STATEMENT_LIST:
- {
- tree_stmt_iterator i;
- for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
- if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
- return true;
- }
- break;
+ gimple stmt = gsi_stmt (gsi);
- case COND_EXPR:
- if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
- return true;
- if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
- return true;
- if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
- return true;
- break;
+ if (!gimple_locus_empty_p (stmt))
+ {
+ location_t loc = gimple_locus (stmt);
+ if (LOCATION_LINE (loc) > 0)
+ {
+ warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
+ return true;
+ }
+ }
- case TRY_FINALLY_EXPR:
- case TRY_CATCH_EXPR:
- if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
- return true;
- if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
- return true;
- break;
+ switch (gimple_code (stmt))
+ {
+ /* Unfortunately, we need the CFG now to detect unreachable
+ branches in a conditional, so conditionals are not handled here. */
- case CATCH_EXPR:
- return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
- case EH_FILTER_EXPR:
- return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
- case BIND_EXPR:
- return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
+ case GIMPLE_TRY:
+ if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
+ return true;
+ if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
+ return true;
+ break;
+
+ case GIMPLE_CATCH:
+ return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
- default:
- /* Not a live container. */
- break;
- }
+ case GIMPLE_EH_FILTER:
+ return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
- return false;
-}
+ case GIMPLE_BIND:
+ return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
-/* FIXME tuples. */
+ /* FIXME tuples. The original code pre-tuplification did not
+ descend into the OMP constructs. */
#if 0
-static void
-remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
-{
- tree then_clause, else_clause, cond;
- bool save_has_label, then_has_label, else_has_label;
-
- save_has_label = data->has_label;
- data->has_label = false;
- data->last_goto = NULL;
-
- remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
+ case GIMPLE_OMP_FOR:
+ if (remove_useless_stmts_warn_notreached (gimple_omp_for_pre_body (stmt)))
+ return true;
+ /* FALLTHROUGH */
+ case GIMPLE_OMP_CRITICAL:
+ case GIMPLE_OMP_CONTINUE:
+ case GIMPLE_OMP_MASTER:
+ case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_SECTION:
+ case GIMPLE_OMP_PARALLEL:
+ case GIMPLE_OMP_SECTIONS:
+ case GIMPLE_OMP_SINGLE:
+ return remove_useless_stmts_warn_notreached (gimple_omp_body (stmt));
+#endif
- then_has_label = data->has_label;
- data->has_label = false;
- data->last_goto = NULL;
+ default:
+ break;
+ }
+ }
- remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
+ return false;
+}
- else_has_label = data->has_label;
- data->has_label = save_has_label | then_has_label | else_has_label;
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_COND statements. */
- then_clause = COND_EXPR_THEN (*stmt_p);
- else_clause = COND_EXPR_ELSE (*stmt_p);
- cond = fold (COND_EXPR_COND (*stmt_p));
+static void
+remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
+{
+ tree cond;
+ gimple stmt = gsi_stmt (*gsi);
- /* If neither arm does anything at all, we can remove the whole IF. */
- if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
- {
- *stmt_p = build_empty_stmt ();
- data->repeat = true;
- }
+ /* The folded result must still be a conditional statement. */
+ fold_stmt_inplace (stmt);
- /* If there are no reachable statements in an arm, then we can
- zap the entire conditional. */
- else if (integer_nonzerop (cond) && !else_has_label)
- {
- if (warn_notreached)
- remove_useless_stmts_warn_notreached (else_clause);
- *stmt_p = then_clause;
+ /* Attempt to evaluate the condition at compile-time.
+ Because we are in GIMPLE here, only the most trivial
+ comparisons of a constant to a constant can be handled.
+ Calling fold_binary is thus a bit of overkill, as it
+ creates temporary tree nodes that are discarded immediately. */
+ cond = fold_binary (gimple_cond_code (stmt),
+ boolean_type_node,
+ gimple_cond_lhs (stmt),
+ gimple_cond_rhs (stmt));
+
+ /* FIXME tuples. The optimization being applied to GIMPLE_GOTO
+ destinations could be performed for the true and false labels
+ of a GIMPLE_COND as well. This would require tracking a bit
+ more information. */
+
+ /* Replace trivial conditionals with gotos. */
+ if (cond && integer_nonzerop (cond))
+ {
+ /* Goto THEN label. */
+ tree then_label = gimple_cond_true_label (stmt);
+
+ gsi_replace(gsi, gimple_build_goto (then_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
- else if (integer_zerop (cond) && !then_has_label)
+ else if (cond && integer_zerop (cond))
{
- if (warn_notreached)
- remove_useless_stmts_warn_notreached (then_clause);
- *stmt_p = else_clause;
+ /* Goto ELSE label. */
+ tree else_label = gimple_cond_false_label (stmt);
+
+ gsi_replace(gsi, gimple_build_goto (else_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
-
- /* Check a couple of simple things on then/else with single stmts. */
else
{
- tree then_stmt = expr_only (then_clause);
- tree else_stmt = expr_only (else_clause);
-
- /* Notice branches to a common destination. */
- if (then_stmt && else_stmt
- && TREE_CODE (then_stmt) == GOTO_EXPR
- && TREE_CODE (else_stmt) == GOTO_EXPR
- && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
- {
- *stmt_p = then_stmt;
+ tree then_label = gimple_cond_true_label (stmt);
+ tree else_label = gimple_cond_false_label (stmt);
+
+ if (then_label == else_label)
+ {
+ /* Goto common destination. */
+ gsi_replace (gsi, gimple_build_goto (then_label), false);
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
data->repeat = true;
}
-
- /* If the THEN/ELSE clause merely assigns a value to a variable or
- parameter which is already known to contain that value, then
- remove the useless THEN/ELSE clause. */
- else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
- {
- if (else_stmt
- && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
- && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
- && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
- COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
- }
- else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
- && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
- || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
- && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
- {
- tree stmt = (TREE_CODE (cond) == EQ_EXPR
- ? then_stmt : else_stmt);
- tree *location = (TREE_CODE (cond) == EQ_EXPR
- ? &COND_EXPR_THEN (*stmt_p)
- : &COND_EXPR_ELSE (*stmt_p));
-
- if (stmt
- && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
- && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
- && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
- *location = alloc_stmt_list ();
- }
}
- /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
- would be re-introduced during lowering. */
- data->last_goto = NULL;
+ gsi_next (gsi);
+
+ data->last_was_goto = false;
}
+/* Helper for remove_useless_stmts_1.
+ Handle the try-finally case for GIMPLE_TRY statements. */
static void
-remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
bool save_may_branch, save_may_throw;
bool this_may_branch, this_may_throw;
+ gimple_seq eval_seq, cleanup_seq;
+ gimple_stmt_iterator eval_gsi, cleanup_gsi;
+
+ gimple stmt = gsi_stmt (*gsi);
+
/* Collect may_branch and may_throw information for the body only. */
save_may_branch = data->may_branch;
save_may_throw = data->may_throw;
data->may_branch = false;
data->may_throw = false;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+ eval_seq = gimple_try_eval (stmt);
+ eval_gsi = gsi_start (eval_seq);
+ remove_useless_stmts_1 (&eval_gsi, data);
this_may_branch = data->may_branch;
this_may_throw = data->may_throw;
data->may_branch |= save_may_branch;
data->may_throw |= save_may_throw;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+ cleanup_seq = gimple_try_cleanup (stmt);
+ cleanup_gsi = gsi_start (cleanup_seq);
+ remove_useless_stmts_1 (&cleanup_gsi, data);
/* If the body is empty, then we can emit the FINALLY block without
the enclosing TRY_FINALLY_EXPR. */
- if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
+ if (!gimple_seq_has_side_effects (eval_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 1);
+ gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
/* If the handler is empty, then we can emit the TRY block without
the enclosing TRY_FINALLY_EXPR. */
- else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+ else if (!gimple_seq_has_side_effects (cleanup_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
@@ -1680,37 +1673,51 @@ remove_useless_stmts_tf (tree *stmt_p, s
string the TRY and FINALLY blocks together. */
else if (!this_may_branch && !this_may_throw)
{
- tree stmt = *stmt_p;
- *stmt_p = TREE_OPERAND (stmt, 0);
- append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
}
+/* Helper for remove_useless_stmts_1.
+ Handle the try-catch case for GIMPLE_TRY statements. */
static void
-remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
bool save_may_throw, this_may_throw;
- tree_stmt_iterator i;
- tree stmt;
+
+ gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
+ gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
+
+ gimple stmt = gsi_stmt (*gsi);
/* Collect may_throw information for the body only. */
save_may_throw = data->may_throw;
data->may_throw = false;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+ eval_seq = gimple_try_eval (stmt);
+ eval_gsi = gsi_start (eval_seq);
+ remove_useless_stmts_1 (&eval_gsi, data);
this_may_throw = data->may_throw;
data->may_throw = save_may_throw;
+ cleanup_seq = gimple_try_cleanup (stmt);
+
/* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
if (!this_may_throw)
{
if (warn_notreached)
- remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ {
+ remove_useless_stmts_warn_notreached (cleanup_seq);
+ }
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
return;
}
@@ -1719,141 +1726,164 @@ remove_useless_stmts_tc (tree *stmt_p, s
no exceptions propagate past this point. */
this_may_throw = true;
- i = tsi_start (TREE_OPERAND (*stmt_p, 1));
- stmt = tsi_stmt (i);
- data->last_goto = NULL;
+ cleanup_gsi = gsi_start (cleanup_seq);
+ stmt = gsi_stmt (cleanup_gsi);
+ data->last_was_goto = false;
- switch (TREE_CODE (stmt))
+ switch (gimple_code (stmt))
{
- case CATCH_EXPR:
- for (; !tsi_end_p (i); tsi_next (&i))
- {
- stmt = tsi_stmt (i);
+ case GIMPLE_CATCH:
+ /* If the first element is a catch, they all must be. */
+ while (!gsi_end_p (cleanup_gsi))
+ {
+ stmt = gsi_stmt (cleanup_gsi);
/* If we catch all exceptions, then the body does not
propagate exceptions past this point. */
- if (CATCH_TYPES (stmt) == NULL)
+ if (gimple_catch_types (stmt) == NULL)
this_may_throw = false;
- data->last_goto = NULL;
- remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
+ data->last_was_goto = false;
+ handler_seq = gimple_catch_handler (stmt);
+ handler_gsi = gsi_start (handler_seq);
+ remove_useless_stmts_1 (&handler_gsi, data);
+ gsi_next (&cleanup_gsi);
}
+ gsi_next (gsi);
break;
- case EH_FILTER_EXPR:
- if (EH_FILTER_MUST_NOT_THROW (stmt))
+ case GIMPLE_EH_FILTER:
+ /* If the first element is an eh_filter, it should stand alone. */
+ if (gimple_eh_filter_must_not_throw (stmt))
this_may_throw = false;
- else if (EH_FILTER_TYPES (stmt) == NULL)
+ else if (gimple_eh_filter_types (stmt) == NULL)
this_may_throw = false;
- remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
+ failure_seq = gimple_eh_filter_failure (stmt);
+ failure_gsi = gsi_start (failure_seq);
+ remove_useless_stmts_1 (&failure_gsi, data);
+ gsi_next (gsi);
break;
default:
- /* Otherwise this is a cleanup. */
- remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+ /* Otherwise this is a list of cleanup statements. */
+ remove_useless_stmts_1 (&cleanup_gsi, data);
/* If the cleanup is empty, then we can emit the TRY block without
the enclosing TRY_CATCH_EXPR. */
- if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+ if (!gimple_seq_has_side_effects (cleanup_seq))
{
- *stmt_p = TREE_OPERAND (*stmt_p, 0);
+ gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+ gsi_remove(gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
break;
}
+
data->may_throw |= this_may_throw;
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_BIND statements. */
static void
-remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
{
tree block;
+ gimple_seq body_seq, fn_body_seq;
+ gimple_stmt_iterator body_gsi;
- /* First remove anything underneath the BIND_EXPR. */
- remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
+ gimple stmt = gsi_stmt (*gsi);
- /* If the BIND_EXPR has no variables, then we can pull everything
- up one level and remove the BIND_EXPR, unless this is the toplevel
- BIND_EXPR for the current function or an inlined function.
+ /* First remove anything underneath the BIND_EXPR. */
+
+ body_seq = gimple_bind_body (stmt);
+ body_gsi = gsi_start (body_seq);
+ remove_useless_stmts_1 (&body_gsi, data);
+
+ /* If the GIMPLE_BIND has no variables, then we can pull everything
+ up one level and remove the GIMPLE_BIND, unless this is the toplevel
+ GIMPLE_BIND for the current function or an inlined function.
When this situation occurs we will want to apply this
optimization again. */
- block = BIND_EXPR_BLOCK (*stmt_p);
- if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
- && *stmt_p != gimple_body (current_function_decl)
+ block = gimple_bind_block (stmt);
+ fn_body_seq = gimple_body (current_function_decl);
+ if (gimple_bind_vars (stmt) == NULL_TREE
+ && (gimple_seq_empty_p (fn_body_seq)
+ || stmt != gimple_seq_first_stmt (fn_body_seq))
&& (! block
|| ! BLOCK_ABSTRACT_ORIGIN (block)
|| (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
!= FUNCTION_DECL)))
{
- *stmt_p = BIND_EXPR_BODY (*stmt_p);
+ gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
+ gsi_remove (gsi, false);
data->repeat = true;
}
+ else
+ gsi_next (gsi);
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_GOTO statements. */
static void
-remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree dest = GOTO_DESTINATION (*stmt_p);
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree dest = gimple_goto_dest (stmt);
data->may_branch = true;
- data->last_goto = NULL;
+ data->last_was_goto = false;
- /* Record the last goto expr, so that we can delete it if unnecessary. */
+ /* Record iterator for last goto expr, so that we can delete it if unnecessary. */
if (TREE_CODE (dest) == LABEL_DECL)
- data->last_goto = stmt_p;
+ {
+ data->last_goto_gsi = *gsi;
+ data->last_was_goto = true;
+ }
+
+ gsi_next(gsi);
}
+/* Helper for remove_useless_stmts_1. Handle GIMPLE_LABEL statements. */
static void
-remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree label = LABEL_EXPR_LABEL (*stmt_p);
+ gimple stmt = gsi_stmt (*gsi);
+
+ tree label = gimple_label_label (stmt);
data->has_label = true;
/* We do want to jump across non-local label receiver code. */
if (DECL_NONLOCAL (label))
- data->last_goto = NULL;
+ data->last_was_goto = false;
- else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
+ else if (data->last_was_goto
+ && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
{
- *data->last_goto = build_empty_stmt ();
+ /* Replace the preceding GIMPLE_GOTO statement with
+ a GIMPLE_NOP, which will be subsequently removed.
+ In this way, we avoid invalidating other iterators
+ active on the statement sequence. */
+ gsi_replace(&data->last_goto_gsi, gimple_build_nop(), false);
+ data->last_was_goto = false;
data->repeat = true;
}
/* ??? Add something here to delete unused labels. */
-}
-
-/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
- decl. This allows us to eliminate redundant or useless
- calls to "const" functions.
-
- Gimplifier already does the same operation, but we may notice functions
- being const and pure once their calls has been gimplified, so we need
- to update the flag. */
-
-static void
-update_call_expr_flags (tree call)
-{
- tree decl = get_callee_fndecl (call);
- if (!decl)
- return;
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
- TREE_SIDE_EFFECTS (call) = 0;
- if (TREE_NOTHROW (decl))
- TREE_NOTHROW (call) = 1;
+ gsi_next (gsi);
}
-#endif
/* T is CALL_EXPR. Set current_function_calls_* flags. */
void
-notice_special_calls (tree t)
+notice_special_calls (gimple call)
{
- int flags = call_expr_flags (t);
+ int flags = gimple_call_flags (call);
if (flags & ECF_MAY_BE_ALLOCA)
current_function_calls_alloca = true;
@@ -1872,127 +1902,148 @@ clear_special_calls (void)
current_function_calls_setjmp = false;
}
+/* Remove useless statements from a statement sequence, and perform
+ some preliminary simplifications. */
-/* FIXME tuples. */
-#if 0
static void
-remove_useless_stmts_1 (tree *tp, struct rus_data *data)
+remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
{
- tree t = *tp, op;
-
- switch (TREE_CODE (t))
+ while (!gsi_end_p (*gsi))
{
- case COND_EXPR:
- remove_useless_stmts_cond (tp, data);
- break;
-
- case TRY_FINALLY_EXPR:
- remove_useless_stmts_tf (tp, data);
- break;
-
- case TRY_CATCH_EXPR:
- remove_useless_stmts_tc (tp, data);
- break;
-
- case BIND_EXPR:
- remove_useless_stmts_bind (tp, data);
- break;
+ gimple stmt = gsi_stmt (*gsi);
- case GOTO_EXPR:
- remove_useless_stmts_goto (tp, data);
- break;
-
- case LABEL_EXPR:
- remove_useless_stmts_label (tp, data);
- break;
+ /* FIXME tuples. We follow the pre-tuples code below and use
+ fold_stmt for simplification. Note that this may change the
+ statement type, which is an invitation to trouble. Perhaps
+ fold_stmt_inplace should be used, or folding should be done
+ prior to the switch statement. */
- case RETURN_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- data->may_branch = true;
- break;
-
- case CALL_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- notice_special_calls (t);
- update_call_expr_flags (t);
- if (stmt_could_throw_p (t))
- data->may_throw = true;
- break;
-
- case MODIFY_EXPR:
- gcc_unreachable ();
-
- case GIMPLE_MODIFY_STMT:
- data->last_goto = NULL;
- fold_stmt (tp);
- op = get_call_expr_in (t);
- if (op)
- {
- update_call_expr_flags (op);
- notice_special_calls (op);
- }
- if (stmt_could_throw_p (t))
- data->may_throw = true;
- break;
-
- case STATEMENT_LIST:
- {
- tree_stmt_iterator i = tsi_start (t);
- while (!tsi_end_p (i))
- {
- t = tsi_stmt (i);
- if (IS_EMPTY_STMT (t))
- {
- tsi_delink (&i);
- continue;
- }
-
- remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
-
- t = tsi_stmt (i);
- if (TREE_CODE (t) == STATEMENT_LIST)
- {
- tsi_link_before (&i, t, TSI_SAME_STMT);
- tsi_delink (&i);
- }
- else
- tsi_next (&i);
- }
- }
- break;
- case ASM_EXPR:
- fold_stmt (tp);
- data->last_goto = NULL;
- break;
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_COND:
+ remove_useless_stmts_cond (gsi, data);
+ break;
+
+ case GIMPLE_GOTO:
+ remove_useless_stmts_goto (gsi, data);
+ break;
+
+ case GIMPLE_LABEL:
+ remove_useless_stmts_label (gsi, data);
+ break;
+
+ case GIMPLE_ASSIGN:
+ fold_stmt (gsi);
+ stmt = gsi_stmt (*gsi);
+ data->last_was_goto = false;
+ if (stmt_could_throw_p (stmt))
+ data->may_throw = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_ASM:
+ fold_stmt (gsi);
+ data->last_was_goto = false;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_CALL:
+ fold_stmt (gsi);
+ stmt = gsi_stmt (*gsi);
+ data->last_was_goto = false;
+ if (gimple_code (stmt) == GIMPLE_CALL)
+ notice_special_calls (stmt);
+ /* We used to call update_gimple_call_flags here,
+ which copied side-effects and nothrows status
+ from the function decl to the call. In the new
+ tuplified GIMPLE, the accessors for this information
+ always consult the function decl, so this copying
+ is no longer necessary. */
+ if (stmt_could_throw_p (stmt))
+ data->may_throw = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_RETURN:
+ fold_stmt (gsi);
+ data->last_was_goto = false;
+ data->may_branch = true;
+ gsi_next (gsi);
+ break;
+
+ case GIMPLE_BIND:
+ remove_useless_stmts_bind (gsi, data);
+ break;
+
+ case GIMPLE_CATCH:
+ remove_useless_stmts_tc (gsi, data);
+ break;
+
+ case GIMPLE_TRY:
+ remove_useless_stmts_tf (gsi, data);
+ break;
+
+ case GIMPLE_NOP:
+ gsi_remove (gsi, false);
+ break;
+
+ /* FIXME tuples. The original pre-tuples code did not simplify OMP
+ statements, so I'd rather not enable this until we get everything
+ else working, and verify that such simplification is appropriate. */
+#if 0
+ case GIMPLE_OMP_FOR:
+ {
+ gimple_seq pre_body_seq = gimple_omp_body (stmt);
+ gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
+
+ remove_useless_stmts_1 (&pre_body_gsi, data);
+ }
+ /* FALLTHROUGH */
+ case GIMPLE_OMP_CRITICAL:
+ case GIMPLE_OMP_CONTINUE:
+ case GIMPLE_OMP_MASTER:
+ case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_SECTION:
+ case GIMPLE_OMP_PARALLEL:
+ case GIMPLE_OMP_SECTIONS:
+ case GIMPLE_OMP_SINGLE:
+ {
+ gimple_seq body_seq = gimple_omp_body (stmt);
+ gimple_stmt_iterator body_gsi = gsi_start (body_seq);
+
+ remove_useless_stmts_1 (&body_gsi, data);
+ }
+ break;
+#endif
- default:
- data->last_goto = NULL;
- break;
+ default:
+ data->last_was_goto = false;
+ gsi_next (gsi);
+ break;
+ }
}
}
-#endif
+
+/* Walk the function tree, removing useless statements and performing
+ some preliminary simplifications. */
static unsigned int
remove_useless_stmts (void)
{
- /* FIXME tuples. */
-#if 0
struct rus_data data;
clear_special_calls ();
do
{
+ gimple_stmt_iterator gsi;
+
+ gsi = gsi_start (gimple_body (current_function_decl));
memset (&data, 0, sizeof (data));
- remove_useless_stmts_1 (&gimple_body (current_function_decl), &data);
+ remove_useless_stmts_1 (&gsi, &data);
}
while (data.repeat);
return 0;
-#else
- gimple_unreachable ();
-#endif
}
^ permalink raw reply [flat|nested] 4+ messages in thread
end of thread, other threads:[~2008-02-26 20:06 UTC | newest]
Thread overview: 4+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2008-02-26 21:40 [tuples][patch] Convert pass_remove_useless_stmts and related constant folding infrastructure Bill Maddox
-- strict thread matches above, loose matches on Subject: below --
2008-02-26 9:32 Bill Maddox
2008-02-21 21:08 [Tuples][Patch] " Bill Maddox
2008-02-21 22:38 ` Diego Novillo
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).