Index: tree-inline.c =================================================================== --- tree-inline.c (revision 222439) +++ tree-inline.c (working copy) @@ -898,7 +898,19 @@ remap_gimple_op_r (tree *tp, int *walk_s if (TREE_CODE (*tp) == SSA_NAME) { - *tp = remap_ssa_name (*tp, id); + tree t = remap_ssa_name (*tp, id); + /* Perform anonymous constant propagation, this makes it possible to + generate reasonable code even at -O0 for operators implemented as + inline functions. */ + if (TREE_CODE (t) == SSA_NAME + && SSA_NAME_DEF_STMT (t) + && (!SSA_NAME_VAR (t) || DECL_IGNORED_P (SSA_NAME_VAR (t))) + && gimple_assign_copy_p (SSA_NAME_DEF_STMT (t)) + && is_gimple_min_invariant + (gimple_assign_rhs1 (SSA_NAME_DEF_STMT (t)))) + *tp = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (t)); + else + *tp = t; *walk_subtrees = 0; return NULL; } @@ -1965,7 +1977,7 @@ copy_bb (copy_body_data *id, basic_block /* Statements produced by inlining can be unfolded, especially when we constant propagated some operands. We can't fold - them right now for two reasons: + them right now in the general case for two reasons: 1) folding require SSA_NAME_DEF_STMTs to be correct 2) we can't change function calls to builtins. So we just mark statement for later folding. We mark @@ -1974,7 +1986,10 @@ copy_bb (copy_body_data *id, basic_block foldable indirectly are updated. If this turns out to be expensive, copy_body can be told to watch for nontrivial changes. */ - if (id->statements_to_fold) + if (gimple_assign_cast_p (stmt) + && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))) + fold_stmt (&copy_gsi); + else if (id->statements_to_fold) id->statements_to_fold->add (stmt); /* We're duplicating a CALL_EXPR. Find any corresponding