Subject: Cgraph alias reorg 5/14 (ipa-inline update)
From: Jan Hubicka
Date: 2011-06-10 11:54 UTC
To: gcc-patches

Hi,
this patch updates the inliner.  The inliner, too, mostly walks from callers
to callees, and it needs to get at the real function, not the alias node.
There is quite a lot of such walking, however, so the patch is fairly busy.

There are a few bits I had to leave out of this patch since the infrastructure
is not in yet: ipa-inline-transform.c should remove useless aliases after
inlining, and update_caller_keys should dive into aliases.

It is all pretty straightforward.
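
To make the mechanical part of the change easier to review: wherever the
inliner used to inspect e->callee directly, it now first resolves aliases and
thunks to the underlying function.  The helper below is a minimal sketch of
that pattern (the name edge_target_body_available_p is invented for
illustration and is not part of the patch); the edge itself keeps pointing at
the alias until inline_call redirects it.

/* Illustration only, not part of the patch; assumes the usual cgraph.h
   context.  Resolve aliases/thunks on the callee side of E and check that
   a real, non-overwritable body is available, the way can_inline_edge_p
   now does.  */
static bool
edge_target_body_available_p (struct cgraph_edge *e)
{
  enum availability avail;
  struct cgraph_node *callee
    = cgraph_function_or_thunk_node (e->callee, &avail);

  /* CALLEE is the function that would actually get inlined; E->callee may
     still be an alias node at this point.  */
  return (callee
	  && callee->analyzed
	  && avail > AVAIL_OVERWRITABLE);
}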

Bootstrapped/regtested on x86_64-linux; will commit it shortly.

Honza

	* ipa-inline-transform.c (can_remove_node_now_p): Move out of...
	(clone_inlined_nodes): ... here.
	(inline_call): Use cgraph_function_or_thunk_node; redirect edge
	to the real destination prior to inlining.
	* ipa-inline.c (caller_growth_limits, can_inline_edge_p,
	can_early_inline_edge_p, want_early_inline_function_p,
	want_inline_small_function_p,
	want_inline_self_recursive_call_p, want_inline_function_called_once_p,
	edge_badness, update_all_callee_keys, lookup_recursive_calls,
	add_new_edges_to_heap, inline_small_functions, flatten_function,
	inline_always_inline_functions, early_inline_small_functions): Use
	cgraph_function_or_thunk_node.
	* ipa-inline-analysis.c (evaluate_conditions_for_edge,
	dump_inline_edge_summary, estimate_function_body_sizes): Likewise.
	(do_estimate_growth_1): Break out from ...
	(do_estimate_growth): ... here; walk aliases.
	(inline_generate_summary): Skip aliases.
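
For the do_estimate_growth entry above: growth is now accumulated over the
function and all of its aliases via cgraph_for_node_and_aliases, which
invokes a callback of type bool (*) (struct cgraph_node *, void *) on the
node and each of its aliases, stopping early once a callback returns true.
A minimal usage sketch follows; the count_callers* names are invented for
illustration and are not part of the patch.

/* Illustration only: count direct callers of NODE and of all its aliases,
   using the same walking scheme as do_estimate_growth_1 in the patch.  */
struct count_callers_data
{
  int count;
};

static bool
count_callers_1 (struct cgraph_node *node, void *data)
{
  struct cgraph_edge *e;
  struct count_callers_data *d = (struct count_callers_data *) data;

  for (e = node->callers; e; e = e->next_caller)
    d->count++;
  /* Returning false keeps the walk going over the remaining aliases.  */
  return false;
}

static int
count_callers (struct cgraph_node *node)
{
  struct count_callers_data d = { 0 };
  /* The final TRUE mirrors the patch and includes overwritable aliases.  */
  cgraph_for_node_and_aliases (node, count_callers_1, &d, true);
  return d.count;
}
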
Index: ipa-inline-transform.c
===================================================================
*** ipa-inline-transform.c	(revision 174895)
--- ipa-inline-transform.c	(working copy)
*************** update_noncloned_frequencies (struct cgr
*** 76,81 ****
--- 76,110 ----
      }
  }
  
+ /* We removed or are going to remove the last call to NODE.
+    Return true if we can and want proactively remove the NODE now.
+    This is important to do, since we want inliner to know when offline
+    copy of function was removed.  */
+ 
+ static bool
+ can_remove_node_now_p (struct cgraph_node *node)
+ {
+   /* FIXME: When address is taken of DECL_EXTERNAL function we still
+      can remove its offline copy, but we would need to keep unanalyzed node in
+      the callgraph so references can point to it.  */
+   return (!node->address_taken
+ 	  && cgraph_can_remove_if_no_direct_calls_p (node)
+ 	  /* Inlining might enable more devirtualizing, so we want to remove
+ 	     those only after all devirtualizable virtual calls are processed.
+ 	     Lacking may edges in callgraph we just preserve them post
+ 	     inlining.  */
+ 	  && (!DECL_VIRTUAL_P (node->decl)
+ 	      || (!DECL_COMDAT (node->decl)
+ 		  && !DECL_EXTERNAL (node->decl)))
+ 	  /* Don't reuse if more than one function shares a comdat group.
+ 	     If the other function(s) are needed, we need to emit even
+ 	     this function out of line.  */
+ 	  && !node->same_comdat_group
+ 	  /* During early inlining some unanalyzed cgraph nodes might be in the
+ 	     callgraph and they might reffer the function in question.  */
+ 	  && !cgraph_new_nodes);
+ }
+ 
  
  /* E is expected to be an edge being inlined.  Clone destination node of
     the edge and redirect it to the new clone.
*************** clone_inlined_nodes (struct cgraph_edge 
*** 97,121 ****
  	  /* Recursive inlining never wants the master clone to
  	     be overwritten.  */
  	  && update_original
! 	  /* FIXME: When address is taken of DECL_EXTERNAL function we still
! 	     can remove its offline copy, but we would need to keep unanalyzed
! 	     node in the callgraph so references can point to it.  */
! 	  && !e->callee->address_taken
! 	  && cgraph_can_remove_if_no_direct_calls_p (e->callee)
! 	  /* Inlining might enable more devirtualizing, so we want to remove
! 	     those only after all devirtualizable virtual calls are processed.
! 	     Lacking may edges in callgraph we just preserve them post
! 	     inlining.  */
! 	  && (!DECL_VIRTUAL_P (e->callee->decl)
! 	      || (!DECL_COMDAT (e->callee->decl)
! 		  && !DECL_EXTERNAL (e->callee->decl)))
! 	  /* Don't reuse if more than one function shares a comdat group.
! 	     If the other function(s) are needed, we need to emit even
! 	     this function out of line.  */
! 	  && !e->callee->same_comdat_group
! 	  /* During early inlining some unanalyzed cgraph nodes might be in the
! 	     callgraph and they might reffer the function in question.  */
! 	  && !cgraph_new_nodes)
  	{
  	  gcc_assert (!e->callee->global.inlined_to);
  	  if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
--- 126,132 ----
  	  /* Recursive inlining never wants the master clone to
  	     be overwritten.  */
  	  && update_original
! 	  && can_remove_node_now_p (e->callee))
  	{
  	  gcc_assert (!e->callee->global.inlined_to);
  	  if (e->callee->analyzed && !DECL_EXTERNAL (e->callee->decl))
*************** inline_call (struct cgraph_edge *e, bool
*** 164,182 ****
    int old_size = 0, new_size = 0;
    struct cgraph_node *to = NULL;
    struct cgraph_edge *curr = e;
  
    /* Don't inline inlined edges.  */
    gcc_assert (e->inline_failed);
    /* Don't even think of inlining inline clone.  */
!   gcc_assert (!e->callee->global.inlined_to);
  
    e->inline_failed = CIF_OK;
!   DECL_POSSIBLY_INLINED (e->callee->decl) = true;
  
    to = e->caller;
    if (to->global.inlined_to)
      to = to->global.inlined_to;
  
    clone_inlined_nodes (e, true, update_original, overall_size);
  
    gcc_assert (curr->callee->global.inlined_to == to);
--- 175,199 ----
    int old_size = 0, new_size = 0;
    struct cgraph_node *to = NULL;
    struct cgraph_edge *curr = e;
+   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
  
    /* Don't inline inlined edges.  */
    gcc_assert (e->inline_failed);
    /* Don't even think of inlining inline clone.  */
!   gcc_assert (!callee->global.inlined_to);
  
    e->inline_failed = CIF_OK;
!   DECL_POSSIBLY_INLINED (callee->decl) = true;
  
    to = e->caller;
    if (to->global.inlined_to)
      to = to->global.inlined_to;
  
+   /* If aliases are involved, redirect edge to the actual destination and
+      possibly remove the aliases.  */
+   if (e->callee != callee)
+     cgraph_redirect_edge_callee (e, callee);
+ 
    clone_inlined_nodes (e, true, update_original, overall_size);
  
    gcc_assert (curr->callee->global.inlined_to == to);
Index: ipa-inline.c
===================================================================
*** ipa-inline.c	(revision 174895)
--- ipa-inline.c	(working copy)
*************** static bool
*** 137,143 ****
  caller_growth_limits (struct cgraph_edge *e)
  {
    struct cgraph_node *to = e->caller;
!   struct cgraph_node *what = e->callee;
    int newsize;
    int limit = 0;
    HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
--- 137,143 ----
  caller_growth_limits (struct cgraph_edge *e)
  {
    struct cgraph_node *to = e->caller;
!   struct cgraph_node *what = cgraph_function_or_thunk_node (e->callee, NULL);
    int newsize;
    int limit = 0;
    HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
*************** static bool
*** 237,258 ****
  can_inline_edge_p (struct cgraph_edge *e, bool report)
  {
    bool inlinable = true;
    tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
!   tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->callee->decl);
  
    gcc_assert (e->inline_failed);
  
!   if (!e->callee->analyzed)
      {
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
        inlinable = false;
      }
!   else if (!inline_summary (e->callee)->inlinable)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
        inlinable = false;
      }
!   else if (cgraph_function_body_availability (e->callee) <= AVAIL_OVERWRITABLE)
      {
        e->inline_failed = CIF_OVERWRITABLE;
        return false;
--- 237,260 ----
  can_inline_edge_p (struct cgraph_edge *e, bool report)
  {
    bool inlinable = true;
+   enum availability avail;
+   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, &avail);
    tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
!   tree callee_tree = callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
  
    gcc_assert (e->inline_failed);
  
!   if (!callee || !callee->analyzed)
      {
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
        inlinable = false;
      }
!   else if (!inline_summary (callee)->inlinable)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
        inlinable = false;
      }
!   else if (avail <= AVAIL_OVERWRITABLE)
      {
        e->inline_failed = CIF_OVERWRITABLE;
        return false;
*************** can_inline_edge_p (struct cgraph_edge *e
*** 264,272 ****
      }
    /* Don't inline if the functions have different EH personalities.  */
    else if (DECL_FUNCTION_PERSONALITY (e->caller->decl)
! 	   && DECL_FUNCTION_PERSONALITY (e->callee->decl)
  	   && (DECL_FUNCTION_PERSONALITY (e->caller->decl)
! 	       != DECL_FUNCTION_PERSONALITY (e->callee->decl)))
      {
        e->inline_failed = CIF_EH_PERSONALITY;
        inlinable = false;
--- 266,274 ----
      }
    /* Don't inline if the functions have different EH personalities.  */
    else if (DECL_FUNCTION_PERSONALITY (e->caller->decl)
! 	   && DECL_FUNCTION_PERSONALITY (callee->decl)
  	   && (DECL_FUNCTION_PERSONALITY (e->caller->decl)
! 	       != DECL_FUNCTION_PERSONALITY (callee->decl)))
      {
        e->inline_failed = CIF_EH_PERSONALITY;
        inlinable = false;
*************** can_inline_edge_p (struct cgraph_edge *e
*** 275,283 ****
       caller cannot.
       FIXME: this is obviously wrong for LTO where STRUCT_FUNCTION is missing.
       Move the flag into cgraph node or mirror it in the inline summary.  */
!   else if (DECL_STRUCT_FUNCTION (e->callee->decl)
  	   && DECL_STRUCT_FUNCTION
! 	        (e->callee->decl)->can_throw_non_call_exceptions
  	   && !(DECL_STRUCT_FUNCTION (e->caller->decl)
  	        && DECL_STRUCT_FUNCTION
  		     (e->caller->decl)->can_throw_non_call_exceptions))
--- 277,285 ----
       caller cannot.
       FIXME: this is obviously wrong for LTO where STRUCT_FUNCTION is missing.
       Move the flag into cgraph node or mirror it in the inline summary.  */
!   else if (DECL_STRUCT_FUNCTION (callee->decl)
  	   && DECL_STRUCT_FUNCTION
! 	        (callee->decl)->can_throw_non_call_exceptions
  	   && !(DECL_STRUCT_FUNCTION (e->caller->decl)
  	        && DECL_STRUCT_FUNCTION
  		     (e->caller->decl)->can_throw_non_call_exceptions))
*************** can_inline_edge_p (struct cgraph_edge *e
*** 287,299 ****
      }
    /* Check compatibility of target optimization options.  */
    else if (!targetm.target_option.can_inline_p (e->caller->decl,
! 						e->callee->decl))
      {
        e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
        inlinable = false;
      }
    /* Check if caller growth allows the inlining.  */
!   else if (!DECL_DISREGARD_INLINE_LIMITS (e->callee->decl)
  	   && !lookup_attribute ("flatten",
  				 DECL_ATTRIBUTES
  				   (e->caller->global.inlined_to
--- 289,301 ----
      }
    /* Check compatibility of target optimization options.  */
    else if (!targetm.target_option.can_inline_p (e->caller->decl,
! 						callee->decl))
      {
        e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
        inlinable = false;
      }
    /* Check if caller growth allows the inlining.  */
!   else if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl)
  	   && !lookup_attribute ("flatten",
  				 DECL_ATTRIBUTES
  				   (e->caller->global.inlined_to
*************** can_inline_edge_p (struct cgraph_edge *e
*** 343,352 ****
  static bool
  can_early_inline_edge_p (struct cgraph_edge *e)
  {
    /* Early inliner might get called at WPA stage when IPA pass adds new
       function.  In this case we can not really do any of early inlining
       because function bodies are missing.  */
!   if (!gimple_has_body_p (e->callee->decl))
      {
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
        return false;
--- 345,356 ----
  static bool
  can_early_inline_edge_p (struct cgraph_edge *e)
  {
+   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
+ 							      NULL);
    /* Early inliner might get called at WPA stage when IPA pass adds new
       function.  In this case we can not really do any of early inlining
       because function bodies are missing.  */
!   if (!gimple_has_body_p (callee->decl))
      {
        e->inline_failed = CIF_BODY_NOT_AVAILABLE;
        return false;
*************** can_early_inline_edge_p (struct cgraph_e
*** 356,362 ****
       the callee by early inliner, yet).  We don't have CIF code for this
       case; later we will re-do the decision in the real inliner.  */
    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->caller->decl))
!       || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
      {
        if (dump_file)
  	fprintf (dump_file, "  edge not inlinable: not in SSA form\n");
--- 360,366 ----
       the callee by early inliner, yet).  We don't have CIF code for this
       case; later we will re-do the decision in the real inliner.  */
    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->caller->decl))
!       || !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
      {
        if (dump_file)
  	fprintf (dump_file, "  edge not inlinable: not in SSA form\n");
*************** static bool
*** 388,397 ****
  want_early_inline_function_p (struct cgraph_edge *e)
  {
    bool want_inline = true;
  
!   if (DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
      ;
!   else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
  	   && !flag_inline_small_functions)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
--- 392,402 ----
  want_early_inline_function_p (struct cgraph_edge *e)
  {
    bool want_inline = true;
+   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
  
!   if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
      ;
!   else if (!DECL_DECLARED_INLINE_P (callee->decl)
  	   && !flag_inline_small_functions)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
*************** want_early_inline_function_p (struct cgr
*** 410,427 ****
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "call is cold and code would grow by %i\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (e->callee), e->callee->uid,
  		     growth);
  	  want_inline = false;
  	}
!       else if (!leaf_node_p (e->callee)
  	       && growth > 0)
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "callee is not leaf and code would grow by %i\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (e->callee), e->callee->uid,
  		     growth);
  	  want_inline = false;
  	}
--- 415,432 ----
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "call is cold and code would grow by %i\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (callee), callee->uid,
  		     growth);
  	  want_inline = false;
  	}
!       else if (!leaf_node_p (callee)
  	       && growth > 0)
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "callee is not leaf and code would grow by %i\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (callee), callee->uid,
  		     growth);
  	  want_inline = false;
  	}
*************** want_early_inline_function_p (struct cgr
*** 431,437 ****
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "growth %i exceeds --param early-inlining-insns\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (e->callee), e->callee->uid,
  		     growth);
  	  want_inline = false;
  	}
--- 436,442 ----
  	    fprintf (dump_file, "  will not early inline: %s/%i->%s/%i, "
  		     "growth %i exceeds --param early-inlining-insns\n",
  		     cgraph_node_name (e->caller), e->caller->uid,
! 		     cgraph_node_name (callee), callee->uid,
  		     growth);
  	  want_inline = false;
  	}
*************** static bool
*** 446,455 ****
  want_inline_small_function_p (struct cgraph_edge *e, bool report)
  {
    bool want_inline = true;
  
!   if (DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
      ;
!   else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
  	   && !flag_inline_small_functions)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
--- 451,461 ----
  want_inline_small_function_p (struct cgraph_edge *e, bool report)
  {
    bool want_inline = true;
+   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
  
!   if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
      ;
!   else if (!DECL_DECLARED_INLINE_P (callee->decl)
  	   && !flag_inline_small_functions)
      {
        e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
*************** want_inline_small_function_p (struct cgr
*** 461,479 ****
  
        if (growth <= 0)
  	;
!       else if (DECL_DECLARED_INLINE_P (e->callee->decl)
  	       && growth >= MAX_INLINE_INSNS_SINGLE)
  	{
            e->inline_failed = CIF_MAX_INLINE_INSNS_SINGLE_LIMIT;
  	  want_inline = false;
  	}
!       else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
  	       && !flag_inline_functions)
  	{
            e->inline_failed = CIF_NOT_DECLARED_INLINED;
  	  want_inline = false;
  	}
!       else if (!DECL_DECLARED_INLINE_P (e->callee->decl)
  	       && growth >= MAX_INLINE_INSNS_AUTO)
  	{
            e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
--- 467,485 ----
  
        if (growth <= 0)
  	;
!       else if (DECL_DECLARED_INLINE_P (callee->decl)
  	       && growth >= MAX_INLINE_INSNS_SINGLE)
  	{
            e->inline_failed = CIF_MAX_INLINE_INSNS_SINGLE_LIMIT;
  	  want_inline = false;
  	}
!       else if (!DECL_DECLARED_INLINE_P (callee->decl)
  	       && !flag_inline_functions)
  	{
            e->inline_failed = CIF_NOT_DECLARED_INLINED;
  	  want_inline = false;
  	}
!       else if (!DECL_DECLARED_INLINE_P (callee->decl)
  	       && growth >= MAX_INLINE_INSNS_AUTO)
  	{
            e->inline_failed = CIF_MAX_INLINE_INSNS_AUTO_LIMIT;
*************** want_inline_small_function_p (struct cgr
*** 495,501 ****
  	 "good" calls, we will realize that keeping the function around is
  	 better.  */
        else if (!cgraph_maybe_hot_edge_p (e)
! 	       && (DECL_EXTERNAL (e->callee->decl)
  
  		   /* Unlike for functions called once, we play unsafe with
  		      COMDATs.  We can allow that since we know functions
--- 501,507 ----
  	 "good" calls, we will realize that keeping the function around is
  	 better.  */
        else if (!cgraph_maybe_hot_edge_p (e)
! 	       && (DECL_EXTERNAL (callee->decl)
  
  		   /* Unlike for functions called once, we play unsafe with
  		      COMDATs.  We can allow that since we know functions
*************** want_inline_small_function_p (struct cgr
*** 510,517 ****
  		      instead of
  		      cgraph_will_be_removed_from_program_if_no_direct_calls  */
  
! 		   || !cgraph_can_remove_if_no_direct_calls_p (e->callee)
! 		   || estimate_growth (e->callee) > 0))
  	{
            e->inline_failed = CIF_UNLIKELY_CALL;
  	  want_inline = false;
--- 516,523 ----
  		      instead of
  		      cgraph_will_be_removed_from_program_if_no_direct_calls  */
  
! 		   || !cgraph_can_remove_if_no_direct_calls_p (callee)
! 		   || estimate_growth (callee) > 0))
  	{
            e->inline_failed = CIF_UNLIKELY_CALL;
  	  want_inline = false;
*************** want_inline_self_recursive_call_p (struc
*** 544,550 ****
    int caller_freq = CGRAPH_FREQ_BASE;
    int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
  
!   if (DECL_DECLARED_INLINE_P (edge->callee->decl))
      max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
  
    if (!cgraph_maybe_hot_edge_p (edge))
--- 550,556 ----
    int caller_freq = CGRAPH_FREQ_BASE;
    int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
  
!   if (DECL_DECLARED_INLINE_P (edge->caller->decl))
      max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
  
    if (!cgraph_maybe_hot_edge_p (edge))
*************** want_inline_self_recursive_call_p (struc
*** 644,649 ****
--- 650,657 ----
  static bool
  want_inline_function_called_once_p (struct cgraph_node *node)
  {
+    if (node->alias)
+      return false;
     /* Already inlined?  */
     if (node->global.inlined_to)
       return false;
*************** edge_badness (struct cgraph_edge *edge, 
*** 708,716 ****
  {
    gcov_type badness;
    int growth, time_growth;
!   struct inline_summary *callee_info = inline_summary (edge->callee);
  
!   if (DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl))
      return INT_MIN;
  
    growth = estimate_edge_growth (edge);
--- 716,726 ----
  {
    gcov_type badness;
    int growth, time_growth;
!   struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee,
! 							      NULL);
!   struct inline_summary *callee_info = inline_summary (callee);
  
!   if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
      return INT_MIN;
  
    growth = estimate_edge_growth (edge);
*************** edge_badness (struct cgraph_edge *edge, 
*** 720,726 ****
      {
        fprintf (dump_file, "    Badness calculation for %s -> %s\n",
  	       cgraph_node_name (edge->caller),
! 	       cgraph_node_name (edge->callee));
        fprintf (dump_file, "      size growth %i, time growth %i\n",
  	       growth,
  	       time_growth);
--- 730,736 ----
      {
        fprintf (dump_file, "    Badness calculation for %s -> %s\n",
  	       cgraph_node_name (edge->caller),
! 	       cgraph_node_name (callee));
        fprintf (dump_file, "      size growth %i, time growth %i\n",
  	       growth,
  	       time_growth);
*************** edge_badness (struct cgraph_edge *edge, 
*** 808,814 ****
  	 We might mix the valud into the fraction by taking into account
  	 relative growth of the unit, but for now just add the number
  	 into resulting fraction.  */
!       growth_for_all = estimate_growth (edge->callee);
        badness += growth_for_all;
        if (badness > INT_MAX - 1)
  	badness = INT_MAX - 1;
--- 818,824 ----
  	 We might mix the valud into the fraction by taking into account
  	 relative growth of the unit, but for now just add the number
  	 into resulting fraction.  */
!       growth_for_all = estimate_growth (callee);
        badness += growth_for_all;
        if (badness > INT_MAX - 1)
  	badness = INT_MAX - 1;
*************** edge_badness (struct cgraph_edge *edge, 
*** 828,834 ****
    else
      {
        int nest = MIN (inline_edge_summary (edge)->loop_depth, 8);
!       badness = estimate_growth (edge->callee) * 256;
  
        /* Decrease badness if call is nested.  */
        if (badness > 0)
--- 838,844 ----
    else
      {
        int nest = MIN (inline_edge_summary (edge)->loop_depth, 8);
!       badness = estimate_growth (callee) * 256;
  
        /* Decrease badness if call is nested.  */
        if (badness > 0)
*************** update_callee_keys (fibheap_t heap, stru
*** 1002,1014 ****
        e = e->callee->callees;
      else
        {
  	/* We do not reset callee growth cache here.  Since we added a new call,
  	   growth chould have just increased and consequentely badness metric
             don't need updating.  */
  	if (e->inline_failed
! 	    && inline_summary (e->callee)->inlinable
! 	    && cgraph_function_body_availability (e->callee) >= AVAIL_AVAILABLE
! 	    && !bitmap_bit_p (updated_nodes, e->callee->uid))
  	  {
  	    if (can_inline_edge_p (e, false)
  		&& want_inline_small_function_p (e, false))
--- 1012,1027 ----
        e = e->callee->callees;
      else
        {
+ 	enum availability avail;
+ 	struct cgraph_node *callee;
  	/* We do not reset callee growth cache here.  Since we added a new call,
  	   growth chould have just increased and consequentely badness metric
             don't need updating.  */
  	if (e->inline_failed
! 	    && (callee = cgraph_function_or_thunk_node (e->callee, &avail))
! 	    && inline_summary (callee)->inlinable
! 	    && cgraph_function_body_availability (callee) >= AVAIL_AVAILABLE
! 	    && !bitmap_bit_p (updated_nodes, callee->uid))
  	  {
  	    if (can_inline_edge_p (e, false)
  		&& want_inline_small_function_p (e, false))
*************** update_all_callee_keys (fibheap_t heap, 
*** 1044,1050 ****
  			bitmap updated_nodes)
  {
    struct cgraph_edge *e = node->callees;
- 
    if (!e)
      return;
    while (true)
--- 1057,1062 ----
*************** update_all_callee_keys (fibheap_t heap, 
*** 1052,1062 ****
        e = e->callee->callees;
      else
        {
  	/* We inlined and thus callees might have different number of calls.
  	   Reset their caches  */
!         reset_node_growth_cache (e->callee);
  	if (e->inline_failed)
! 	  update_caller_keys (heap, e->callee, updated_nodes, e);
  	if (e->next_callee)
  	  e = e->next_callee;
  	else
--- 1064,1077 ----
        e = e->callee->callees;
      else
        {
+ 	struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
+ 								    NULL);
+ 
  	/* We inlined and thus callees might have different number of calls.
  	   Reset their caches  */
!         reset_node_growth_cache (callee);
  	if (e->inline_failed)
! 	  update_caller_keys (heap, callee, updated_nodes, e);
  	if (e->next_callee)
  	  e = e->next_callee;
  	else
*************** lookup_recursive_calls (struct cgraph_no
*** 1081,1088 ****
  			fibheap_t heap)
  {
    struct cgraph_edge *e;
    for (e = where->callees; e; e = e->next_callee)
!     if (e->callee == node)
        {
  	/* When profile feedback is available, prioritize by expected number
  	   of calls.  */
--- 1096,1107 ----
  			fibheap_t heap)
  {
    struct cgraph_edge *e;
+   enum availability avail;
+ 
    for (e = where->callees; e; e = e->next_callee)
!     if (e->callee == node
! 	|| (cgraph_function_or_thunk_node (e->callee, &avail) == node
! 	    && avail > AVAIL_OVERWRITABLE))
        {
  	/* When profile feedback is available, prioritize by expected number
  	   of calls.  */
*************** add_new_edges_to_heap (fibheap_t heap, V
*** 1240,1247 ****
        struct cgraph_edge *edge = VEC_pop (cgraph_edge_p, new_edges);
  
        gcc_assert (!edge->aux);
!       if (inline_summary (edge->callee)->inlinable
! 	  && edge->inline_failed
  	  && can_inline_edge_p (edge, true)
  	  && want_inline_small_function_p (edge, true))
          edge->aux = fibheap_insert (heap, edge_badness (edge, false), edge);
--- 1259,1265 ----
        struct cgraph_edge *edge = VEC_pop (cgraph_edge_p, new_edges);
  
        gcc_assert (!edge->aux);
!       if (edge->inline_failed
  	  && can_inline_edge_p (edge, true)
  	  && want_inline_small_function_p (edge, true))
          edge->aux = fibheap_insert (heap, edge_badness (edge, false), edge);
*************** inline_small_functions (void)
*** 1283,1292 ****
    FOR_EACH_DEFINED_FUNCTION (node)
      if (!node->global.inlined_to)
        {
! 	struct inline_summary *info = inline_summary (node);
  
! 	if (!DECL_EXTERNAL (node->decl))
! 	  initial_size += info->size;
  
  	for (edge = node->callers; edge; edge = edge->next_caller)
  	  if (max_count < edge->count)
--- 1301,1314 ----
    FOR_EACH_DEFINED_FUNCTION (node)
      if (!node->global.inlined_to)
        {
! 	if (cgraph_function_with_gimple_body_p (node)
! 	    || node->thunk.thunk_p)
! 	  {
! 	    struct inline_summary *info = inline_summary (node);
  
! 	    if (!DECL_EXTERNAL (node->decl))
! 	      initial_size += info->size;
! 	  }
  
  	for (edge = node->callers; edge; edge = edge->next_caller)
  	  if (max_count < edge->count)
*************** inline_small_functions (void)
*** 1355,1368 ****
        if (!can_inline_edge_p (edge, true))
  	continue;
        
!       callee = edge->callee;
        growth = estimate_edge_growth (edge);
        if (dump_file)
  	{
  	  fprintf (dump_file,
  		   "\nConsidering %s with %i size\n",
! 		   cgraph_node_name (edge->callee),
! 		   inline_summary (edge->callee)->size);
  	  fprintf (dump_file,
  		   " to be inlined into %s in %s:%i\n"
  		   " Estimated growth after inlined into all is %+i insns.\n"
--- 1377,1390 ----
        if (!can_inline_edge_p (edge, true))
  	continue;
        
!       callee = cgraph_function_or_thunk_node (edge->callee, NULL);
        growth = estimate_edge_growth (edge);
        if (dump_file)
  	{
  	  fprintf (dump_file,
  		   "\nConsidering %s with %i size\n",
! 		   cgraph_node_name (callee),
! 		   inline_summary (callee)->size);
  	  fprintf (dump_file,
  		   " to be inlined into %s in %s:%i\n"
  		   " Estimated growth after inlined into all is %+i insns.\n"
*************** inline_small_functions (void)
*** 1372,1378 ****
  		   : gimple_filename ((const_gimple) edge->call_stmt),
  		   flag_wpa ? -1
  		   : gimple_lineno ((const_gimple) edge->call_stmt),
! 		   estimate_growth (edge->callee),
  		   badness,
  		   edge->frequency / (double)CGRAPH_FREQ_BASE);
  	  if (edge->count)
--- 1394,1400 ----
  		   : gimple_filename ((const_gimple) edge->call_stmt),
  		   flag_wpa ? -1
  		   : gimple_lineno ((const_gimple) edge->call_stmt),
! 		   estimate_growth (callee),
  		   badness,
  		   edge->frequency / (double)CGRAPH_FREQ_BASE);
  	  if (edge->count)
*************** inline_small_functions (void)
*** 1383,1389 ****
  	}
  
        if (overall_size + growth > max_size
! 	  && !DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl))
  	{
  	  edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
  	  report_inline_failed_reason (edge);
--- 1405,1411 ----
  	}
  
        if (overall_size + growth > max_size
! 	  && !DECL_DISREGARD_INLINE_LIMITS (callee->decl))
  	{
  	  edge->inline_failed = CIF_INLINE_UNIT_GROWTH_LIMIT;
  	  report_inline_failed_reason (edge);
*************** inline_small_functions (void)
*** 1418,1424 ****
  	}
        else
  	{
- 	  struct cgraph_node *callee;
  	  struct cgraph_node *outer_node = NULL;
  	  int depth = 0;
  
--- 1440,1445 ----
*************** inline_small_functions (void)
*** 1446,1452 ****
  	  else if (depth && dump_file)
  	    fprintf (dump_file, " Peeling recursion with depth %i\n", depth);
  
- 	  callee = edge->callee;
  	  gcc_checking_assert (!callee->global.inlined_to);
  	  inline_call (edge, true, &new_indirect_edges, &overall_size);
  	  if (flag_indirect_inlining)
--- 1467,1472 ----
*************** flatten_function (struct cgraph_node *no
*** 1529,1542 ****
    for (e = node->callees; e; e = e->next_callee)
      {
        struct cgraph_node *orig_callee;
  
        /* We've hit cycle?  It is time to give up.  */
!       if (e->callee->aux)
  	{
  	  if (dump_file)
  	    fprintf (dump_file,
  		     "Not inlining %s into %s to avoid cycle.\n",
! 		     cgraph_node_name (e->callee),
  		     cgraph_node_name (e->caller));
  	  e->inline_failed = CIF_RECURSIVE_INLINING;
  	  continue;
--- 1549,1563 ----
    for (e = node->callees; e; e = e->next_callee)
      {
        struct cgraph_node *orig_callee;
+       struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
  
        /* We've hit cycle?  It is time to give up.  */
!       if (callee->aux)
  	{
  	  if (dump_file)
  	    fprintf (dump_file,
  		     "Not inlining %s into %s to avoid cycle.\n",
! 		     cgraph_node_name (callee),
  		     cgraph_node_name (e->caller));
  	  e->inline_failed = CIF_RECURSIVE_INLINING;
  	  continue;
*************** flatten_function (struct cgraph_node *no
*** 1546,1552 ****
  	 it in order to fully flatten the leaves.  */
        if (!e->inline_failed)
  	{
! 	  flatten_function (e->callee, early);
  	  continue;
  	}
  
--- 1567,1573 ----
  	 it in order to fully flatten the leaves.  */
        if (!e->inline_failed)
  	{
! 	  flatten_function (callee, early);
  	  continue;
  	}
  
*************** flatten_function (struct cgraph_node *no
*** 1566,1572 ****
  	}
  
        if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
! 	  != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (e->callee->decl)))
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "Not inlining: SSA form does not match.\n");
--- 1587,1593 ----
  	}
  
        if (gimple_in_ssa_p (DECL_STRUCT_FUNCTION (node->decl))
! 	  != gimple_in_ssa_p (DECL_STRUCT_FUNCTION (callee->decl)))
  	{
  	  if (dump_file)
  	    fprintf (dump_file, "Not inlining: SSA form does not match.\n");
*************** flatten_function (struct cgraph_node *no
*** 1577,1585 ****
           recursing through the original node if the node was cloned.  */
        if (dump_file)
  	fprintf (dump_file, " Inlining %s into %s.\n",
! 		 cgraph_node_name (e->callee),
  		 cgraph_node_name (e->caller));
!       orig_callee = e->callee;
        inline_call (e, true, NULL, NULL);
        if (e->callee != orig_callee)
  	orig_callee->aux = (void *) node;
--- 1598,1606 ----
           recursing through the original node if the node was cloned.  */
        if (dump_file)
  	fprintf (dump_file, " Inlining %s into %s.\n",
! 		 cgraph_node_name (callee),
  		 cgraph_node_name (e->caller));
!       orig_callee = callee;
        inline_call (e, true, NULL, NULL);
        if (e->callee != orig_callee)
  	orig_callee->aux = (void *) node;
*************** inline_always_inline_functions (struct c
*** 1727,1733 ****
  
    for (e = node->callees; e; e = e->next_callee)
      {
!       if (!DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
  	continue;
  
        if (cgraph_edge_recursive_p (e))
--- 1748,1755 ----
  
    for (e = node->callees; e; e = e->next_callee)
      {
!       struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
!       if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
  	continue;
  
        if (cgraph_edge_recursive_p (e))
*************** early_inline_small_functions (struct cgr
*** 1764,1782 ****
  
    for (e = node->callees; e; e = e->next_callee)
      {
!       if (!inline_summary (e->callee)->inlinable
  	  || !e->inline_failed)
  	continue;
  
        /* Do not consider functions not declared inline.  */
!       if (!DECL_DECLARED_INLINE_P (e->callee->decl)
  	  && !flag_inline_small_functions
  	  && !flag_inline_functions)
  	continue;
  
        if (dump_file)
  	fprintf (dump_file, "Considering inline candidate %s.\n",
! 		 cgraph_node_name (e->callee));
  
        if (!can_early_inline_edge_p (e))
  	continue;
--- 1786,1805 ----
  
    for (e = node->callees; e; e = e->next_callee)
      {
!       struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
!       if (!inline_summary (callee)->inlinable
  	  || !e->inline_failed)
  	continue;
  
        /* Do not consider functions not declared inline.  */
!       if (!DECL_DECLARED_INLINE_P (callee->decl)
  	  && !flag_inline_small_functions
  	  && !flag_inline_functions)
  	continue;
  
        if (dump_file)
  	fprintf (dump_file, "Considering inline candidate %s.\n",
! 		 cgraph_node_name (callee));
  
        if (!can_early_inline_edge_p (e))
  	continue;
*************** early_inline_small_functions (struct cgr
*** 1793,1799 ****
  
        if (dump_file)
  	fprintf (dump_file, " Inlining %s into %s.\n",
! 		 cgraph_node_name (e->callee),
  		 cgraph_node_name (e->caller));
        inline_call (e, true, NULL, NULL);
        inlined = true;
--- 1816,1822 ----
  
        if (dump_file)
  	fprintf (dump_file, " Inlining %s into %s.\n",
! 		 cgraph_node_name (callee),
  		 cgraph_node_name (e->caller));
        inline_call (e, true, NULL, NULL);
        inlined = true;
Index: ipa-inline-analysis.c
===================================================================
*** ipa-inline-analysis.c	(revision 174895)
--- ipa-inline-analysis.c	(working copy)
*************** static clause_t
*** 589,595 ****
  evaluate_conditions_for_edge (struct cgraph_edge *e, bool inline_p)
  {
    clause_t clause = inline_p ? 0 : 1 << predicate_not_inlined_condition;
!   struct inline_summary *info = inline_summary (e->callee);
    int i;
  
    if (ipa_node_params_vector && info->conds
--- 589,596 ----
  evaluate_conditions_for_edge (struct cgraph_edge *e, bool inline_p)
  {
    clause_t clause = inline_p ? 0 : 1 << predicate_not_inlined_condition;
!   struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
!   struct inline_summary *info = inline_summary (callee);
    int i;
  
    if (ipa_node_params_vector && info->conds
*************** evaluate_conditions_for_edge (struct cgr
*** 615,621 ****
  	  if (cst)
  	    VEC_replace (tree, known_vals, i, cst);
  	}
!       clause = evaluate_conditions_for_known_args (e->callee,
  						   inline_p, known_vals);
        VEC_free (tree, heap, known_vals);
      }
--- 616,622 ----
  	  if (cst)
  	    VEC_replace (tree, known_vals, i, cst);
  	}
!       clause = evaluate_conditions_for_known_args (callee,
  						   inline_p, known_vals);
        VEC_free (tree, heap, known_vals);
      }
*************** dump_inline_edge_summary (FILE * f, int 
*** 919,927 ****
    for (edge = node->callees; edge; edge = edge->next_callee)
      {
        struct inline_edge_summary *es = inline_edge_summary (edge);
        fprintf (f, "%*s%s/%i %s\n%*s  loop depth:%2i freq:%4i size:%2i time: %2i callee size:%2i stack:%2i",
! 	       indent, "", cgraph_node_name (edge->callee),
! 	       edge->callee->uid, 
  	       !edge->inline_failed ? "inlined"
  	       : cgraph_inline_failed_string (edge->inline_failed),
  	       indent, "",
--- 920,929 ----
    for (edge = node->callees; edge; edge = edge->next_callee)
      {
        struct inline_edge_summary *es = inline_edge_summary (edge);
+       struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee, NULL);
        fprintf (f, "%*s%s/%i %s\n%*s  loop depth:%2i freq:%4i size:%2i time: %2i callee size:%2i stack:%2i",
! 	       indent, "", cgraph_node_name (callee),
! 	       callee->uid, 
  	       !edge->inline_failed ? "inlined"
  	       : cgraph_inline_failed_string (edge->inline_failed),
  	       indent, "",
*************** dump_inline_edge_summary (FILE * f, int 
*** 929,936 ****
                 edge->frequency,
  	       es->call_stmt_size,
  	       es->call_stmt_time,
! 	       (int)inline_summary (edge->callee)->size,
! 	       (int)inline_summary (edge->callee)->estimated_stack_size);
        if (es->predicate)
  	{
  	  fprintf (f, " predicate: ");
--- 931,938 ----
                 edge->frequency,
  	       es->call_stmt_size,
  	       es->call_stmt_time,
! 	       (int)inline_summary (callee)->size,
! 	       (int)inline_summary (callee)->estimated_stack_size);
        if (es->predicate)
  	{
  	  fprintf (f, " predicate: ");
*************** dump_inline_edge_summary (FILE * f, int 
*** 942,951 ****
  	{
            fprintf (f, "%*sStack frame offset %i, callee self size %i, callee size %i\n",
  		   indent+2, "",
! 		   (int)inline_summary (edge->callee)->stack_frame_offset,
! 		   (int)inline_summary (edge->callee)->estimated_self_stack_size,
! 		   (int)inline_summary (edge->callee)->estimated_stack_size);
! 	  dump_inline_edge_summary (f, indent+2, edge->callee, info);
  	}
      }
    for (edge = node->indirect_calls; edge; edge = edge->next_callee)
--- 944,953 ----
  	{
            fprintf (f, "%*sStack frame offset %i, callee self size %i, callee size %i\n",
  		   indent+2, "",
! 		   (int)inline_summary (callee)->stack_frame_offset,
! 		   (int)inline_summary (callee)->estimated_self_stack_size,
! 		   (int)inline_summary (callee)->estimated_stack_size);
! 	  dump_inline_edge_summary (f, indent+2, callee, info);
  	}
      }
    for (edge = node->indirect_calls; edge; edge = edge->next_callee)
*************** estimate_function_body_sizes (struct cgr
*** 1525,1531 ****
  	      /* Do not inline calls where we cannot triviall work around
  		 mismatches in argument or return types.  */
  	      if (edge->callee
! 		  && !gimple_check_call_matching_types (stmt, edge->callee->decl))
  		{
  		  edge->call_stmt_cannot_inline_p = true;
  		  gimple_call_set_cannot_inline (stmt, true);
--- 1527,1536 ----
  	      /* Do not inline calls where we cannot triviall work around
  		 mismatches in argument or return types.  */
  	      if (edge->callee
! 		  && cgraph_function_or_thunk_node (edge->callee, NULL)
! 		  && !gimple_check_call_matching_types (stmt,
! 							cgraph_function_or_thunk_node (edge->callee,
! 										       NULL)->decl))
  		{
  		  edge->call_stmt_cannot_inline_p = true;
  		  gimple_call_set_cannot_inline (stmt, true);
*************** int
*** 2110,2115 ****
--- 2115,2121 ----
  do_estimate_edge_growth (struct cgraph_edge *edge)
  {
    int size;
+   struct cgraph_node *callee;
  
    /* When we do caching, use do_estimate_edge_time to populate the entry.  */
  
*************** do_estimate_edge_growth (struct cgraph_e
*** 2122,2131 ****
        gcc_checking_assert (size);
        return size - (size > 0);
      }
  
    /* Early inliner runs without caching, go ahead and do the dirty work.  */
    gcc_checking_assert (edge->inline_failed);
!   estimate_node_size_and_time (edge->callee,
  			       evaluate_conditions_for_edge (edge, true),
  			       &size, NULL);
    gcc_checking_assert (inline_edge_summary (edge)->call_stmt_size);
--- 2128,2138 ----
        gcc_checking_assert (size);
        return size - (size > 0);
      }
+   callee = cgraph_function_or_thunk_node (edge->callee, NULL);
  
    /* Early inliner runs without caching, go ahead and do the dirty work.  */
    gcc_checking_assert (edge->inline_failed);
!   estimate_node_size_and_time (callee,
  			       evaluate_conditions_for_edge (edge, true),
  			       &size, NULL);
    gcc_checking_assert (inline_edge_summary (edge)->call_stmt_size);
*************** estimate_size_after_inlining (struct cgr
*** 2171,2185 ****
  }
  
  
! /* Estimate the growth caused by inlining NODE into all callees.  */
  
! int
! do_estimate_growth (struct cgraph_node *node)
  {
-   int growth = 0;
    struct cgraph_edge *e;
!   bool self_recursive = false;
!   struct inline_summary *info = inline_summary (node);
  
    for (e = node->callers; e; e = e->next_caller)
      {
--- 2178,2197 ----
  }
  
  
! struct growth_data
! {
!   bool self_recursive;
!   int growth;
! };
  
! 
! /* Worker for do_estimate_growth.  Collect growth for all callers.  */
! 
! static bool
! do_estimate_growth_1 (struct cgraph_node *node, void *data)
  {
    struct cgraph_edge *e;
!   struct growth_data *d = (struct growth_data *) data;
  
    for (e = node->callers; e; e = e->next_caller)
      {
*************** do_estimate_growth (struct cgraph_node *
*** 2188,2224 ****
        if (e->caller == node
  	  || (e->caller->global.inlined_to
  	      && e->caller->global.inlined_to == node))
!         self_recursive = true;
!       growth += estimate_edge_growth (e);
      }
!      
  
    /* For self recursive functions the growth estimation really should be
       infinity.  We don't want to return very large values because the growth
       plays various roles in badness computation fractions.  Be sure to not
       return zero or negative growths. */
!   if (self_recursive)
!     growth = growth < info->size ? info->size : growth;
    else
      {
!       if (cgraph_will_be_removed_from_program_if_no_direct_calls (node)
! 	  && !DECL_EXTERNAL (node->decl))
! 	growth -= info->size;
        /* COMDAT functions are very often not shared across multiple units since they
! 	 come from various template instantiations.  Take this into account.  */
        else  if (DECL_COMDAT (node->decl)
  		&& cgraph_can_remove_if_no_direct_calls_p (node))
! 	growth -= (info->size
! 		   * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY)) + 50) / 100;
      }
  
    if (node_growth_cache)
      {
        if ((int)VEC_length (int, node_growth_cache) <= node->uid)
  	VEC_safe_grow_cleared (int, heap, node_growth_cache, cgraph_max_uid);
!       VEC_replace (int, node_growth_cache, node->uid, growth + (growth >= 0));
      }
!   return growth;
  }
  
  
--- 2200,2249 ----
        if (e->caller == node
  	  || (e->caller->global.inlined_to
  	      && e->caller->global.inlined_to == node))
!         d->self_recursive = true;
!       d->growth += estimate_edge_growth (e);
      }
!   return false;
! }
! 
! 
! /* Estimate the growth caused by inlining NODE into all callees.  */
! 
! int
! do_estimate_growth (struct cgraph_node *node)
! {
!   struct growth_data d = {0, false};
!   struct inline_summary *info = inline_summary (node);
! 
!   cgraph_for_node_and_aliases (node, do_estimate_growth_1, &d, true);
  
    /* For self recursive functions the growth estimation really should be
       infinity.  We don't want to return very large values because the growth
       plays various roles in badness computation fractions.  Be sure to not
       return zero or negative growths. */
!   if (d.self_recursive)
!     d.growth = d.growth < info->size ? info->size : d.growth;
    else
      {
!       if (!DECL_EXTERNAL (node->decl)
! 	  && !cgraph_will_be_removed_from_program_if_no_direct_calls (node))
! 	d.growth -= info->size;
        /* COMDAT functions are very often not shared across multiple units since they
! 	 come from various template instantiations.  Take this into account.
!          FIXME: allow also COMDATs with COMDAT aliases.  */
        else  if (DECL_COMDAT (node->decl)
  		&& cgraph_can_remove_if_no_direct_calls_p (node))
! 	d.growth -= (info->size
! 		     * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY)) + 50) / 100;
      }
  
    if (node_growth_cache)
      {
        if ((int)VEC_length (int, node_growth_cache) <= node->uid)
  	VEC_safe_grow_cleared (int, heap, node_growth_cache, cgraph_max_uid);
!       VEC_replace (int, node_growth_cache, node->uid, d.growth + (d.growth >= 0));
      }
!   return d.growth;
  }
  
  
*************** inline_generate_summary (void)
*** 2282,2287 ****
--- 2307,2313 ----
      ipa_register_cgraph_hooks ();
  
    FOR_EACH_DEFINED_FUNCTION (node)
+     if (!node->alias)
        inline_analyze_function (node);
  }
  
