Date: Fri, 10 Jun 2011 10:15:00 -0000
From: Jan Hubicka
To: gcc-patches@gcc.gnu.org
Subject: Cgraph alias reorg 4/14 (ipa-pure-const and ipa-reference update)
Message-ID: <20110610101239.GB28776@kam.mff.cuni.cz>

Hi,
this patch updates ipa-pure-const and ipa-reference to be ready for aliases.
There is nothing difficult here: the passes propagate from caller to callee,
so all they need is to skip the aliases and be aware that body visibility
may change from edge to edge.  (A stand-alone sketch of the alias-aware
callee walk follows the patch below.)

Bootstrapped/regtested x86_64-linux, will commit it shortly.

Honza

	* ipa-utils.c (searchc): Use cgraph_function_or_thunk_node.
	* ipa-pure-const.c (analyze_function): Aliases don't need analysis.
	(self_recursive_p): Use cgraph_function_node.
	(propagate_pure_const): Likewise.
	(propagate_nothrow): Likewise.
	* ipa-reference.c (ipa_reference_get_not_read_global): Use
	cgraph_function_node.
	(propagate_bits): Likewise.
	(propagate): Likewise.

Index: ipa-utils.c
===================================================================
--- ipa-utils.c	(revision 174877)
+++ ipa-utils.c	(working copy)
@@ -101,10 +101,10 @@ searchc (struct searchc_env* env, struct
   for (edge = v->callees; edge; edge = edge->next_callee)
     {
       struct ipa_dfs_info * w_info;
-      struct cgraph_node *w = edge->callee;
-      enum availability avail = cgraph_function_body_availability (w);
+      enum availability avail;
+      struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);
 
-      if (ignore_edge && ignore_edge (edge))
+      if (!w || (ignore_edge && ignore_edge (edge)))
         continue;
 
       if (w->aux
Index: ipa-pure-const.c
===================================================================
--- ipa-pure-const.c	(revision 174877)
+++ ipa-pure-const.c	(working copy)
@@ -735,7 +735,7 @@ analyze_function (struct cgraph_node *fn
                      flags_from_decl_or_type (fn->decl),
                      cgraph_node_cannot_return (fn));
 
-  if (fn->thunk.thunk_p)
+  if (fn->thunk.thunk_p || fn->alias)
     {
       /* Thunk gets propagated through, so nothing interesting happens.  */
       gcc_assert (ipa);
@@ -1070,14 +1070,16 @@ ignore_edge (struct cgraph_edge *e)
   return (!e->can_throw_external);
 }
 
-/* Return true if NODE is self recursive function.  */
+/* Return true if NODE is self recursive function.
+   ??? self recursive and indirectly recursive functions should
+   be the same, so this function seems unnecessary.  */
 
 static bool
 self_recursive_p (struct cgraph_node *node)
 {
   struct cgraph_edge *e;
   for (e = node->callees; e; e = e->next_callee)
-    if (e->callee == node)
+    if (cgraph_function_node (e->callee, NULL) == node)
       return true;
   return false;
 }
@@ -1167,7 +1169,8 @@ propagate_pure_const (void)
           /* Now walk the edges and merge in callee properties.  */
           for (e = w->callees; e; e = e->next_callee)
             {
-              struct cgraph_node *y = e->callee;
+              enum availability avail;
+              struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
               enum pure_const_state_e edge_state = IPA_CONST;
               bool edge_looping = false;
 
@@ -1178,7 +1181,7 @@ propagate_pure_const (void)
                          cgraph_node_name (e->callee),
                          e->callee->uid);
                 }
-              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
+              if (avail > AVAIL_OVERWRITABLE)
                 {
                   funct_state y_l = get_function_state (y);
                   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1396,9 +1399,10 @@ propagate_nothrow (void)
 
           for (e = w->callees; e; e = e->next_callee)
             {
-              struct cgraph_node *y = e->callee;
+              enum availability avail;
+              struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
 
-              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
+              if (avail > AVAIL_OVERWRITABLE)
                 {
                   funct_state y_l = get_function_state (y);
 
Index: ipa-reference.c
===================================================================
--- ipa-reference.c	(revision 174877)
+++ ipa-reference.c	(working copy)
@@ -196,7 +196,7 @@ ipa_reference_get_not_read_global (struc
 {
   ipa_reference_optimization_summary_t info;
 
-  info = get_reference_optimization_summary (fn);
+  info = get_reference_optimization_summary (cgraph_function_node (fn, NULL));
   if (info)
     return info->statics_not_read;
   else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
@@ -301,16 +301,17 @@ propagate_bits (ipa_reference_global_var
   struct cgraph_edge *e;
   for (e = x->callees; e; e = e->next_callee)
     {
-      struct cgraph_node *y = e->callee;
       enum availability avail;
+      struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
 
-      avail = cgraph_function_body_availability (e->callee);
+      if (!y)
+        continue;
       /* Only look into nodes we can propagate something.  */
       if (avail > AVAIL_OVERWRITABLE
           || (avail == AVAIL_OVERWRITABLE
-              && (flags_from_decl_or_type (e->callee->decl) & ECF_LEAF)))
+              && (flags_from_decl_or_type (y->decl) & ECF_LEAF)))
         {
-          int flags = flags_from_decl_or_type (e->callee->decl);
+          int flags = flags_from_decl_or_type (y->decl);
           if (get_reference_vars_info (y))
             {
               ipa_reference_vars_info_t y_info
@@ -663,8 +664,12 @@ propagate (void)
       read_write_all_from_decl (node, &read_all, &write_all);
 
       for (e = node->callees; e; e = e->next_callee)
-        if (cgraph_function_body_availability (e->callee) <= AVAIL_OVERWRITABLE)
-          read_write_all_from_decl (e->callee, &read_all, &write_all);
+        {
+          enum availability avail;
+          struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
+          if (!callee || avail <= AVAIL_OVERWRITABLE)
+            read_write_all_from_decl (callee, &read_all, &write_all);
+        }
 
       for (ie = node->indirect_calls; ie; ie = ie->next_callee)
         if (!(ie->indirect_info->ecf_flags & ECF_CONST))
@@ -696,8 +701,13 @@ propagate (void)
             read_write_all_from_decl (w, &read_all, &write_all);
 
           for (e = w->callees; e; e = e->next_callee)
-            if (cgraph_function_body_availability (e->callee) <= AVAIL_OVERWRITABLE)
-              read_write_all_from_decl (e->callee, &read_all, &write_all);
+            {
+              enum availability avail;
+              struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
+
+              if (avail <= AVAIL_OVERWRITABLE)
+                read_write_all_from_decl (callee, &read_all, &write_all);
+            }
 
           for (ie = w->indirect_calls; ie; ie = ie->next_callee)
             if (!(ie->indirect_info->ecf_flags & ECF_CONST))
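
For readers who do not live in the cgraph code, the stand-alone sketch below
models the pattern every hunk above applies.  All types and helpers in it
(node, call_edge, resolve_callee, propagate_pure) are illustrative mock-ups,
not GCC's real cgraph_node/cgraph_edge/cgraph_function_node API: each call
edge's callee is first resolved through any chain of aliases, and only the
availability of the resolved body decides whether its summary may be merged
into the caller.

/* Minimal mock-up of an alias-aware callee walk; not the GCC API.  */
#include <stdio.h>
#include <stddef.h>

enum availability { AVAIL_NOT_AVAILABLE, AVAIL_OVERWRITABLE, AVAIL_AVAILABLE };

struct node
{
  const char *name;
  int is_alias;                 /* Nonzero if this node only aliases another.  */
  struct node *alias_target;    /* Node the alias refers to.  */
  enum availability avail;      /* Availability of this node's body.  */
  int pure;                     /* Toy summary bit to propagate.  */
};

struct call_edge
{
  struct node *callee;
  struct call_edge *next_callee;
};

/* Walk the alias chain and return the node that actually carries a body,
   recording its availability in *AVAIL; this is the role the patch gives
   to the per-edge callee lookup.  */
static struct node *
resolve_callee (struct node *n, enum availability *avail)
{
  while (n && n->is_alias)
    n = n->alias_target;
  if (avail)
    *avail = n ? n->avail : AVAIL_NOT_AVAILABLE;
  return n;
}

/* Merge callee summaries into CALLER, assuming the worst for callees whose
   resolved body may still be replaced at link time.  */
static int
propagate_pure (struct node *caller, struct call_edge *callees)
{
  int result = caller->pure;
  for (struct call_edge *e = callees; e; e = e->next_callee)
    {
      enum availability avail;
      struct node *y = resolve_callee (e->callee, &avail);

      if (!y || avail <= AVAIL_OVERWRITABLE)
        {
          result = 0;
          continue;
        }
      result &= y->pure;
    }
  printf ("%s is %s\n", caller->name, result ? "pure" : "not pure");
  return result;
}

int
main (void)
{
  struct node impl = { "impl", 0, NULL, AVAIL_AVAILABLE, 1 };
  struct node alias = { "alias", 1, &impl, AVAIL_AVAILABLE, 0 };
  struct node caller = { "caller", 0, NULL, AVAIL_AVAILABLE, 1 };
  struct call_edge e = { &alias, NULL };

  /* The walk looks through the alias and reads impl's summary.  */
  propagate_pure (&caller, &e);
  return 0;
}

The same shape shows up in each hunk: resolve the edge's callee first, then
test the availability returned by that resolution, and read the summary of
the resolved node rather than of e->callee itself.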