From: Jan Hubicka
To: gcc-patches@gcc.gnu.org
Subject: Cgraph alias reorg 8/14 (ipa-cp and ipa-prop update)
Date: Fri, 10 Jun 2011 15:42:00 -0000
Message-ID: <20110610145543.GF28776@kam.mff.cuni.cz>

Hi,
this patch updates ipa-cp and ipa-prop for aliases.  It is basically easy:
we don't analyze nodes representing aliases, and when propagating we skip
them, like everywhere else.

There are two problems I noticed.  First, we should not propagate through
calls whose target is overwritable.  When a non-overwritable function A has
an overwritable alias B, we should propagate through calls to A, but not
through calls to B.  As discussed on IRC, it is probably best to zap the
jump functions in question at the IPA stage (because at analysis stage it is
not clear at all whether the alias will end up overwritable with LTO).  A
similar problem already exists with the code in ipa_compute_jump_functions
that looks into a callee that might change with LTO.  I am hoping Martin
will help me fix those incrementally; these problems don't seem to be
blockers at the moment.

Bootstrapped/regtested x86_64-linux, will commit it shortly.

Honza

	* ipa-cp.c (ipcp_versionable_function_p): Aliases are not versionable.
	(ipcp_cloning_candidate_p): Aliases are not cloning candidates.
	(ipcp_initialize_node_lattices): Do not propagate through aliases.
	(ipcp_propagate_stage): Skip aliases when propagating.
	(ipcp_need_redirect_p): Skip aliases.
	(ipcp_insert_stage): Use FOR_EACH_FUNCTION_WITH_GIMPLE_BODY and
	collect_callers_of_node.
	* ipa-prop.c (ipa_init_func_list): Do not analyze datastructures
	for aliases.
	(ipa_compute_jump_functions): Look through aliases.
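To illustrate the idiom the hunks below rely on, here is a minimal sketch
(illustration only, not part of the patch; real_callee_info is a made-up
helper name): whenever per-node IPA data of a callee is needed, the alias
is first resolved to the function it refers to, so summaries are only ever
read from nodes that have a gimple body.

/* Illustration only, not part of this patch: resolve aliases/thunks
   before touching IPA summaries.  */
static struct ipa_node_params *
real_callee_info (struct cgraph_edge *cs)
{
  struct cgraph_node *callee
    = cgraph_function_or_thunk_node (cs->callee, NULL);

  /* Aliases themselves carry no lattices or parameter descriptors;
     the data lives on the node with the actual body.  */
  return IPA_NODE_REF (callee);
}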
Index: ipa-cp.c
===================================================================
--- ipa-cp.c	(revision 174905)
+++ ipa-cp.c	(working copy)
@@ -350,6 +350,10 @@ ipcp_versionable_function_p (struct cgra
 {
   struct cgraph_edge *edge;
 
+  /* We always version the actual function and redirect through the aliases.  */
+  if (node->alias)
+    return false;
+
   /* There are a number of generic reasons functions cannot be versioned.  We
      also cannot remove parameters if there are type attributes such as fnspec
      present.  */
@@ -358,7 +362,8 @@ ipcp_versionable_function_p (struct cgra
     return false;
 
   /* Removing arguments doesn't work if the function takes varargs
-     or use __builtin_apply_args. */
+     or use __builtin_apply_args.
+     FIXME: handle this together with can_change_signature flag.  */
   for (edge = node->callees; edge; edge = edge->next_callee)
     {
       tree t = edge->callee->decl;
@@ -380,6 +385,10 @@ ipcp_cloning_candidate_p (struct cgraph_
   gcov_type direct_call_sum = 0;
   struct cgraph_edge *e;
 
+  /* We look through aliases, so we clone the aliased function instead.  */
+  if (node->alias)
+    return false;
+
   /* We never clone functions that are not visible from outside.
      FIXME: in future we should clone such functions when they are called with
      different constants, but current ipcp implementation is not good on this.
@@ -498,7 +507,7 @@ ipcp_initialize_node_lattices (struct cg
   struct ipa_node_params *info = IPA_NODE_REF (node);
   enum ipa_lattice_type type;
 
-  if (ipa_is_called_with_var_arguments (info))
+  if (ipa_is_called_with_var_arguments (info) || node->alias)
     type = IPA_BOTTOM;
   else if (node->local.local)
     type = IPA_TOP;
@@ -759,7 +768,8 @@ ipcp_propagate_stage (void)
 
       for (cs = node->callees; cs; cs = cs->next_callee)
         {
-          struct ipa_node_params *callee_info = IPA_NODE_REF (cs->callee);
+          struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
+          struct ipa_node_params *callee_info = IPA_NODE_REF (callee);
           struct ipa_edge_args *args = IPA_EDGE_REF (cs);
 
           if (ipa_is_called_with_var_arguments (callee_info)
@@ -778,11 +788,11 @@ ipcp_propagate_stage (void)
                 {
                   dest_lat->type = new_lat.type;
                   dest_lat->constant = new_lat.constant;
-                  ipa_push_func_to_list (&wl, cs->callee);
+                  ipa_push_func_to_list (&wl, callee);
                 }
 
               if (ipcp_propagate_types (info, callee_info, jump_func, i))
-                ipa_push_func_to_list (&wl, cs->callee);
+                ipa_push_func_to_list (&wl, callee);
             }
         }
     }
@@ -818,7 +828,7 @@ ipcp_iterate_stage (void)
     /* Some lattices have changed from IPA_TOP to IPA_BOTTOM.
        This change should be propagated.  */
     {
-      gcc_assert (n_cloning_candidates);
+      /*gcc_assert (n_cloning_candidates);*/
       ipcp_propagate_stage ();
     }
   if (dump_file)
@@ -946,7 +956,8 @@ ipcp_need_redirect_p (struct cgraph_edge
 {
   struct ipa_node_params *orig_callee_info;
   int i, count;
-  struct cgraph_node *node = cs->callee, *orig;
+  struct cgraph_node *node = cgraph_function_or_thunk_node (cs->callee, NULL);
+  struct cgraph_node *orig;
 
   if (!n_cloning_candidates)
     return false;
@@ -1293,7 +1304,7 @@ ipcp_insert_stage (void)
   int i;
   VEC (cgraph_edge_p, heap) * redirect_callers;
   VEC (ipa_replace_map_p,gc)* replace_trees;
-  int node_callers, count;
+  int count;
   tree parm_tree;
   struct ipa_replace_map *replace_param;
   fibheap_t heap;
@@ -1307,13 +1318,12 @@ ipcp_insert_stage (void)
 
   dead_nodes = BITMAP_ALLOC (NULL);
 
-  for (node = cgraph_nodes; node; node = node->next)
-    if (node->analyzed)
-      {
-        if (node->count > max_count)
-          max_count = node->count;
-        overall_size += inline_summary (node)->self_size;
-      }
+  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
+    {
+      if (node->count > max_count)
+        max_count = node->count;
+      overall_size += inline_summary (node)->self_size;
+    }
 
   max_new_size = overall_size;
   if (max_new_size < PARAM_VALUE (PARAM_LARGE_UNIT_INSNS))
@@ -1413,14 +1423,7 @@ ipcp_insert_stage (void)
       if (!cs && cgraph_will_be_removed_from_program_if_no_direct_calls (node))
        bitmap_set_bit (dead_nodes, node->uid);
 
-      /* Compute how many callers node has.  */
-      node_callers = 0;
-      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
-        node_callers++;
-      redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
-      for (cs = node->callers; cs != NULL; cs = cs->next_caller)
-        if (!cs->indirect_inlining_edge)
-          VEC_quick_push (cgraph_edge_p, redirect_callers, cs);
+      redirect_callers = collect_callers_of_node (node);
 
       /* Redirecting all the callers of the node to the new versioned
          node.  */
@@ -1452,13 +1455,16 @@ ipcp_insert_stage (void)
        dump_function_to_file (node1->decl, dump_file, dump_flags);
 
       for (cs = node->callees; cs; cs = cs->next_callee)
-        if (cs->callee->aux)
-          {
-            fibheap_delete_node (heap, (fibnode_t) cs->callee->aux);
-            cs->callee->aux = fibheap_insert (heap,
-                                              ipcp_estimate_cloning_cost (cs->callee),
-                                              cs->callee);
-          }
+        {
+          struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
+          if (callee->aux)
+            {
+              fibheap_delete_node (heap, (fibnode_t) callee->aux);
+              callee->aux = fibheap_insert (heap,
+                                            ipcp_estimate_cloning_cost (callee),
+                                            callee);
+            }
+        }
     }
 
   while (!fibheap_empty (heap))
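Regarding the first problem above, one possible shape of the missing check
(a sketch only; propagation_safe_edge_p is a made-up name, and it assumes
cgraph_function_or_thunk_node reports availability through its second
argument) would be to refuse to use an edge whose resolved callee is merely
overwritable:

/* Sketch only, not part of this patch.  */
static bool
propagation_safe_edge_p (struct cgraph_edge *cs)
{
  enum availability avail;
  struct cgraph_node *callee
    = cgraph_function_or_thunk_node (cs->callee, &avail);

  /* An overwritable alias (or function) may bind elsewhere at link time,
     so nothing derived from CALLEE's body can be trusted here.  */
  if (avail <= AVAIL_OVERWRITABLE)
    return false;
  return callee->analyzed;
}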
Index: ipa-prop.c
===================================================================
--- ipa-prop.c	(revision 174905)
+++ ipa-prop.c	(working copy)
@@ -93,7 +93,7 @@ ipa_init_func_list (void)
 
   wl = NULL;
   for (node = cgraph_nodes; node; node = node->next)
-    if (node->analyzed)
+    if (node->analyzed && !node->alias)
      {
        struct ipa_node_params *info = IPA_NODE_REF (node);
        /* Unreachable nodes should have been eliminated before ipcp and
@@ -1096,6 +1096,7 @@ ipa_compute_jump_functions (struct cgrap
 
   for (cs = node->callees; cs; cs = cs->next_callee)
     {
+      struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee, NULL);
       /* We do not need to bother analyzing calls to unknown
         functions unless they may become known during lto/whopr.  */
       if (!cs->callee->analyzed && !flag_lto)
@@ -1103,11 +1104,11 @@ ipa_compute_jump_functions (struct cgrap
       ipa_count_arguments (cs);
       /* If the descriptor of the callee is not initialized yet, we have to do
         it now. */
-      if (cs->callee->analyzed)
-        ipa_initialize_node_params (cs->callee);
+      if (callee->analyzed)
+        ipa_initialize_node_params (callee);
       if (ipa_get_cs_argument_count (IPA_EDGE_REF (cs))
-          != ipa_get_param_count (IPA_NODE_REF (cs->callee)))
-        ipa_set_called_with_variable_arg (IPA_NODE_REF (cs->callee));
+          != ipa_get_param_count (IPA_NODE_REF (callee)))
+        ipa_set_called_with_variable_arg (IPA_NODE_REF (callee));
       ipa_compute_jump_functions_for_edge (parms_info, cs);
     }
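As for zapping the jump functions at the IPA stage, the direction discussed
on IRC could look roughly like this (a sketch only, not code from this
series; zap_overwritable_jump_functions is an invented name):

/* Sketch of the follow-up discussed above, not part of this patch:
   drop jump functions of edges whose resolved callee is only
   overwritably available, so nothing is propagated through them.  */
static void
zap_overwritable_jump_functions (struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      enum availability avail;
      struct ipa_edge_args *args = IPA_EDGE_REF (cs);
      int i;

      cgraph_function_or_thunk_node (cs->callee, &avail);
      if (avail > AVAIL_OVERWRITABLE)
        continue;
      for (i = 0; i < ipa_get_cs_argument_count (args); i++)
        ipa_get_ith_jump_func (args, i)->type = IPA_JF_UNKNOWN;
    }
}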