From mboxrd@z Thu Jan 1 00:00:00 1970 Return-Path: Received: (qmail 23893 invoked by alias); 16 May 2011 13:49:49 -0000 Received: (qmail 23849 invoked by uid 22791); 16 May 2011 13:49:46 -0000 X-SWARE-Spam-Status: No, hits=-3.3 required=5.0 tests=AWL,BAYES_00,T_RP_MATCHES_RCVD X-Spam-Check-By: sourceware.org Received: from cantor2.suse.de (HELO mx2.suse.de) (195.135.220.15) by sourceware.org (qpsmtpd/0.43rc1) with ESMTP; Mon, 16 May 2011 13:49:26 +0000 Received: from relay1.suse.de (charybdis-ext.suse.de [195.135.221.2]) by mx2.suse.de (Postfix) with ESMTP id BBA6D86391 for ; Mon, 16 May 2011 15:49:24 +0200 (CEST) Date: Mon, 16 May 2011 15:36:00 -0000 From: Richard Guenther To: gcc-patches@gcc.gnu.org Subject: [PATCH][?/n] Cleanup LTO type merging Message-ID: User-Agent: Alpine 2.00 (LNX 1167 2008-08-23) MIME-Version: 1.0 Content-Type: TEXT/PLAIN; charset=US-ASCII Mailing-List: contact gcc-patches-help@gcc.gnu.org; run by ezmlm Precedence: bulk List-Id: List-Archive: List-Post: List-Help: Sender: gcc-patches-owner@gcc.gnu.org X-SW-Source: 2011-05/txt/msg01136.txt.bz2 Hashes of SCC members have always been poor, as we have to ensure they are the same when the SCC is entered from a different path. But we can do better and mix in SCC member hashes if we can sort the SCC in some way. And we can do so when sorting by the hash values. The only thing we need to ensure is that we ignore members with the same hash value during mixing. The following implements this. Bootstrapped and tested on x86_64-unknown-linux-gnu, applied to trunk. Richard. 2011-05-16 Richard Guenther * gimple.c (struct type_hash_pair): New type. (type_hash_pair_compare): New function. (iterative_hash_gimple_type): Mix in SCC member hashes in hash-order. 
Index: gcc/gimple.c =================================================================== --- gcc/gimple.c (revision 173732) +++ gcc/gimple.c (working copy) @@ -4056,6 +4056,25 @@ iterative_hash_name (tree name, hashval_ return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v); } +/* A type, hashvalue pair for sorting SCC members. */ + +struct type_hash_pair { + tree type; + hashval_t hash; +}; + +/* Compare two type, hashvalue pairs. */ + +static int +type_hash_pair_compare (const void *p1_, const void *p2_) +{ + const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_; + const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_; + if (p1->hash == p2->hash) + return TYPE_UID (p1->type) - TYPE_UID (p2->type); + return p1->hash - p2->hash; +} + /* Returning a hash value for gimple type TYPE combined with VAL. SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. @@ -4220,22 +4213,73 @@ iterative_hash_gimple_type (tree type, h if (state->low == state->dfsnum) { tree x; + struct sccs *cstate; + struct tree_int_map *m; /* Pop off the SCC and set its hash values. */ - do + x = VEC_pop (tree, *sccstack); + cstate = (struct sccs *)*pointer_map_contains (sccstate, x); + cstate->on_sccstack = false; + /* Optimize SCC size one. */ + if (x == type) { - struct sccs *cstate; - struct tree_int_map *m = ggc_alloc_cleared_tree_int_map (); - x = VEC_pop (tree, *sccstack); - cstate = (struct sccs *)*pointer_map_contains (sccstate, x); - cstate->on_sccstack = false; + m = ggc_alloc_cleared_tree_int_map (); m->base.from = x; m->to = cstate->u.hash; slot = htab_find_slot (type_hash_cache, m, INSERT); gcc_assert (!*slot); *slot = (void *) m; } - while (x != type); + else + { + unsigned first, i, size, j; + struct type_hash_pair *pairs; + /* Pop off the SCC and build an array of type, hash pairs. 
*/ + first = VEC_length (tree, *sccstack) - 1; + while (VEC_index (tree, *sccstack, first) != type) + --first; + size = VEC_length (tree, *sccstack) - first + 1; + pairs = XALLOCAVEC (struct type_hash_pair, size); + i = 0; + pairs[i].type = x; + pairs[i].hash = cstate->u.hash; + do + { + x = VEC_pop (tree, *sccstack); + cstate = (struct sccs *)*pointer_map_contains (sccstate, x); + cstate->on_sccstack = false; + ++i; + pairs[i].type = x; + pairs[i].hash = cstate->u.hash; + } + while (x != type); + gcc_assert (i + 1 == size); + /* Sort the arrays of type, hash pairs so that when we mix in + all members of the SCC the hash value becomes independent on + the order we visited the SCC. Disregard hashes equal to + the hash of the type we mix into because we cannot guarantee + a stable sort for those across different TUs. */ + qsort (pairs, size, sizeof (struct type_hash_pair), + type_hash_pair_compare); + for (i = 0; i < size; ++i) + { + hashval_t hash; + m = ggc_alloc_cleared_tree_int_map (); + m->base.from = pairs[i].type; + hash = pairs[i].hash; + /* Skip same hashes. */ + for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j) + ; + for (; j < size; ++j) + hash = iterative_hash_hashval_t (pairs[j].hash, hash); + for (j = 0; pairs[j].hash != pairs[i].hash; ++j) + hash = iterative_hash_hashval_t (pairs[j].hash, hash); + m->to = hash; + slot = htab_find_slot (type_hash_cache, m, INSERT); + gcc_assert (!*slot); + *slot = (void *) m; + } + } } return iterative_hash_hashval_t (v, val);