* [Bug tree-optimization/17133] [3.5 Regression] wrong code with -ftree-lim
2004-08-21 20:58 [Bug tree-optimization/17133] New: [3.5 Regression] wrong code with -ftree-loop-optimize belyshev at lubercy dot com
` (2 preceding siblings ...)
2004-09-01 21:19 ` rakdver at atrey dot karlin dot mff dot cuni dot cz
@ 2004-09-05 9:02 ` rakdver at atrey dot karlin dot mff dot cuni dot cz
2004-09-08 18:37 ` pinskia at gcc dot gnu dot org
` (5 subsequent siblings)
9 siblings, 0 replies; 11+ messages in thread
From: rakdver at atrey dot karlin dot mff dot cuni dot cz @ 2004-09-05 9:02 UTC (permalink / raw)
To: gcc-bugs
------- Additional Comments From rakdver at atrey dot karlin dot mff dot cuni dot cz 2004-09-05 09:02 -------
Subject: Re: [3.5 Regression] wrong code with -ftree-lim
Hello,
here is the patch; I will submit it once it passes regtesting.
Zdenek
Index: Makefile.in
===================================================================
RCS file: /cvs/gcc/gcc/gcc/Makefile.in,v
retrieving revision 1.1370
diff -c -3 -p -r1.1370 Makefile.in
*** Makefile.in 4 Sep 2004 03:26:56 -0000 1.1370
--- Makefile.in 4 Sep 2004 13:18:08 -0000
*************** tree-ssa-loop-manip.o : tree-ssa-loop-ma
*** 1728,1734 ****
tree-ssa-loop-im.o : tree-ssa-loop-im.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(CFGLOOP_H) domwalk.h $(PARAMS_H)\
output.h diagnostic.h $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
! tree-pass.h flags.h
tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) tree-inline.h $(FLAGS_H) \
function.h $(TIMEVAR_H) tree-alias-common.h convert.h $(TM_H) coretypes.h \
--- 1728,1734 ----
tree-ssa-loop-im.o : tree-ssa-loop-im.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(CFGLOOP_H) domwalk.h $(PARAMS_H)\
output.h diagnostic.h $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
! tree-pass.h flags.h $(HASHTAB_H)
tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) tree-inline.h $(FLAGS_H) \
function.h $(TIMEVAR_H) tree-alias-common.h convert.h $(TM_H) coretypes.h \
Index: tree-ssa-loop-im.c
===================================================================
RCS file: /cvs/gcc/gcc/gcc/tree-ssa-loop-im.c,v
retrieving revision 2.8
diff -c -3 -p -r2.8 tree-ssa-loop-im.c
*** tree-ssa-loop-im.c 4 Sep 2004 03:26:57 -0000 2.8
--- tree-ssa-loop-im.c 4 Sep 2004 13:18:08 -0000
*************** Software Foundation, 59 Temple Place - S
*** 37,42 ****
--- 37,43 ----
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
+ #include "hashtab.h"
/* A type for the list of statements that have to be moved in order to be able
to hoist an invariant computation. */
*************** struct lim_aux_data
*** 78,90 ****
#define LIM_DATA(STMT) ((struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
! /* Description of a memory reference for store motion. */
! struct mem_ref
{
tree *ref; /* The reference itself. */
tree stmt; /* The statement in that it occurs. */
! struct mem_ref *next; /* Next use in the chain. */
};
/* Minimum cost of an expensive expression. */
--- 79,104 ----
#define LIM_DATA(STMT) ((struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
! /* Description of a memory reference location for store motion. */
! struct mem_ref_loc
{
tree *ref; /* The reference itself. */
tree stmt; /* The statement in that it occurs. */
! struct mem_ref_loc *next; /* Next use in the chain. */
! };
!
! /* Description of a memory reference for store motion. */
!
! struct mem_ref
! {
! tree mem; /* The memory itself. */
! hashval_t hash; /* Its hash value. */
! bool is_stored; /* True if there is a store to the location
! in the loop. */
! struct mem_ref_loc *locs; /* The locations where it is found. */
! bitmap possibly_aliases; /* The bitmap of tags the memory reference
! possibly aliases. */
};
/* Minimum cost of an expensive expression. */
*************** struct mem_ref
*** 94,103 ****
block will be executed. */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
- /* Maximum uid in the statement in the function. */
-
- static unsigned max_uid;
-
/* Calls CBCK for each index in memory reference ADDR_P. There are two
kinds situations handled; in each of these cases, the memory reference
and DATA are passed to the callback:
--- 108,113 ----
*************** movement_possibility (tree stmt)
*** 196,203 ****
if (TREE_SIDE_EFFECTS (rhs))
return MOVE_IMPOSSIBLE;
! if (TREE_CODE (lhs) != SSA_NAME
! || tree_could_trap_p (rhs))
return MOVE_PRESERVE_EXECUTION;
return MOVE_POSSIBLE;
--- 206,218 ----
if (TREE_SIDE_EFFECTS (rhs))
return MOVE_IMPOSSIBLE;
! /* Due to V_MUST_DEFS the ssa form for virtual uses no longer describes
! output dependence relation, so avoid moving out statements
! with VDEFS. */
! if (TREE_CODE (lhs) != SSA_NAME)
! return MOVE_IMPOSSIBLE;
!
! if (tree_could_trap_p (rhs))
return MOVE_PRESERVE_EXECUTION;
return MOVE_POSSIBLE;
*************** force_move_till (tree ref, tree *index,
*** 763,775 ****
return true;
}
! /* Records memory reference *REF (that occurs in statement STMT)
! to the list MEM_REFS. */
static void
! record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
{
! struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));
aref->stmt = stmt;
aref->ref = ref;
--- 778,790 ----
return true;
}
! /* Records memory reference location *REF to the list MEM_REFS. The reference
! occurs in statement STMT. */
static void
! record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
{
! struct mem_ref_loc *aref = xmalloc (sizeof (struct mem_ref_loc));
aref->stmt = stmt;
aref->ref = ref;
*************** record_mem_ref (struct mem_ref **mem_ref
*** 778,789 ****
*mem_refs = aref;
}
! /* Releases list of memory references MEM_REFS. */
static void
! free_mem_refs (struct mem_ref *mem_refs)
{
! struct mem_ref *act;
while (mem_refs)
{
--- 793,804 ----
*mem_refs = aref;
}
! /* Releases list of memory reference locations MEM_REFS. */
static void
! free_mem_ref_locs (struct mem_ref_loc *mem_refs)
{
! struct mem_ref_loc *act;
while (mem_refs)
{
*************** free_mem_refs (struct mem_ref *mem_refs)
*** 793,1007 ****
}
}
- /* If VAR is defined in LOOP and the statement it is defined in does not belong
- to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
- to the set SEEN. */
-
- static void
- maybe_queue_var (tree var, struct loop *loop,
- sbitmap seen, tree *queue, unsigned *in_queue)
- {
- tree stmt = SSA_NAME_DEF_STMT (var);
- basic_block def_bb = bb_for_stmt (stmt);
-
- if (!def_bb
- || !flow_bb_inside_loop_p (loop, def_bb)
- || TEST_BIT (seen, stmt_ann (stmt)->uid))
- return;
-
- SET_BIT (seen, stmt_ann (stmt)->uid);
- queue[(*in_queue)++] = stmt;
- }
-
- /* If COMMON_REF is NULL, set COMMON_REF to *OP and return true.
- Otherwise return true if the memory reference *OP is equal to COMMON_REF.
- Record the reference OP to list MEM_REFS. STMT is the statement in that
- the reference occurs. */
-
- struct sra_data
- {
- struct mem_ref **mem_refs;
- tree common_ref;
- tree stmt;
- };
-
- static bool
- fem_single_reachable_address (tree *op, void *data)
- {
- struct sra_data *sra_data = data;
-
- if (sra_data->common_ref
- && !operand_equal_p (*op, sra_data->common_ref, 0))
- return false;
- sra_data->common_ref = *op;
-
- record_mem_ref (sra_data->mem_refs, sra_data->stmt, op);
- return true;
- }
-
- /* Runs CALLBACK for each operand of STMT that is a memory reference. DATA
- is passed to the CALLBACK as well. The traversal stops if CALLBACK
- returns false, for_each_memref then returns false as well. Otherwise
- for_each_memref returns true. */
-
- static bool
- for_each_memref (tree stmt, bool (*callback)(tree *, void *), void *data)
- {
- tree *op;
-
- if (TREE_CODE (stmt) == RETURN_EXPR)
- stmt = TREE_OPERAND (stmt, 1);
-
- if (TREE_CODE (stmt) == MODIFY_EXPR)
- {
- op = &TREE_OPERAND (stmt, 0);
- if (TREE_CODE (*op) != SSA_NAME
- && !callback (op, data))
- return false;
-
- op = &TREE_OPERAND (stmt, 1);
- if (TREE_CODE (*op) != SSA_NAME
- && is_gimple_lvalue (*op)
- && !callback (op, data))
- return false;
-
- stmt = TREE_OPERAND (stmt, 1);
- }
-
- if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
- stmt = TREE_OPERAND (stmt, 0);
-
- if (TREE_CODE (stmt) == CALL_EXPR)
- {
- tree args;
-
- for (args = TREE_OPERAND (stmt, 1); args; args = TREE_CHAIN (args))
- {
- op = &TREE_VALUE (args);
-
- if (TREE_CODE (*op) != SSA_NAME
- && is_gimple_lvalue (*op)
- && !callback (op, data))
- return false;
- }
- }
-
- return true;
- }
-
- /* Determine whether all memory references inside the LOOP that correspond
- to virtual ssa names defined in statement STMT are equal.
- If so, store the list of the references to MEM_REFS, and return one
- of them. Otherwise store NULL to MEM_REFS and return NULL_TREE.
- *SEEN_CALL_STMT is set to true if the virtual operands suggest
- that the reference might be clobbered by a call inside the LOOP. */
-
- static tree
- single_reachable_address (struct loop *loop, tree stmt,
- struct mem_ref **mem_refs,
- bool *seen_call_stmt)
- {
- tree *queue = xmalloc (sizeof (tree) * max_uid);
- sbitmap seen = sbitmap_alloc (max_uid);
- unsigned in_queue = 1;
- dataflow_t df;
- unsigned i, n;
- struct sra_data sra_data;
- tree call;
- tree val;
- ssa_op_iter iter;
-
- sbitmap_zero (seen);
-
- *mem_refs = NULL;
- sra_data.mem_refs = mem_refs;
- sra_data.common_ref = NULL_TREE;
-
- queue[0] = stmt;
- SET_BIT (seen, stmt_ann (stmt)->uid);
- *seen_call_stmt = false;
-
- while (in_queue)
- {
- stmt = queue[--in_queue];
- sra_data.stmt = stmt;
-
- if (LIM_DATA (stmt)
- && LIM_DATA (stmt)->sm_done)
- goto fail;
-
- switch (TREE_CODE (stmt))
- {
- case MODIFY_EXPR:
- case CALL_EXPR:
- case RETURN_EXPR:
- if (!for_each_memref (stmt, fem_single_reachable_address,
- &sra_data))
- goto fail;
-
- /* If this is a function that may depend on the memory location,
- record the fact. We cannot directly refuse call clobbered
- operands here, since sra_data.common_ref did not have
- to be set yet. */
- call = get_call_expr_in (stmt);
- if (call
- && !(call_expr_flags (call) & ECF_CONST))
- *seen_call_stmt = true;
-
- /* Traverse also definitions of the VUSES (there may be other
- distinct from the one we used to get to this statement). */
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
- maybe_queue_var (val, loop, seen, queue, &in_queue);
-
- break;
-
- case PHI_NODE:
- for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
- maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
- seen, queue, &in_queue);
- break;
-
- default:
- goto fail;
- }
-
- /* Find uses of virtual names. */
- df = get_immediate_uses (stmt);
- n = num_immediate_uses (df);
-
- for (i = 0; i < n; i++)
- {
- stmt = immediate_use (df, i);
-
- if (!flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
- continue;
-
- if (TEST_BIT (seen, stmt_ann (stmt)->uid))
- continue;
- SET_BIT (seen, stmt_ann (stmt)->uid);
-
- queue[in_queue++] = stmt;
- }
- }
-
- free (queue);
- sbitmap_free (seen);
-
- return sra_data.common_ref;
-
- fail:
- free_mem_refs (*mem_refs);
- *mem_refs = NULL;
- free (queue);
- sbitmap_free (seen);
-
- return NULL;
- }
-
/* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
static void
! rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
{
tree var;
ssa_op_iter iter;
--- 808,817 ----
}
}
/* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
static void
! rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
{
tree var;
ssa_op_iter iter;
*************** rewrite_mem_refs (tree tmp_var, struct m
*** 1030,1038 ****
static void
schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
! struct mem_ref *mem_refs)
{
! struct mem_ref *aref;
tree tmp_var;
unsigned i;
tree load, store;
--- 840,848 ----
static void
schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
! struct mem_ref_loc *mem_refs)
{
! struct mem_ref_loc *aref;
tree tmp_var;
unsigned i;
tree load, store;
*************** schedule_sm (struct loop *loop, edge *ex
*** 1074,1150 ****
}
}
! /* Returns true if REF may be clobbered by calls. */
!
! static bool
! is_call_clobbered_ref (tree ref)
! {
! tree base;
!
! base = get_base_address (ref);
! if (!base)
! return true;
!
! if (DECL_P (base))
! return is_call_clobbered (base);
!
! if (TREE_CODE (base) == INDIRECT_REF)
! {
! /* Check whether the alias tags associated with the pointer
! are call clobbered. */
! tree ptr = TREE_OPERAND (base, 0);
! struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
! tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
! tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
!
! if ((nmt && is_call_clobbered (nmt))
! || (tmt && is_call_clobbered (tmt)))
! return true;
!
! return false;
! }
!
! abort ();
! }
!
! /* Determine whether all memory references inside LOOP corresponding to the
! virtual ssa name REG are equal to each other, and whether the address of
! this common reference can be hoisted outside of the loop. If this is true,
! prepare the statements that load the value of the memory reference to a
! temporary variable in the loop preheader, store it back on the loop exits,
! and replace all the references inside LOOP by this temporary variable.
! LOOP has N_EXITS stored in EXITS. */
static void
! determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
{
! tree ref;
! struct mem_ref *mem_refs, *aref;
struct loop *must_exec;
! bool sees_call;
!
! if (is_gimple_reg (reg))
! return;
!
! ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs,
! &sees_call);
! if (!ref)
! return;
! /* If we cannot create a ssa name for the result, give up. */
! if (!is_gimple_reg_type (TREE_TYPE (ref))
! || TREE_THIS_VOLATILE (ref))
! goto fail;
!
! /* If there is a call that may use the location, give up as well. */
! if (sees_call
! && is_call_clobbered_ref (ref))
! goto fail;
! if (!for_each_index (&ref, may_move_till, loop))
! goto fail;
! if (tree_could_trap_p (ref))
{
/* If the memory access is unsafe (i.e. it might trap), ensure that some
of the statements in that it occurs is always executed when the loop
--- 884,917 ----
}
}
! /* Check whether memory reference REF can be hoisted out of the LOOP. If this
! is true, prepare the statements that load the value of the memory reference
! to a temporary variable in the loop preheader, store it back on the loop
! exits, and replace all the references inside LOOP by this temporary variable.
! LOOP has N_EXITS stored in EXITS. TAG_REF_COUNT contains for each alias
! tags number of different memory references that use/clobber it in LOOP. */
static void
! determine_lsm_ref (struct loop *loop, edge *exits, unsigned n_exits,
! struct mem_ref *ref, unsigned *tag_ref_count)
{
! struct mem_ref_loc *aref;
struct loop *must_exec;
! int i;
! /* In case the memory is not stored to, there is nothing for SM to do. */
! if (!ref->is_stored)
! return;
! /* If any of the tags aliased with ref is accessed via a different ref, then
! fail. */
! EXECUTE_IF_SET_IN_BITMAP (ref->possibly_aliases, 0, i,
! {
! if (tag_ref_count[i] != 1)
! return;
! });
! if (tree_could_trap_p (ref->mem))
{
/* If the memory access is unsafe (i.e. it might trap), ensure that some
of the statements in that it occurs is always executed when the loop
*************** determine_lsm_reg (struct loop *loop, ed
*** 1157,1163 ****
least one of the statements containing the memory reference is
executed. */
! for (aref = mem_refs; aref; aref = aref->next)
{
if (!LIM_DATA (aref->stmt))
continue;
--- 924,930 ----
least one of the statements containing the memory reference is
executed. */
! for (aref = ref->locs; aref; aref = aref->next)
{
if (!LIM_DATA (aref->stmt))
continue;
*************** determine_lsm_reg (struct loop *loop, ed
*** 1172,1184 ****
}
if (!aref)
! goto fail;
}
! schedule_sm (loop, exits, n_exits, ref, mem_refs);
! fail: ;
! free_mem_refs (mem_refs);
}
/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
--- 939,972 ----
}
if (!aref)
! return;
}
! schedule_sm (loop, exits, n_exits, ref->mem, ref->locs);
! }
!
! /* Attempts to hoist memory reference described in SLOT out of loop
! described in DATA. Callback for htab_traverse. */
!
! struct hmr_data
! {
! struct loop *loop; /* Loop from that the reference should be hoisted. */
! edge *exits; /* Exits of the loop. */
! unsigned n_exits; /* Length of the exits array. */
! unsigned *tag_ref_count;
! /* Number of references to each alias analysis tag. */
! };
!
! static int
! hoist_memory_reference (void **slot, void *data)
! {
! struct mem_ref *ref = *slot;
! struct hmr_data *hmr_data = data;
! determine_lsm_ref (hmr_data->loop, hmr_data->exits, hmr_data->n_exits,
! ref, hmr_data->tag_ref_count);
!
! return 1;
}
/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
*************** loop_suitable_for_sm (struct loop *loop
*** 1198,1212 ****
return true;
}
/* Try to perform store motion for all memory references modified inside
LOOP. */
static void
determine_lsm_loop (struct loop *loop)
{
- tree phi;
unsigned n_exits;
edge *exits = get_loop_exit_edges (loop, &n_exits);
if (!loop_suitable_for_sm (loop, exits, n_exits))
{
--- 986,1190 ----
return true;
}
+ /* A hash function for struct mem_ref object OBJ. */
+
+ static hashval_t
+ memref_hash (const void *obj)
+ {
+ const struct mem_ref *mem = obj;
+
+ return mem->hash;
+ }
+
+ /* An equality function for struct mem_ref object OBJ1 with
+ memory reference OBJ2. */
+
+ static int
+ memref_eq (const void *obj1, const void *obj2)
+ {
+ const struct mem_ref *mem1 = obj1;
+
+ return operand_equal_p (mem1->mem, (tree) obj2, 0);
+ }
+
+ /* A function to free the struct mem_ref object OBJ. */
+
+ static void
+ memref_del (void *obj)
+ {
+ struct mem_ref *mem = obj;
+
+ BITMAP_XFREE (mem->possibly_aliases);
+ free_mem_ref_locs (mem->locs);
+ free (mem);
+ }
+
+ /* Increases count in UNKNOWN_REF_COUNTS for each alias tag referred
+ in STMT. */
+
+ static void
+ gather_mem_refs_stmt_unknown (unsigned *unknown_ref_counts, tree stmt)
+ {
+ ssa_op_iter oi;
+ tree tag;
+
+ FOR_EACH_SSA_TREE_OPERAND (tag, stmt, oi,
+ (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE))
+ {
+ if (TREE_CODE (tag) == SSA_NAME)
+ tag = SSA_NAME_VAR (tag);
+
+ unknown_ref_counts[var_ann (tag)->uid]++;
+ }
+ }
+
+ /* Gathers memory references in statement STMT in LOOP, storing the
+ information about them in MEM_REFS hash table. If we cannot fully
+ analyse STMT, increase the counter for each alias tag referred by
+ statement in UNKNOWN_REF_COUNTS. */
+
+ static void
+ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
+ unsigned *unknown_ref_counts, tree stmt)
+ {
+ tree *lhs, *rhs, *mem;
+ hashval_t hash;
+ PTR *slot;
+ struct mem_ref *ref;
+ ssa_op_iter oi;
+ tree tag;
+ bool is_stored;
+
+ get_stmt_operands (stmt);
+
+ if (!STMT_V_MAY_DEF_OPS (stmt)
+ && !STMT_V_MUST_DEF_OPS (stmt)
+ && !STMT_VUSE_OPS (stmt))
+ return;
+
+ /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
+ if (TREE_CODE (stmt) != MODIFY_EXPR)
+ {
+ gather_mem_refs_stmt_unknown (unknown_ref_counts, stmt);
+ return;
+ }
+ lhs = &TREE_OPERAND (stmt, 0);
+ rhs = &TREE_OPERAND (stmt, 1);
+
+ if (TREE_CODE (*lhs) == SSA_NAME)
+ {
+ if (!is_gimple_addressable (*rhs))
+ {
+ gather_mem_refs_stmt_unknown (unknown_ref_counts, stmt);
+ return;
+ }
+
+ mem = rhs;
+ is_stored = false;
+ }
+ else if (TREE_CODE (*rhs) == SSA_NAME
+ || is_gimple_min_invariant (*rhs))
+ {
+ mem = lhs;
+ is_stored = true;
+ }
+ else
+ {
+ gather_mem_refs_stmt_unknown (unknown_ref_counts, stmt);
+ return;
+ }
+
+ /* If we cannot create a ssa name for the result, give up. */
+ if (!is_gimple_reg_type (TREE_TYPE (*mem))
+ || TREE_THIS_VOLATILE (*mem))
+ {
+ gather_mem_refs_stmt_unknown (unknown_ref_counts, stmt);
+ return;
+ }
+
+ /* If we cannot move the reference from the loop, fail. */
+ if (!for_each_index (mem, may_move_till, loop))
+ {
+ gather_mem_refs_stmt_unknown (unknown_ref_counts, stmt);
+ return;
+ }
+
+ hash = iterative_hash_expr (*mem, 0);
+ slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);
+
+ if (*slot)
+ ref = *slot;
+ else
+ {
+ ref = xmalloc (sizeof (struct mem_ref));
+ ref->mem = *mem;
+ ref->hash = hash;
+ ref->locs = NULL;
+ ref->is_stored = false;
+ ref->possibly_aliases = BITMAP_XMALLOC ();
+ *slot = ref;
+ }
+ ref->is_stored |= is_stored;
+
+ record_mem_ref_loc (&ref->locs, stmt, mem);
+ FOR_EACH_SSA_TREE_OPERAND (tag, stmt, oi,
+ (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE))
+ {
+ if (TREE_CODE (tag) == SSA_NAME)
+ tag = SSA_NAME_VAR (tag);
+
+ bitmap_set_bit (ref->possibly_aliases, var_ann (tag)->uid);
+ }
+ }
+
+ /* Gathers memory references in LOOP, storing the information about them
+ in MEM_REFS hash table. For each alias tag store number of statements
+ that we could not fully analyse and that refer to it in
+ UNKNOWN_REF_COUNTS. */
+
+ static void
+ gather_mem_refs (struct loop *loop, htab_t mem_refs,
+ unsigned *unknown_ref_counts)
+ {
+ basic_block *body = get_loop_body (loop);
+ block_stmt_iterator bsi;
+ unsigned i;
+
+ for (i = 0; i < loop->num_nodes; i++)
+ for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
+ gather_mem_refs_stmt (loop, mem_refs, unknown_ref_counts,
+ bsi_stmt (bsi));
+
+ free (body);
+ }
+
+ /* Increases number of references for each alias tag associated with
+ reference passed in SLOT in the array passed in DATA. Callback
+ for htab_traverse. */
+
+ static int
+ count_tag_references (void **slot, void *data)
+ {
+ struct mem_ref *ref = *slot;
+ unsigned *tag_ref_counts = data;
+ int i;
+
+ EXECUTE_IF_SET_IN_BITMAP (ref->possibly_aliases, 0, i, tag_ref_counts[i]++);
+
+ return 1;
+ }
+
/* Try to perform store motion for all memory references modified inside
LOOP. */
static void
determine_lsm_loop (struct loop *loop)
{
unsigned n_exits;
edge *exits = get_loop_exit_edges (loop, &n_exits);
+ htab_t mem_refs;
+ unsigned *tag_ref_counts;
+ struct hmr_data hmr_data;
if (!loop_suitable_for_sm (loop, exits, n_exits))
{
*************** determine_lsm_loop (struct loop *loop)
*** 1214,1222 ****
return;
}
! for (phi = phi_nodes (loop->header); phi; phi = TREE_CHAIN (phi))
! determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));
free (exits);
}
--- 1192,1214 ----
return;
}
! mem_refs = htab_create (100, memref_hash, memref_eq, memref_del);
!
! /* Find the memory references in LOOP. */
! tag_ref_counts = xcalloc (num_referenced_vars, sizeof (unsigned));
! gather_mem_refs (loop, mem_refs, tag_ref_counts);
!
! /* Count the number of references for each memory aliasing tag. */
! htab_traverse (mem_refs, count_tag_references, tag_ref_counts);
!
! /* Hoist all suitable memory references. */
! hmr_data.loop = loop;
! hmr_data.exits = exits;
! hmr_data.n_exits = n_exits;
! hmr_data.tag_ref_count = tag_ref_counts;
! htab_traverse (mem_refs, hoist_memory_reference, &hmr_data);
+ htab_delete (mem_refs);
free (exits);
}
*************** static void
*** 1227,1254 ****
determine_lsm (struct loops *loops)
{
struct loop *loop;
- basic_block bb;
-
- /* Create a UID for each statement in the function. Ordering of the
- UIDs is not important for this pass. */
- max_uid = 0;
- FOR_EACH_BB (bb)
- {
- block_stmt_iterator bsi;
- tree phi;
-
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- stmt_ann (bsi_stmt (bsi))->uid = max_uid++;
-
- for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
- stmt_ann (phi)->uid = max_uid++;
- }
-
- compute_immediate_uses (TDFA_USE_VOPS, NULL);
! /* Pass the loops from the outermost. For each virtual operand loop phi node
! check whether all the references inside the loop correspond to a single
! address, and if so, move them. */
loop = loops->tree_root->inner;
while (1)
--- 1219,1227 ----
determine_lsm (struct loops *loops)
{
struct loop *loop;
! /* Pass the loops from the outermost and perform the store motion as
! suitable. */
loop = loops->tree_root->inner;
while (1)
*************** determine_lsm (struct loops *loops)
*** 1265,1271 ****
loop = loop->outer;
if (loop == loops->tree_root)
{
- free_df ();
loop_commit_inserts ();
return;
}
--- 1238,1243 ----
--
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=17133
^ permalink raw reply [flat|nested] 11+ messages in thread