public inbox for gcc-patches@gcc.gnu.org
 help / color / mirror / Atom feed
* Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs
@ 2019-07-11 13:55 Jan Hubicka
  2019-07-11 14:07 ` Richard Biener
  0 siblings, 1 reply; 5+ messages in thread
From: Jan Hubicka @ 2019-07-11 13:55 UTC (permalink / raw)
  To: gcc-patches, rguenther, d

Hi,
this patch makes nonoverlapping_component_refs_since_match_p to accept
paths with non-trivial MEM_REFs and TMRs assuming that they have same
semantics.

Bootstrapped/regtested x86_64-linux, OK?

Honza

	* tree-ssa-alias.c (same_tmr_indexing_p): Break out from ...
	(indirect_refs_may_alias_p): ... here.
	(nonoverlapping_component_refs_since_match_p): Support also non-trivial
	mem refs in the access paths.
Index: testsuite/gcc.dg/tree-ssa/alias-access-path-9.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(working copy)
@@ -0,0 +1,44 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+/* This testcase tests nonoverlapping_component_refs_since_match_p in presence
+   of non-trivial mem-refs.  */
+struct a {int a,b;};
+struct b {struct a a[10];};
+struct c {int c; struct b b;} c, *cptr;
+
+void
+set_a(struct a *a, int p)
+{
+  a->a=p;
+}
+void
+set_b(struct a *a, int p)
+{
+  a->b=p;
+}
+int
+get_a(struct a *a)
+{
+  return a->a;
+}
+
+int
+test(int i, int j)
+{
+  struct b *bptr = &c.b;
+  set_a (&bptr->a[i], 123);
+  set_b (&bptr->a[j], 124);
+  return get_a (&bptr->a[i]);
+}
+
+int
+test2(int i, int j)
+{
+  struct b *bptr = &cptr->b;
+  set_a (&bptr->a[i], 125);
+  set_b (&bptr->a[j], 126);
+  return get_a (&bptr->a[i]);
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
+/* { dg-final { scan-tree-dump-times "return 125" 1 "fre1"} } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273322)
+++ tree-ssa-alias.c	(working copy)
@@ -1216,6 +1216,25 @@ nonoverlapping_component_refs_p_1 (const
   return -1;
 }
 
+/* Return if TARGET_MEM_REFS base1 and base2 have same offsets.  */
+
+static bool
+same_tmr_indexing_p (tree base1, tree base2)
+{
+  return ((TMR_STEP (base1) == TMR_STEP (base2)
+	  || (TMR_STEP (base1) && TMR_STEP (base2)
+	      && operand_equal_p (TMR_STEP (base1),
+				  TMR_STEP (base2), 0)))
+	  && (TMR_INDEX (base1) == TMR_INDEX (base2)
+	      || (TMR_INDEX (base1) && TMR_INDEX (base2)
+		  && operand_equal_p (TMR_INDEX (base1),
+				      TMR_INDEX (base2), 0)))
+	  && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
+	      || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
+		  && operand_equal_p (TMR_INDEX2 (base1),
+				      TMR_INDEX2 (base2), 0))));
+}
+
 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
    MATCH2 either point to the same address or are disjoint.
    MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
@@ -1265,20 +1284,6 @@ nonoverlapping_component_refs_since_matc
         component_refs1.safe_push (ref1);
       ref1 = TREE_OPERAND (ref1, 0);
     }
-  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  /* TODO: Handle TARGET_MEM_REF later.  */
-  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
-    {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-      return -1;
-    }
 
   /* Create the stack of handled components for REF2.  */
   while (handled_component_p (ref2) && ref2 != match2)
@@ -1290,15 +1295,39 @@ nonoverlapping_component_refs_since_matc
         component_refs2.safe_push (ref2);
       ref2 = TREE_OPERAND (ref2, 0);
     }
-  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
+
+  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
+  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
+
+  /* If only one of access path starts with MEM_REF check that offset is 0
+     so the addresses stays the same after stripping it.
+     TODO: In this case we may walk the other access path until we get same
+     offset.
+
+     If both starts with MEM_REF, offset has to be same.  */
+  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
+      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
+      || (mem_ref1 && mem_ref2
+	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				  TREE_OPERAND (ref2, 1))))
     {
-      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
+      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
+      return -1;
     }
-  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
+
+  bool target_mem_ref1 = TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1;
+  bool target_mem_ref2 = TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2;
+
+  if ((target_mem_ref1 && !target_mem_ref2
+       && (TMR_INDEX (ref1) || TMR_INDEX2 (ref1)
+	   || !integer_zerop (TREE_OPERAND (ref1, 1))))
+      || (target_mem_ref2 && !target_mem_ref1
+          && (TMR_INDEX (ref2) || TMR_INDEX2 (ref2)
+	      || !integer_zerop (TREE_OPERAND (ref2, 1))))
+      || (target_mem_ref1 && target_mem_ref2
+	  && (!same_tmr_indexing_p (ref1, ref2)
+	      || !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				      TREE_OPERAND (ref2, 1)))))
     {
       ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
       return -1;
@@ -1776,18 +1805,7 @@ indirect_refs_may_alias_p (tree ref1 ATT
 	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
 	  || (TREE_CODE (base1) == TARGET_MEM_REF
 	      && TREE_CODE (base2) == TARGET_MEM_REF
-	      && (TMR_STEP (base1) == TMR_STEP (base2)
-		  || (TMR_STEP (base1) && TMR_STEP (base2)
-		      && operand_equal_p (TMR_STEP (base1),
-					  TMR_STEP (base2), 0)))
-	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
-		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
-		      && operand_equal_p (TMR_INDEX (base1),
-					  TMR_INDEX (base2), 0)))
-	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
-		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
-		      && operand_equal_p (TMR_INDEX2 (base1),
-					  TMR_INDEX2 (base2), 0))))))
+	      && same_tmr_indexing_p (base1, base2))))
     {
       poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
       poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;

^ permalink raw reply	[flat|nested] 5+ messages in thread

* Re: Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs
  2019-07-11 13:55 Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs Jan Hubicka
@ 2019-07-11 14:07 ` Richard Biener
  2019-07-11 15:26   ` Jan Hubicka
  0 siblings, 1 reply; 5+ messages in thread
From: Richard Biener @ 2019-07-11 14:07 UTC (permalink / raw)
  To: Jan Hubicka; +Cc: gcc-patches, d

[-- Attachment #1: Type: text/plain, Size: 7053 bytes --]

On Thu, 11 Jul 2019, Jan Hubicka wrote:

> Hi,
> this patch makes nonoverlapping_component_refs_since_match_p to accept
> paths with non-trivial MEM_REFs and TMRs assuming that they have same
> semantics.

Hmm.  We'll never get any TARGET_MEM_REFs wrapped with
handled-components so I wonder if it makes sense to handle it in
nonoverlapping_component_refs_since_match_p at all.

> Bootstrapped/regtested x86_64-linux, OK?
> 
> Honza
> 
> 	* tree-ssa-alias.c (same_tmr_indexing_p): Break out from ...
> 	(indirect_refs_may_alias_p): ... here.
> 	(nonoverlapping_component_refs_since_match_p): Support also non-trivial
> 	mem refs in the access paths.
> Index: testsuite/gcc.dg/tree-ssa/alias-access-path-9.c
> ===================================================================
> --- testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(nonexistent)
> +++ testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(working copy)
> @@ -0,0 +1,44 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O2 -fdump-tree-fre1" } */
> +
> +/* This testcase tests nonoverlapping_component_refs_since_match_p in presence
> +   of non-trivial mem-refs.  */
> +struct a {int a,b;};
> +struct b {struct a a[10];};
> +struct c {int c; struct b b;} c, *cptr;
> +
> +void
> +set_a(struct a *a, int p)
> +{
> +  a->a=p;
> +}
> +void
> +set_b(struct a *a, int p)
> +{
> +  a->b=p;
> +}
> +int
> +get_a(struct a *a)
> +{
> +  return a->a;
> +}
> +
> +int
> +test(int i, int j)
> +{
> +  struct b *bptr = &c.b;
> +  set_a (&bptr->a[i], 123);
> +  set_b (&bptr->a[j], 124);
> +  return get_a (&bptr->a[i]);
> +}
> +
> +int
> +test2(int i, int j)
> +{
> +  struct b *bptr = &cptr->b;
> +  set_a (&bptr->a[i], 125);
> +  set_b (&bptr->a[j], 126);
> +  return get_a (&bptr->a[i]);
> +}
> +/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
> +/* { dg-final { scan-tree-dump-times "return 125" 1 "fre1"} } */
> Index: tree-ssa-alias.c
> ===================================================================
> --- tree-ssa-alias.c	(revision 273322)
> +++ tree-ssa-alias.c	(working copy)
> @@ -1216,6 +1216,25 @@ nonoverlapping_component_refs_p_1 (const
>    return -1;
>  }
>  
> +/* Return if TARGET_MEM_REFS base1 and base2 have same offsets.  */
> +
> +static bool
> +same_tmr_indexing_p (tree base1, tree base2)
> +{
> +  return ((TMR_STEP (base1) == TMR_STEP (base2)
> +	  || (TMR_STEP (base1) && TMR_STEP (base2)
> +	      && operand_equal_p (TMR_STEP (base1),
> +				  TMR_STEP (base2), 0)))
> +	  && (TMR_INDEX (base1) == TMR_INDEX (base2)
> +	      || (TMR_INDEX (base1) && TMR_INDEX (base2)
> +		  && operand_equal_p (TMR_INDEX (base1),
> +				      TMR_INDEX (base2), 0)))
> +	  && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
> +	      || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
> +		  && operand_equal_p (TMR_INDEX2 (base1),
> +				      TMR_INDEX2 (base2), 0))));
> +}
> +
>  /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
>     MATCH2 either point to the same address or are disjoint.
>     MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
> @@ -1265,20 +1284,6 @@ nonoverlapping_component_refs_since_matc
>          component_refs1.safe_push (ref1);
>        ref1 = TREE_OPERAND (ref1, 0);
>      }
> -  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
> -    {
> -      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
> -	{
> -	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -	  return -1;
> -	}
> -    }
> -  /* TODO: Handle TARGET_MEM_REF later.  */
> -  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
> -    {
> -      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -      return -1;
> -    }
>  
>    /* Create the stack of handled components for REF2.  */
>    while (handled_component_p (ref2) && ref2 != match2)
> @@ -1290,15 +1295,39 @@ nonoverlapping_component_refs_since_matc
>          component_refs2.safe_push (ref2);
>        ref2 = TREE_OPERAND (ref2, 0);
>      }
> -  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
> +
> +  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
> +  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
> +
> +  /* If only one of access path starts with MEM_REF check that offset is 0
> +     so the addresses stays the same after stripping it.
> +     TODO: In this case we may walk the other access path until we get same
> +     offset.
> +
> +     If both starts with MEM_REF, offset has to be same.  */
> +  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
> +      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
> +      || (mem_ref1 && mem_ref2
> +	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
> +				  TREE_OPERAND (ref2, 1))))
>      {
> -      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
> -	{
> -	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -	  return -1;
> -	}
> +      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> +      return -1;
>      }
> -  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
> +
> +  bool target_mem_ref1 = TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1;
> +  bool target_mem_ref2 = TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2;
> +
> +  if ((target_mem_ref1 && !target_mem_ref2
> +       && (TMR_INDEX (ref1) || TMR_INDEX2 (ref1)
> +	   || !integer_zerop (TREE_OPERAND (ref1, 1))))
> +      || (target_mem_ref2 && !target_mem_ref1
> +          && (TMR_INDEX (ref2) || TMR_INDEX2 (ref2)
> +	      || !integer_zerop (TREE_OPERAND (ref2, 1))))
> +      || (target_mem_ref1 && target_mem_ref2
> +	  && (!same_tmr_indexing_p (ref1, ref2)
> +	      || !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
> +				      TREE_OPERAND (ref2, 1)))))
>      {
>        ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
>        return -1;
> @@ -1776,18 +1805,7 @@ indirect_refs_may_alias_p (tree ref1 ATT
>  	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
>  	  || (TREE_CODE (base1) == TARGET_MEM_REF
>  	      && TREE_CODE (base2) == TARGET_MEM_REF
> -	      && (TMR_STEP (base1) == TMR_STEP (base2)
> -		  || (TMR_STEP (base1) && TMR_STEP (base2)
> -		      && operand_equal_p (TMR_STEP (base1),
> -					  TMR_STEP (base2), 0)))
> -	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
> -		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
> -		      && operand_equal_p (TMR_INDEX (base1),
> -					  TMR_INDEX (base2), 0)))
> -	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
> -		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
> -		      && operand_equal_p (TMR_INDEX2 (base1),
> -					  TMR_INDEX2 (base2), 0))))))
> +	      && same_tmr_indexing_p (base1, base2))))
>      {
>        poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
>        poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
> 

-- 
Richard Biener <rguenther@suse.de>
SUSE Linux GmbH, Maxfeldstrasse 5, 90409 Nuernberg, Germany;
GF: Felix Imendörffer, Mary Higgins, Sri Rasiah; HRB 21284 (AG Nürnberg)

^ permalink raw reply	[flat|nested] 5+ messages in thread

* Re: Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs
  2019-07-11 14:07 ` Richard Biener
@ 2019-07-11 15:26   ` Jan Hubicka
  2019-07-11 16:13     ` Jan Hubicka
  0 siblings, 1 reply; 5+ messages in thread
From: Jan Hubicka @ 2019-07-11 15:26 UTC (permalink / raw)
  To: Richard Biener; +Cc: gcc-patches, d

> On Thu, 11 Jul 2019, Jan Hubicka wrote:
> 
> > Hi,
> > this patch makes nonoverlapping_component_refs_since_match_p to accept
> > paths with non-trivial MEM_REFs and TMRs assuming that they have same
> > semantics.
> 
> Hmm.  We'll never get any TARGET_MEM_REFs wrapped with
> handled-components so I wonder if it makes sense to handle it in
> nonoverlapping_component_refs_since_match_p at all.

OK, that makes my life easier. Here is updated patch.

Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273322)
+++ tree-ssa-alias.c	(working copy)
@@ -1265,20 +1265,6 @@ nonoverlapping_component_refs_since_matc
         component_refs1.safe_push (ref1);
       ref1 = TREE_OPERAND (ref1, 0);
     }
-  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  /* TODO: Handle TARGET_MEM_REF later.  */
-  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
-    {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-      return -1;
-    }
 
   /* Create the stack of handled components for REF2.  */
   while (handled_component_p (ref2) && ref2 != match2)
@@ -1290,20 +1276,31 @@ nonoverlapping_component_refs_since_matc
         component_refs2.safe_push (ref2);
       ref2 = TREE_OPERAND (ref2, 0);
     }
-  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
+
+  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
+  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
+
+  /* If only one of access paths starts with MEM_REF check that offset is 0
+     so the addresses stays the same after stripping it.
+     TODO: In this case we may walk the other access path until we get same
+     offset.
+
+     If both starts with MEM_REF, offset has to be same.  */
+  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
+      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
+      || (mem_ref1 && mem_ref2
+	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				  TREE_OPERAND (ref2, 1))))
     {
       ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
       return -1;
     }
 
+  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
+     to handle them here at all.  */
+  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
+		       && TREE_CODE (ref2) != TARGET_MEM_REF);
+
   /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
      rank.  This is sufficient because we start from the same DECL and you
      cannot reference several fields at a time with COMPONENT_REFs (unlike

^ permalink raw reply	[flat|nested] 5+ messages in thread

* Re: Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs
  2019-07-11 15:26   ` Jan Hubicka
@ 2019-07-11 16:13     ` Jan Hubicka
  2019-07-12 10:19       ` Richard Biener
  0 siblings, 1 reply; 5+ messages in thread
From: Jan Hubicka @ 2019-07-11 16:13 UTC (permalink / raw)
  To: Richard Biener; +Cc: gcc-patches, d

> > On Thu, 11 Jul 2019, Jan Hubicka wrote:
> > 
> > > Hi,
> > > this patch makes nonoverlapping_component_refs_since_match_p to accept
> > > paths with non-trivial MEM_REFs and TMRs assuming that they have same
> > > semantics.
> > 
> > Hmm.  We'll never get any TARGET_MEM_REFs wrapped with
> > handled-components so I wonder if it makes sense to handle it in
> > nonoverlapping_component_refs_since_match_p at all.
> 
> OK, that makes my life easier. Here is updated patch.
Hi,
the patch finished testing on x86_64-linux so here is with Changelog and
testcase. OK?



	* tree-ssa-alias.c (same_tmr_indexing_p): Break out from ...
	(indirect_refs_may_alias_p): ... here.
	(nonoverlapping_component_refs_since_match_p): Support also non-trivial
	mem refs in the access paths.
Index: testsuite/gcc.dg/tree-ssa/alias-access-path-9.c
===================================================================
--- testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(nonexistent)
+++ testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(working copy)
@@ -0,0 +1,44 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-fre1" } */
+
+/* This testcase tests nonoverlapping_component_refs_since_match_p in presence
+   of non-trivial mem-refs.  */
+struct a {int a,b;};
+struct b {struct a a[10];};
+struct c {int c; struct b b;} c, *cptr;
+
+void
+set_a(struct a *a, int p)
+{
+  a->a=p;
+}
+void
+set_b(struct a *a, int p)
+{
+  a->b=p;
+}
+int
+get_a(struct a *a)
+{
+  return a->a;
+}
+
+int
+test(int i, int j)
+{
+  struct b *bptr = &c.b;
+  set_a (&bptr->a[i], 123);
+  set_b (&bptr->a[j], 124);
+  return get_a (&bptr->a[i]);
+}
+
+int
+test2(int i, int j)
+{
+  struct b *bptr = &cptr->b;
+  set_a (&bptr->a[i], 125);
+  set_b (&bptr->a[j], 126);
+  return get_a (&bptr->a[i]);
+}
+/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
+/* { dg-final { scan-tree-dump-times "return 125" 1 "fre1"} } */
Index: tree-ssa-alias.c
===================================================================
--- tree-ssa-alias.c	(revision 273322)
+++ tree-ssa-alias.c	(working copy)
@@ -1265,20 +1265,6 @@ nonoverlapping_component_refs_since_matc
         component_refs1.safe_push (ref1);
       ref1 = TREE_OPERAND (ref1, 0);
     }
-  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  /* TODO: Handle TARGET_MEM_REF later.  */
-  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
-    {
-      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-      return -1;
-    }
 
   /* Create the stack of handled components for REF2.  */
   while (handled_component_p (ref2) && ref2 != match2)
@@ -1290,20 +1276,31 @@ nonoverlapping_component_refs_since_matc
         component_refs2.safe_push (ref2);
       ref2 = TREE_OPERAND (ref2, 0);
     }
-  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
-    {
-      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
-	{
-	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
-	  return -1;
-	}
-    }
-  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
+
+  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
+  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
+
+  /* If only one of access paths starts with MEM_REF check that offset is 0
+     so the addresses stays the same after stripping it.
+     TODO: In this case we may walk the other access path until we get same
+     offset.
+
+     If both starts with MEM_REF, offset has to be same.  */
+  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
+      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
+      || (mem_ref1 && mem_ref2
+	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
+				  TREE_OPERAND (ref2, 1))))
     {
       ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
       return -1;
     }
 
+  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
+     to handle them here at all.  */
+  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
+		       && TREE_CODE (ref2) != TARGET_MEM_REF);
+
   /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
      rank.  This is sufficient because we start from the same DECL and you
      cannot reference several fields at a time with COMPONENT_REFs (unlike

^ permalink raw reply	[flat|nested] 5+ messages in thread

* Re: Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs
  2019-07-11 16:13     ` Jan Hubicka
@ 2019-07-12 10:19       ` Richard Biener
  0 siblings, 0 replies; 5+ messages in thread
From: Richard Biener @ 2019-07-12 10:19 UTC (permalink / raw)
  To: Jan Hubicka; +Cc: gcc-patches, d

[-- Attachment #1: Type: text/plain, Size: 4972 bytes --]

On Thu, 11 Jul 2019, Jan Hubicka wrote:

> > > On Thu, 11 Jul 2019, Jan Hubicka wrote:
> > > 
> > > > Hi,
> > > > this patch makes nonoverlapping_component_refs_since_match_p to accept
> > > > paths with non-trivial MEM_REFs and TMRs assuming that they have same
> > > > semantics.
> > > 
> > > Hmm.  We'll never get any TARGET_MEM_REFs wrapped with
> > > handled-components so I wonder if it makes sense to handle it in
> > > nonoverlapping_component_refs_since_match_p at all.
> > 
> > OK, that makes my life easier. Here is updated patch.
> Hi,
> the patch finished testing on x86_64-linux so here is with Changelog and
> testcase. OK?

OK.

Richard.

> 
> 
> 	* tree-ssa-alias.c (same_tmr_indexing_p): Break out from ...
> 	(indirect_refs_may_alias_p): ... here.
> 	(nonoverlapping_component_refs_since_match_p): Support also non-trivial
> 	mem refs in the access paths.
> Index: testsuite/gcc.dg/tree-ssa/alias-access-path-9.c
> ===================================================================
> --- testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(nonexistent)
> +++ testsuite/gcc.dg/tree-ssa/alias-access-path-9.c	(working copy)
> @@ -0,0 +1,44 @@
> +/* { dg-do compile } */
> +/* { dg-options "-O2 -fdump-tree-fre1" } */
> +
> +/* This testcase tests nonoverlapping_component_refs_since_match_p in presence
> +   of non-trivial mem-refs.  */
> +struct a {int a,b;};
> +struct b {struct a a[10];};
> +struct c {int c; struct b b;} c, *cptr;
> +
> +void
> +set_a(struct a *a, int p)
> +{
> +  a->a=p;
> +}
> +void
> +set_b(struct a *a, int p)
> +{
> +  a->b=p;
> +}
> +int
> +get_a(struct a *a)
> +{
> +  return a->a;
> +}
> +
> +int
> +test(int i, int j)
> +{
> +  struct b *bptr = &c.b;
> +  set_a (&bptr->a[i], 123);
> +  set_b (&bptr->a[j], 124);
> +  return get_a (&bptr->a[i]);
> +}
> +
> +int
> +test2(int i, int j)
> +{
> +  struct b *bptr = &cptr->b;
> +  set_a (&bptr->a[i], 125);
> +  set_b (&bptr->a[j], 126);
> +  return get_a (&bptr->a[i]);
> +}
> +/* { dg-final { scan-tree-dump-times "return 123" 1 "fre1"} } */
> +/* { dg-final { scan-tree-dump-times "return 125" 1 "fre1"} } */
> Index: tree-ssa-alias.c
> ===================================================================
> --- tree-ssa-alias.c	(revision 273322)
> +++ tree-ssa-alias.c	(working copy)
> @@ -1265,20 +1265,6 @@ nonoverlapping_component_refs_since_matc
>          component_refs1.safe_push (ref1);
>        ref1 = TREE_OPERAND (ref1, 0);
>      }
> -  if (TREE_CODE (ref1) == MEM_REF && ref1 != match1)
> -    {
> -      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
> -	{
> -	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -	  return -1;
> -	}
> -    }
> -  /* TODO: Handle TARGET_MEM_REF later.  */
> -  if (TREE_CODE (ref1) == TARGET_MEM_REF && ref1 != match1)
> -    {
> -      ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -      return -1;
> -    }
>  
>    /* Create the stack of handled components for REF2.  */
>    while (handled_component_p (ref2) && ref2 != match2)
> @@ -1290,20 +1276,31 @@ nonoverlapping_component_refs_since_matc
>          component_refs2.safe_push (ref2);
>        ref2 = TREE_OPERAND (ref2, 0);
>      }
> -  if (TREE_CODE (ref2) == MEM_REF && ref2 != match2)
> -    {
> -      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
> -	{
> -	  ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
> -	  return -1;
> -	}
> -    }
> -  if (TREE_CODE (ref2) == TARGET_MEM_REF && ref2 != match2)
> +
> +  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
> +  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
> +
> +  /* If only one of access paths starts with MEM_REF check that offset is 0
> +     so the addresses stays the same after stripping it.
> +     TODO: In this case we may walk the other access path until we get same
> +     offset.
> +
> +     If both starts with MEM_REF, offset has to be same.  */
> +  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
> +      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
> +      || (mem_ref1 && mem_ref2
> +	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
> +				  TREE_OPERAND (ref2, 1))))
>      {
>        ++alias_stats.nonoverlapping_component_refs_since_match_p_may_alias;
>        return -1;
>      }
>  
> +  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
> +     to handle them here at all.  */
> +  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
> +		       && TREE_CODE (ref2) != TARGET_MEM_REF);
> +
>    /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
>       rank.  This is sufficient because we start from the same DECL and you
>       cannot reference several fields at a time with COMPONENT_REFs (unlike
> 

-- 
Richard Biener <rguenther@suse.de>
SUSE Linux GmbH, Maxfeldstrasse 5, 90409 Nuernberg, Germany;
GF: Felix Imendörffer, Mary Higgins, Sri Rasiah; HRB 21284 (AG Nürnberg)

^ permalink raw reply	[flat|nested] 5+ messages in thread

end of thread, other threads:[~2019-07-12 10:08 UTC | newest]

Thread overview: 5+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2019-07-11 13:55 Make nonoverlapping_component_refs_since_match_p work with non-trivial MEM_REFs and TMRs Jan Hubicka
2019-07-11 14:07 ` Richard Biener
2019-07-11 15:26   ` Jan Hubicka
2019-07-11 16:13     ` Jan Hubicka
2019-07-12 10:19       ` Richard Biener

This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).