public inbox for gcc-patches@gcc.gnu.org
* RFA: PATCH to make fold_indirect_ref_1 fold more things
@ 2010-10-27  2:16 Jason Merrill
  2010-10-27 20:10 ` Richard Guenther
  0 siblings, 1 reply; 8+ messages in thread
From: Jason Merrill @ 2010-10-27  2:16 UTC (permalink / raw)
  To: gcc-patches List

[-- Attachment #1: Type: text/plain, Size: 253 bytes --]

For constexpr I need to be able to fold some tree forms that 
fold_indirect_ref_1 didn't handle; this patch extends it to handle 
folding POINTER_PLUS_EXPR to an ARRAY_REF, and also folding to 
COMPONENT_REF.
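
As a rough illustration (example names only, not taken from the patch or
its testsuite), the new cases correspond to source forms like

  struct S { int i; int j; } s;
  int a[4];

  int x = *(int *) &s;        /* *(foo *)&struct_with_foo_field
                                 => folds to the COMPONENT_REF s.i */
  int y = *((int *) &a + 2);  /* POINTER_PLUS_EXPR on the array's address
                                 => folds to the ARRAY_REF a[2] */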

Tested x86_64-pc-linux-gnu.  OK for trunk?

[-- Attachment #2: fold-indirect.patch --]
[-- Type: text/x-patch, Size: 4883 bytes --]

commit c99ff581c364c5ea79d5031bcada6a09732c9129
Author: Jason Merrill <jason@redhat.com>
Date:   Tue Oct 26 14:49:37 2010 -0400

    	* fold-const.c (fold_indirect_ref_1): Handle folding to COMPONENT_REF,
    	folding POINTER_PLUS_EXPR to ARRAY_REF.

diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index decb0fb..accb35c 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -15647,55 +15647,85 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0)
 	  tree index = bitsize_int (0);
 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
 	}
-    }
-
-  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
-  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-    {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
-
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
-          && type == TREE_TYPE (TREE_TYPE (op00type)))
+      /* *(foo *)&struct_with_foo_field => COMPONENT_REF */
+      else if (RECORD_OR_UNION_TYPE_P (optype))
 	{
-	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
-	  tree part_width = TYPE_SIZE (type);
-	  unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
-	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-	  tree index = bitsize_int (indexi);
-
-	  if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
-	    return fold_build3_loc (loc,
-				BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
-				part_width, index);
-
+	  tree field = TYPE_FIELDS (optype);
+	  for (; field; field = DECL_CHAIN (field))
+	    if (TREE_CODE (field) == FIELD_DECL
+		&& integer_zerop (DECL_FIELD_OFFSET (field))
+		&& (TYPE_MAIN_VARIANT (TREE_TYPE (field))
+		    == TYPE_MAIN_VARIANT (type)))
+	      return fold_build3_loc (loc, COMPONENT_REF, type, op, field,
+				      NULL_TREE);
 	}
     }
 
-
-  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
 
       STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
- 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
-	  && type == TREE_TYPE (TREE_TYPE (op00type)))
+      if (TREE_CODE (op00) == ADDR_EXPR)
 	{
-	  tree size = TYPE_SIZE_UNIT (type);
-	  if (tree_int_cst_equal (size, op01))
-	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
-				TREE_OPERAND (op00, 0));
+	  tree op00type;
+	  op00 = TREE_OPERAND (op00, 0);
+	  op00type = TREE_TYPE (op00);
+
+	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
+	  if (TREE_CODE (op00type) == VECTOR_TYPE
+	      && type == TREE_TYPE (op00type))
+	    {
+	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+	      tree part_width = TYPE_SIZE (type);
+	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+	      tree index = bitsize_int (indexi);
+
+	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
+		return fold_build3_loc (loc,
+					BIT_FIELD_REF, type, op00,
+					part_width, index);
+
+	    }
+	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
+	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree size = TYPE_SIZE_UNIT (type);
+	      if (tree_int_cst_equal (size, op01))
+		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
+	    }
+	  /* ((foo *)&fooarray)[1] => fooarray[1] */
+	  else if (TREE_CODE (op00type) == ARRAY_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree type_domain = TYPE_DOMAIN (op00type);
+	      tree min_val = size_zero_node;
+	      if (type_domain && TYPE_MIN_VALUE (type_domain))
+		min_val = TYPE_MIN_VALUE (type_domain);
+	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
+				     TYPE_SIZE_UNIT (type));
+	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
+	      op0 = build4 (ARRAY_REF, type, op00, op01,
+			    NULL_TREE, NULL_TREE);
+	      SET_EXPR_LOCATION (op0, loc);
+	      return op0;
+	    }
+	  /* ((foo *)&struct_with_foo_field)[1] => COMPONENT_REF */
+	  else if (RECORD_OR_UNION_TYPE_P (op00type))
+	    {
+	      tree field = TYPE_FIELDS (op00type);
+	      for (; field; field = DECL_CHAIN (field))
+		if (TREE_CODE (field) == FIELD_DECL
+		    && tree_int_cst_equal (byte_position (field), op01)
+		    && (TYPE_MAIN_VARIANT (TREE_TYPE (field))
+			== TYPE_MAIN_VARIANT (type)))
+		  return fold_build3_loc (loc, COMPONENT_REF, type, op00,
+					  field, NULL_TREE);
+	    }
 	}
     }
 


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-27  2:16 RFA: PATCH to make fold_indirect_ref_1 fold more things Jason Merrill
@ 2010-10-27 20:10 ` Richard Guenther
  2010-10-28 16:18   ` Jason Merrill
  0 siblings, 1 reply; 8+ messages in thread
From: Richard Guenther @ 2010-10-27 20:10 UTC (permalink / raw)
  To: Jason Merrill; +Cc: gcc-patches List

On Tue, Oct 26, 2010 at 6:35 PM, Jason Merrill <jason@redhat.com> wrote:
> For constexpr I need to be able to fold some tree forms that
> fold_indirect_ref_1 didn't handle; this patch extends it to handle folding
> POINTER_PLUS_EXPR to an ARRAY_REF, and also folding to COMPONENT_REF.
>
> Tested x86_64-pc-linux-gnu.  OK for trunk?

I think by making this kind of transform you are prone to bugs like PR44468.

Richard.


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-27 20:10 ` Richard Guenther
@ 2010-10-28 16:18   ` Jason Merrill
  2010-10-29 16:06     ` Richard Guenther
  0 siblings, 1 reply; 8+ messages in thread
From: Jason Merrill @ 2010-10-28 16:18 UTC (permalink / raw)
  To: Richard Guenther; +Cc: gcc-patches List

On 10/27/2010 02:29 PM, Richard Guenther wrote:
> On Tue, Oct 26, 2010 at 6:35 PM, Jason Merrill<jason@redhat.com>  wrote:
>> For constexpr I need to be able to fold some tree forms that
>> fold_indirect_ref_1 didn't handle; this patch extends it to handle folding
>> POINTER_PLUS_EXPR to an ARRAY_REF, and also folding to COMPONENT_REF.
>
> I think by making this kind of transform you are prone to bugs like PR44468.

Ah, interesting.  But it seems to me that this case is different because 
we are starting from an ADDR_EXPR around an expression of the aggregate 
type in the folded expression, not just any pointer to the aggregate 
type.  What do you think?
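
A minimal sketch of that distinction (example names only, not from the
patch):

  struct S { int i; int j; } s;

  int a = *(int *) &s;          /* ADDR_EXPR of the object s itself;
                                   eligible for the new folding */

  int g (struct S *p) { return *(int *) p; }  /* just a pointer to the
                                                 aggregate type; not folded */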

Jason


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-28 16:18   ` Jason Merrill
@ 2010-10-29 16:06     ` Richard Guenther
  2010-10-29 21:51       ` Jason Merrill
  0 siblings, 1 reply; 8+ messages in thread
From: Richard Guenther @ 2010-10-29 16:06 UTC (permalink / raw)
  To: Jason Merrill; +Cc: gcc-patches List

On Thu, Oct 28, 2010 at 2:45 PM, Jason Merrill <jason@redhat.com> wrote:
> On 10/27/2010 02:29 PM, Richard Guenther wrote:
>>
>> On Tue, Oct 26, 2010 at 6:35 PM, Jason Merrill<jason@redhat.com>  wrote:
>>>
>>> For constexpr I need to be able to fold some tree forms that
>>> fold_indirect_ref_1 didn't handle; this patch extends it to handle
>>> folding
>>> POINTER_PLUS_EXPR to an ARRAY_REF, and also folding to COMPONENT_REF.
>>
>> I think by making this kind of transform you are prone to bugs like
>> PR44468.
>
> Ah, interesting.  But it seems to me that this case is different because we
> are starting from an ADDR_EXPR around an expression of the aggregate type in
> the folded expression, not just any pointer to the aggregate type.  What do
> you think?

Hm, I think what might save you is that you only look into immediate
fields (not fields in sub-structs, as we originally did).

Now I am concerned about something like

struct S {
    int i;
    int j;
};
struct R {
    struct S a;
    int k;
};
struct S s;
extern void abort (void);

int main()
{
  struct R *p = (struct R *)&s;
  s.i = 1;
  s.j = 2;
  (*(struct S *)&*p).i = 0;  /* obfuscated access to p->a.i = 0 */
  if (s.i != 0)
    abort ();
  return 0;
}

where if we end up folding the obfuscated access to p->a.i = 0 we will generate
wrong code (one might argue that *p is invalid in C, but I'm viewing this from
a middle-end POV, not a C one).  Now if &* is already folded it will look like
p, and so your code wouldn't trigger, but I guess this is just a matter of
obfuscation (like using &(*p + offset)) - the point is that the actual access
would change from one via struct S to one via struct R (and the alias set
of struct S is a subset of that of struct R, but not the other way around).

So - what kind of testcases are you trying to handle?  (the middle-end will
optimize this stuff keeping TBAA info correct during forwprop)

Richard.

> Jason
>


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-29 16:06     ` Richard Guenther
@ 2010-10-29 21:51       ` Jason Merrill
  2010-10-30  7:53         ` Richard Guenther
  0 siblings, 1 reply; 8+ messages in thread
From: Jason Merrill @ 2010-10-29 21:51 UTC (permalink / raw)
  To: Richard Guenther; +Cc: gcc-patches List

On 10/29/2010 11:25 AM, Richard Guenther wrote:
> So - what kind of testcases are you trying to handle?  (the middle-end will
> optimize this stuff keeping TBAA info correct during forwprop)

The RECORD_TYPE folding was for this testcase:

struct C { // literal type
   int m;
   int n;
   constexpr C(int m) : m(m), n(-m) {}
   constexpr bool is_neg() { return m < 0; }
};

constexpr bool check1(const C& c, int C:: *pm) { return c.*pm < 0; } // #1

constexpr bool check2(const C* pc, bool (C::*pm)() const) { return (pc->*pm)(); } // #2

constexpr C c(-1);

static_assert(!check1(c, &C::n), "Error");
static_assert(check1(c, &C::m), "Error");

static_assert(check2(&c, &C::is_neg), "Error");

For constexpr we need to evaluate suitable constant expressions at 
parsing time so that we can use them as template arguments, array bounds 
and such like.
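
For instance (an illustrative sketch reusing struct C from above; the
template A is made up):

  constexpr C d(1);
  int buf[d.is_neg() ? 1 : 2];   // array bound computed while parsing
  template <int N> struct A { };
  A<d.m> a;                      // template argument computed while parsing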

I guess I can just do this folding in the constexpr expander...

Jason


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-29 21:51       ` Jason Merrill
@ 2010-10-30  7:53         ` Richard Guenther
  2010-10-31 23:44           ` Jason Merrill
  0 siblings, 1 reply; 8+ messages in thread
From: Richard Guenther @ 2010-10-30  7:53 UTC (permalink / raw)
  To: Jason Merrill; +Cc: gcc-patches List

On Fri, Oct 29, 2010 at 9:34 PM, Jason Merrill <jason@redhat.com> wrote:
> On 10/29/2010 11:25 AM, Richard Guenther wrote:
>>
>> So - what kind of testcases are you trying to handle?  (the middle-end
>> will
>> optimize this stuff keeping TBAA info correct during forwprop)
>
> The RECORD_TYPE folding was for this testcase:
>
> struct C { // literal type
>  int m;
>  int n;
>  constexpr C(int m) : m(m), n(-m) {}
>  constexpr bool is_neg() { return m < 0; }
> };
>
> constexpr bool check1(const C& c, int C:: *pm) { return c.*pm < 0; } // #1
>
> constexpr bool check2(const C* pc, bool (C::*pm)() const) { return (pc->*pm)(); } // #2
>
> constexpr C c(-1);
>
> static_assert(!check1(c, &C::n), "Error");
> static_assert(check1(c, &C::m), "Error");
>
> static_assert(check2(&c, &C::is_neg), "Error");
>
> For constexpr we need to evaluate suitable constant expressions at parsing
> time so that we can use them as template arguments, array bounds and such
> like.
>
> I guess I can just do this folding in the constexpr expander...

Yeah, I think that should be safer.

Richard.

> Jason
>


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-30  7:53         ` Richard Guenther
@ 2010-10-31 23:44           ` Jason Merrill
  2010-11-01 22:17             ` Richard Guenther
  0 siblings, 1 reply; 8+ messages in thread
From: Jason Merrill @ 2010-10-31 23:44 UTC (permalink / raw)
  To: Richard Guenther; +Cc: gcc-patches List

[-- Attachment #1: Type: text/plain, Size: 301 bytes --]

On 10/29/2010 07:32 PM, Richard Guenther wrote:
>> I guess I can just do this folding in the constexpr expander...
>
> Yeah, I think that should be safer.

OK, how about this patch that only adds folding INDIRECT_REF of 
POINTER_PLUS_EXPR to ARRAY_REF (and combines the two POINTER_PLUS_EXPR 
hunks)?
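
Concretely (example names only, and assuming a 4-byte int): for

  int a[4];
  int x = *((int *) &a + 3);

the INDIRECT_REF of POINTER_PLUS_EXPR <&a, 12> now folds to the ARRAY_REF
a[3], dividing the 12-byte offset by TYPE_SIZE_UNIT of the element type.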

[-- Attachment #2: array-fold.patch --]
[-- Type: text/plain, Size: 3790 bytes --]

commit 49fd7e2c159bc5de19055c64c19e26778167cc28
Author: Jason Merrill <jason@redhat.com>
Date:   Tue Oct 26 14:49:37 2010 -0400

    	* fold-const.c (fold_indirect_ref_1): Handle folding
    	POINTER_PLUS_EXPR to ARRAY_REF.

diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index decb0fb..dd69a20 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -15649,53 +15649,59 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0)
 	}
     }
 
-  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
     {
       tree op00 = TREE_OPERAND (sub, 0);
       tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
 
       STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
-          && type == TREE_TYPE (TREE_TYPE (op00type)))
+      if (TREE_CODE (op00) == ADDR_EXPR)
 	{
-	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
-	  tree part_width = TYPE_SIZE (type);
-	  unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
-	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-	  tree index = bitsize_int (indexi);
+	  tree op00type;
+	  op00 = TREE_OPERAND (op00, 0);
+	  op00type = TREE_TYPE (op00);
 
-	  if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
-	    return fold_build3_loc (loc,
-				BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
-				part_width, index);
+	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
+	  if (TREE_CODE (op00type) == VECTOR_TYPE
+	      && type == TREE_TYPE (op00type))
+	    {
+	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
+	      tree part_width = TYPE_SIZE (type);
+	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
+	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+	      tree index = bitsize_int (indexi);
 
-	}
-    }
+	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
+		return fold_build3_loc (loc,
+					BIT_FIELD_REF, type, op00,
+					part_width, index);
 
-
-  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
-  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-    {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
-
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
- 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
-	  && type == TREE_TYPE (TREE_TYPE (op00type)))
-	{
-	  tree size = TYPE_SIZE_UNIT (type);
-	  if (tree_int_cst_equal (size, op01))
-	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
-				TREE_OPERAND (op00, 0));
+	    }
+	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
+	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree size = TYPE_SIZE_UNIT (type);
+	      if (tree_int_cst_equal (size, op01))
+		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
+	    }
+	  /* ((foo *)&fooarray)[1] => fooarray[1] */
+	  else if (TREE_CODE (op00type) == ARRAY_TYPE
+		   && type == TREE_TYPE (op00type))
+	    {
+	      tree type_domain = TYPE_DOMAIN (op00type);
+	      tree min_val = size_zero_node;
+	      if (type_domain && TYPE_MIN_VALUE (type_domain))
+		min_val = TYPE_MIN_VALUE (type_domain);
+	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
+				     TYPE_SIZE_UNIT (type));
+	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
+	      op0 = build4 (ARRAY_REF, type, op00, op01,
+			    NULL_TREE, NULL_TREE);
+	      SET_EXPR_LOCATION (op0, loc);
+	      return op0;
+	    }
 	}
     }
 


* Re: RFA: PATCH to make fold_indirect_ref_1 fold more things
  2010-10-31 23:44           ` Jason Merrill
@ 2010-11-01 22:17             ` Richard Guenther
  0 siblings, 0 replies; 8+ messages in thread
From: Richard Guenther @ 2010-11-01 22:17 UTC (permalink / raw)
  To: Jason Merrill; +Cc: gcc-patches List

On Sun, Oct 31, 2010 at 9:27 PM, Jason Merrill <jason@redhat.com> wrote:
> On 10/29/2010 07:32 PM, Richard Guenther wrote:
>>>
>>> I guess I can just do this folding in the constexpr expander...
>>
>> Yeah, I think that should be safer.
>
> OK, how about this patch that only adds folding INDIRECT_REF of
> POINTER_PLUS_EXPR to ARRAY_REF (and combines the two POINTER_PLUS_EXPR
> hunks)?

That works for me.

Thanks,
Richard.


