public inbox for gcc-patches@gcc.gnu.org
* [PATCH][5/n] Remove GENERIC stmt combining from SCCVN
@ 2015-06-26 10:57 Richard Biener
From: Richard Biener @ 2015-06-26 10:57 UTC (permalink / raw)
  To: gcc-patches


This moves some equality comparison patterns from fold-const.c to 
match.pd.
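
For illustration, the moved folds correspond to source-level
simplifications like the following (hand-written examples, not from the
patch or its testsuite; the constants are arbitrary):

  int f1 (int x, int y) { return (x ^ y) == 0; }  /* -> x == y */
  int f2 (int x, int y) { return (x ^ y) == y; }  /* -> x == 0 */
  int f3 (int x)        { return (x ^ 4) == 6; }  /* -> x == 2 */
  int f4 (int x)        { return (x | 8) == 4; }  /* -> 0, since 8 & ~4 != 0 */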

Bootstrapped and tested on x86_64-unknown-linux-gnu, applied.

Richard.

2015-06-26  Richard Biener  <rguenther@suse.de>

	* fold-const.c (fold_binary_loc): Remove -A CMP -B -> A CMP B
	and -A CMP CST -> A CMP -CST which is redundant with a pattern
	in match.pd.
	Move (A | C) == D where C & ~D != 0 -> 0, (X ^ Y) ==/!= 0 -> X ==/!= Y,
	(X ^ Y) ==/!= {Y,X} -> {X,Y} ==/!= 0 and
	(X ^ C1) op C2 -> X op (C1 ^ C2) to ...
	* match.pd: ... patterns here.

	* gcc.dg/tree-ssa/forwprop-25.c: Adjust.

Index: gcc/fold-const.c
===================================================================
*** gcc/fold-const.c	(revision 224943)
--- gcc/fold-const.c	(working copy)
*************** fold_binary_loc (location_t loc,
*** 12165,12179 ****
  				          type);
  	}
  
-       /* Similarly for a NEGATE_EXPR.  */
-       if (TREE_CODE (arg0) == NEGATE_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
- 							arg1)))
- 	  && TREE_CODE (tem) == INTEGER_CST
- 	  && !TREE_OVERFLOW (tem))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
- 
        /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
        if (TREE_CODE (arg0) == BIT_XOR_EXPR
  	  && TREE_CODE (arg1) == INTEGER_CST
--- 12165,12170 ----
*************** fold_binary_loc (location_t loc,
*** 12358,12379 ****
  	    return omit_one_operand_loc (loc, type, rslt, arg0);
  	}
  
-       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
- 	 Similarly for NE_EXPR.  */
-       if (TREE_CODE (arg0) == BIT_IOR_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- 	{
- 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
- 	  tree candnotd
- 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
- 			       TREE_OPERAND (arg0, 1),
- 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
- 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
- 	  if (integer_nonzerop (candnotd))
- 	    return omit_one_operand_loc (loc, type, rslt, arg0);
- 	}
- 
        /* If this is a comparison of a field, we may be able to simplify it.  */
        if ((TREE_CODE (arg0) == COMPONENT_REF
  	   || TREE_CODE (arg0) == BIT_FIELD_REF)
--- 12349,12354 ----
*************** fold_binary_loc (location_t loc,
*** 12431,12462 ****
  	    }
  	}
  
-       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
-       if (integer_zerop (arg1)
- 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 			    TREE_OPERAND (arg0, 1));
- 
-       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 				build_zero_cst (TREE_TYPE (arg0)));
-       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
- 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
- 				build_zero_cst (TREE_TYPE (arg0)));
- 
-       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
-       if (TREE_CODE (arg0) == BIT_XOR_EXPR
- 	  && TREE_CODE (arg1) == INTEGER_CST
- 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
- 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
- 					 TREE_OPERAND (arg0, 1), arg1));
- 
        /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
  	 (X & C) == 0 when C is a single bit.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
--- 12406,12411 ----
*************** fold_binary_loc (location_t loc,
*** 12510,12523 ****
  	  return omit_one_operand_loc (loc, type, res, arg0);
  	}
  
-       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
-       if (TREE_CODE (arg0) == NEGATE_EXPR
-           && TREE_CODE (arg1) == NEGATE_EXPR)
- 	return fold_build2_loc (loc, code, type,
- 				TREE_OPERAND (arg0, 0),
- 				fold_convert_loc (loc, TREE_TYPE (arg0),
- 						  TREE_OPERAND (arg1, 0)));
- 
        /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries.  */
        if (TREE_CODE (arg0) == BIT_AND_EXPR
  	  && TREE_CODE (arg1) == BIT_AND_EXPR)
--- 12459,12464 ----
Index: gcc/match.pd
===================================================================
*** gcc/match.pd	(revision 224943)
--- gcc/match.pd	(working copy)
*************** (define_operator_list swapped_tcc_compar
*** 1197,1202 ****
--- 1197,1231 ----
      (if (tem && !TREE_OVERFLOW (tem))
       (scmp @0 { tem; }))))))
  
+ 
+ /* Equality compare simplifications from fold_binary  */
+ (for cmp (eq ne)
+ 
+  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
+     Similarly for NE_EXPR.  */
+  (simplify
+   (cmp (convert?@3 (bit_ior @0 INTEGER_CST@1)) INTEGER_CST@2)
+   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0))
+        && wi::bit_and_not (@1, @2) != 0)
+    { constant_boolean_node (cmp == NE_EXPR, type); }))
+ 
+  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
+  (simplify
+   (cmp (bit_xor @0 @1) integer_zerop)
+   (cmp @0 @1))
+ 
+  /* (X ^ Y) == Y becomes X == 0.
+     Likewise (X ^ Y) == X becomes Y == 0.  */
+  (simplify
+   (cmp (bit_xor:c @0 @1) @0)
+   (cmp @1 { build_zero_cst (TREE_TYPE (@1)); }))
+ 
+  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
+  (simplify
+   (cmp (convert?@3 (bit_xor @0 INTEGER_CST@1)) INTEGER_CST@2)
+   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0)))
+    (cmp @0 (bit_xor @1 (convert @2))))))
+ 
  /* Simplification of math builtins.  */
  
  (define_operator_list LOG BUILT_IN_LOGF BUILT_IN_LOG BUILT_IN_LOGL)
Index: gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c
===================================================================
*** gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c	(revision 224942)
--- gcc/testsuite/gcc.dg/tree-ssa/forwprop-25.c	(working copy)
***************
*** 1,5 ****
  /* { dg-do compile } */
! /* { dg-options "-O1 -fdump-tree-forwprop1" } */
  
  struct rtx_def;
  typedef struct rtx_def *rtx;
--- 1,5 ----
  /* { dg-do compile } */
! /* { dg-options "-O1 -fdump-tree-cddce1" } */
  
  struct rtx_def;
  typedef struct rtx_def *rtx;
*************** convert_move (rtx to, rtx from, int unsi
*** 37,43 ****
      0 : 0));
  }
  
! /* { dg-final { scan-tree-dump "Replaced.*!=.*with.*!=.* " "forwprop1"} } */
! 
! 
! 
--- 37,40 ----
      0 : 0));
  }
  
! /* { dg-final { scan-tree-dump-not " ^ " "cddce1"} } */


* Re: [PATCH][5/n] Remove GENERIC stmt combining from SCCVN
From: Marc Glisse @ 2015-06-27 13:36 UTC (permalink / raw)
  To: Richard Biener; +Cc: gcc-patches

On Fri, 26 Jun 2015, Richard Biener wrote:

> + /* Equality compare simplifications from fold_binary  */
> + (for cmp (eq ne)
> +
> +  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
> +     Similarly for NE_EXPR.  */
> +  (simplify
> +   (cmp (convert?@3 (bit_ior @0 INTEGER_CST@1)) INTEGER_CST@2)
> +   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0))
> +        && wi::bit_and_not (@1, @2) != 0)
> +    { constant_boolean_node (cmp == NE_EXPR, type); }))
> +
> +  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
> +  (simplify
> +   (cmp (bit_xor @0 @1) integer_zerop)
> +   (cmp @0 @1))
> +
> +  /* (X ^ Y) == Y becomes X == 0.
> +     Likewise (X ^ Y) == X becomes Y == 0.  */
> +  (simplify
> +   (cmp (bit_xor:c @0 @1) @0)

Don't you need cmp:c for this one? The transformation still somehow 
happens through forward_propagate_into_comparison, but it looks like an 
accident.
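
For concreteness, a hand-written example (not from the thread) of the
swapped-operand form that :c on cmp would cover directly:

  /* With the pattern as posted, only (x ^ y) == x is matched;
     x == (x ^ y) has to be caught by operand canonicalization.  */
  int g (int x, int y) { return x == (x ^ y); }  /* should become y == 0 */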

> +   (cmp @1 { build_zero_cst (TREE_TYPE (@1)); }))
> +
> +  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
> +  (simplify
> +   (cmp (convert?@3 (bit_xor @0 INTEGER_CST@1)) INTEGER_CST@2)
> +   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0)))
> +    (cmp @0 (bit_xor @1 (convert @2))))))

I guess we'll have to generalize this to vectors at some point...

-- 
Marc Glisse


* Re: [PATCH][5/n] Remove GENERIC stmt combining from SCCVN
From: Richard Biener @ 2015-06-29  8:17 UTC (permalink / raw)
  To: gcc-patches

On Sat, 27 Jun 2015, Marc Glisse wrote:

> On Fri, 26 Jun 2015, Richard Biener wrote:
> 
> > + /* Equality compare simplifications from fold_binary  */
> > + (for cmp (eq ne)
> > +
> > +  /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
> > +     Similarly for NE_EXPR.  */
> > +  (simplify
> > +   (cmp (convert?@3 (bit_ior @0 INTEGER_CST@1)) INTEGER_CST@2)
> > +   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0))
> > +        && wi::bit_and_not (@1, @2) != 0)
> > +    { constant_boolean_node (cmp == NE_EXPR, type); }))
> > +
> > +  /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
> > +  (simplify
> > +   (cmp (bit_xor @0 @1) integer_zerop)
> > +   (cmp @0 @1))
> > +
> > +  /* (X ^ Y) == Y becomes X == 0.
> > +     Likewise (X ^ Y) == X becomes Y == 0.  */
> > +  (simplify
> > +   (cmp (bit_xor:c @0 @1) @0)
> 
> Don't you need cmp:c for this one? The transformation still somehow happens
> through forward_propagate_into_comparison, but it looks like an accident.

Yeah, I think it canonicalizes operand order (and uses the GENERIC
folding path).

I'm testing a patch changing the above to cmp:c.

> > +   (cmp @1 { build_zero_cst (TREE_TYPE (@1)); }))
> > +
> > +  /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
> > +  (simplify
> > +   (cmp (convert?@3 (bit_xor @0 INTEGER_CST@1)) INTEGER_CST@2)
> > +   (if (tree_nop_conversion_p (TREE_TYPE (@3), TREE_TYPE (@0)))
> > +    (cmp @0 (bit_xor @1 (convert @2))))))
> 
> I guess we'll have to generalize this to vectors at some point...

True - I'm trying to move patterns 1:1, leaving improvements to followups.

Thanks,
Richard.
