* [PATCH] Optimize away useless bitwise & resp. | during vrp
From: Jakub Jelinek @ 2010-07-12 13:27 UTC
To: gcc-patches
Hi!
This patch optimizes away some bitwise ands (when every bit the & could
clear is already known to be zero in the other operand, judged from its
value range) and bitwise ors (when every bit the | could set is already
known to be one).  This hits over 4000 times during bootstrap/regtest.
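To make the condition concrete, here is a small standalone sketch (not
part of the patch; plain uint64_t masks stand in for double_int, and the
struct and helper names are made up) of the test the new function applies
to the may-be-nonzero/must-be-nonzero masks, using the ranges from the
new testcase:

#include <assert.h>
#include <stdint.h>

/* Hypothetical stand-in for the bit information VRP derives from a
   value range: which bits may be nonzero and which are certainly
   nonzero.  */
struct bit_info
{
  uint64_t may_be_nonzero;   /* 1 where the bit can be set.  */
  uint64_t must_be_nonzero;  /* 1 where the bit is known to be set.  */
};

/* "a & b" simplifies to a when every bit b could clear (every bit not
   known to be one in b) is already known to be zero in a, i.e. it is
   not among a's possibly-nonzero bits.  */
static int
and_simplifies_to_first (struct bit_info a, struct bit_info b)
{
  return (a.may_be_nonzero & ~b.must_be_nonzero) == 0;
}

/* Dual condition: "a | b" simplifies to b when every bit a could
   contribute is already known to be set in b.  */
static int
ior_simplifies_to_second (struct bit_info a, struct bit_info b)
{
  return (a.may_be_nonzero & ~b.must_be_nonzero) == 0;
}

int
main (void)
{
  /* f1: after "x &= 0xff; x += 0x400" the range is [0x400, 0x4ff],
     so only bits 0-7 and bit 10 may be set, and bit 10 is always set.  */
  struct bit_info x = { 0x4ff, 0x400 };
  struct bit_info m = { 0x7ff, 0x7ff };   /* constant 0x7ff  */
  assert (and_simplifies_to_first (x, m));   /* "x & 0x7ff" -> x  */

  /* f2: after "x &= 0xff; x += 0x5400" the range is [0x5400, 0x54ff];
     bits 10 and 14 (0x4400) are always set, so "x | 0x4400" is just x.  */
  struct bit_info y = { 0x54ff, 0x5400 };
  struct bit_info c = { 0x4400, 0x4400 };  /* constant 0x4400  */
  assert (ior_simplifies_to_second (c, y)); /* "0x4400 | y" -> y  */
  return 0;
}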
Bootstrapped/regtested on x86_64-linux and i686-linux. Ok for trunk?
2010-07-12 Jakub Jelinek <jakub@redhat.com>
* tree-vrp.c (simplify_bit_ops_using_ranges): New function.
(simplify_stmt_using_ranges): Use it.
* gcc.dg/tree-ssa/vrp53.c: New test.
--- gcc/tree-vrp.c.jj 2010-07-12 09:25:05.000000000 +0200
+++ gcc/tree-vrp.c 2010-07-12 10:56:48.000000000 +0200
@@ -6913,6 +6913,89 @@ simplify_abs_using_ranges (gimple stmt)
return false;
}
+/* Optimize away redundant BIT_AND_EXPR and BIT_IOR_EXPR.
+   If all the bits that are being cleared by & are already
+   known to be zero from VR, or all the bits that are being
+   set by | are already known to be one from VR, the bit
+   operation is redundant.  */
+
+static bool
+simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
+{
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
+  tree op = NULL_TREE;
+  value_range_t vr0 = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
+  value_range_t vr1 = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
+  double_int may_be_nonzero0, may_be_nonzero1;
+  double_int must_be_nonzero0, must_be_nonzero1;
+  double_int mask;
+
+  if (TREE_CODE (op0) == SSA_NAME)
+    vr0 = *(get_value_range (op0));
+  else if (is_gimple_min_invariant (op0))
+    set_value_range_to_value (&vr0, op0, NULL);
+  else
+    return false;
+
+  if (TREE_CODE (op1) == SSA_NAME)
+    vr1 = *(get_value_range (op1));
+  else if (is_gimple_min_invariant (op1))
+    set_value_range_to_value (&vr1, op1, NULL);
+  else
+    return false;
+
+  if (!zero_nonzero_bits_from_vr (&vr0, &may_be_nonzero0, &must_be_nonzero0))
+    return false;
+  if (!zero_nonzero_bits_from_vr (&vr1, &may_be_nonzero1, &must_be_nonzero1))
+    return false;
+
+  switch (gimple_assign_rhs_code (stmt))
+    {
+    case BIT_AND_EXPR:
+      mask = double_int_and (may_be_nonzero0,
+                             double_int_not (must_be_nonzero1));
+      if (double_int_zero_p (mask))
+        {
+          op = op0;
+          break;
+        }
+      mask = double_int_and (may_be_nonzero1,
+                             double_int_not (must_be_nonzero0));
+      if (double_int_zero_p (mask))
+        {
+          op = op1;
+          break;
+        }
+      break;
+    case BIT_IOR_EXPR:
+      mask = double_int_and (may_be_nonzero0,
+                             double_int_not (must_be_nonzero1));
+      if (double_int_zero_p (mask))
+        {
+          op = op1;
+          break;
+        }
+      mask = double_int_and (may_be_nonzero1,
+                             double_int_not (must_be_nonzero0));
+      if (double_int_zero_p (mask))
+        {
+          op = op0;
+          break;
+        }
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
+  if (op == NULL_TREE)
+    return false;
+
+  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op), op, NULL);
+  update_stmt (gsi_stmt (*gsi));
+  return true;
+}
+
/* We are comparing trees OP0 and OP1 using COND_CODE. OP0 has
a known value range VR.
@@ -7198,6 +7281,15 @@ simplify_stmt_using_ranges (gimple_stmt_
return simplify_abs_using_ranges (stmt);
break;
+    case BIT_AND_EXPR:
+    case BIT_IOR_EXPR:
+      /* Optimize away BIT_AND_EXPR and BIT_IOR_EXPR
+         if all the bits being cleared are already cleared or
+         all the bits being set are already set.  */
+      if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
+        return simplify_bit_ops_using_ranges (gsi, stmt);
+      break;
+
default:
break;
}
--- gcc/testsuite/gcc.dg/tree-ssa/vrp53.c.jj 2010-07-12 11:11:54.000000000 +0200
+++ gcc/testsuite/gcc.dg/tree-ssa/vrp53.c 2010-07-12 11:14:59.000000000 +0200
@@ -0,0 +1,24 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-vrp1" } */
+
+int
+f1 (int x)
+{
+  x &= 0xff;
+  x += 0x400;
+  x &= 0x7ff;
+  return x;
+}
+
+int
+f2 (int x)
+{
+  x &= 0xff;
+  x += 0x5400;
+  x |= 0x4400;
+  return x;
+}
+
+/* { dg-final { scan-tree-dump-not "\& (2047|0x7ff)" "vrp1" } } */
+/* { dg-final { scan-tree-dump-not "\\| (17408|0x4400)" "vrp1" } } */
+/* { dg-final { cleanup-tree-dump "vrp1" } } */
Jakub
* Re: [PATCH] Optimize away useless bitwise & resp. | during vrp
From: Richard Guenther @ 2010-07-12 16:09 UTC
To: Jakub Jelinek; +Cc: gcc-patches
On Mon, Jul 12, 2010 at 3:29 PM, Jakub Jelinek <jakub@redhat.com> wrote:
> Hi!
>
> This patch optimizes away some bitwise ands (when every bit the & could
> clear is already known to be zero in the other operand, judged from its
> value range) and bitwise ors (when every bit the | could set is already
> known to be one).  This hits over 4000 times during bootstrap/regtest.
>
> Bootstrapped/regtested on x86_64-linux and i686-linux. Ok for trunk?
Ok.
Thanks,
Richard.