* [PATCH] Rip out rhs-to-tree from tree-affine.c
@ 2018-12-11 10:30 Richard Biener
2018-12-21 9:46 ` Richard Biener
0 siblings, 1 reply; 2+ messages in thread
From: Richard Biener @ 2018-12-11 10:30 UTC (permalink / raw)
To: gcc-patches
After the previous cleanup the following is more straight-forward now.
This should make tree-affine behave wrt the "new" GIMPLE world.
Bootstrapped and tested on x86_64-unknown-linux-gnu, now making
sure there are no codegen changes as expected.
Richard.
2018-12-11 Richard Biener <rguenther@suse.de>
* tree-affine.c (expr_to_aff_combination): New function split
out from...
(tree_to_aff_combination): ... here.
(aff_combination_expand): Avoid building a GENERIC tree.
Index: gcc/tree-affine.c
===================================================================
--- gcc/tree-affine.c (revision 266956)
+++ gcc/tree-affine.c (working copy)
@@ -259,101 +259,56 @@ aff_combination_convert (aff_tree *comb,
}
}
-/* Splits EXPR into an affine combination of parts. */
+/* Tries to handle OP0 CODE OP1 as affine combination of parts. Returns
+ true when that was successful and returns the combination in COMB. */
-void
-tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+static bool
+expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
+ tree op0, tree op1 = NULL_TREE)
{
aff_tree tmp;
- enum tree_code code;
- tree cst, core, toffset;
poly_int64 bitpos, bitsize, bytepos;
- machine_mode mode;
- int unsignedp, reversep, volatilep;
-
- STRIP_NOPS (expr);
- code = TREE_CODE (expr);
switch (code)
{
case POINTER_PLUS_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+ tree_to_aff_combination (op0, type, comb);
+ tree_to_aff_combination (op1, sizetype, &tmp);
aff_combination_add (comb, &tmp);
- return;
+ return true;
case PLUS_EXPR:
case MINUS_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
+ tree_to_aff_combination (op0, type, comb);
+ tree_to_aff_combination (op1, type, &tmp);
if (code == MINUS_EXPR)
aff_combination_scale (&tmp, -1);
aff_combination_add (comb, &tmp);
- return;
+ return true;
case MULT_EXPR:
- cst = TREE_OPERAND (expr, 1);
- if (TREE_CODE (cst) != INTEGER_CST)
+ if (TREE_CODE (op1) != INTEGER_CST)
break;
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, wi::to_widest (cst));
- return;
+ tree_to_aff_combination (op0, type, comb);
+ aff_combination_scale (comb, wi::to_widest (op1));
+ return true;
case NEGATE_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (op0, type, comb);
aff_combination_scale (comb, -1);
- return;
+ return true;
case BIT_NOT_EXPR:
/* ~x = -x - 1 */
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (op0, type, comb);
aff_combination_scale (comb, -1);
aff_combination_add_cst (comb, -1);
- return;
-
- case ADDR_EXPR:
- /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
- {
- expr = TREE_OPERAND (expr, 0);
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
- aff_combination_add (comb, &tmp);
- return;
- }
- core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &reversep,
- &volatilep);
- if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
- break;
- aff_combination_const (comb, type, bytepos);
- if (TREE_CODE (core) == MEM_REF)
- {
- tree mem_offset = TREE_OPERAND (core, 1);
- aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
- core = TREE_OPERAND (core, 0);
- }
- else
- core = build_fold_addr_expr (core);
-
- if (TREE_CODE (core) == ADDR_EXPR)
- aff_combination_add_elt (comb, core, 1);
- else
- {
- tree_to_aff_combination (core, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- if (toffset)
- {
- tree_to_aff_combination (toffset, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- return;
+ return true;
CASE_CONVERT:
{
- tree otype = TREE_TYPE (expr);
- tree inner = TREE_OPERAND (expr, 0);
+ tree otype = type;
+ tree inner = op0;
tree itype = TREE_TYPE (inner);
enum tree_code icode = TREE_CODE (inner);
@@ -376,9 +331,7 @@ tree_to_aff_combination (tree expr, tree
{
op0 = fold_convert (otype, op0);
op1 = fold_convert (otype, op1);
- expr = fold_build2 (icode, otype, op0, op1);
- tree_to_aff_combination (expr, type, comb);
- return;
+ return expr_to_aff_combination (comb, icode, otype, op0, op1);
}
wide_int minv, maxv;
/* If inner type has wrapping overflow behavior, fold conversion
@@ -399,15 +352,92 @@ tree_to_aff_combination (tree expr, tree
{
op0 = fold_convert (otype, op0);
op1 = fold_convert (otype, op1);
- expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
- tree_to_aff_combination (expr, type, comb);
- return;
+ return expr_to_aff_combination (comb, MINUS_EXPR, otype,
+ op0, op1);
}
}
}
}
break;
+ default:;
+ }
+
+ return false;
+}
+
+/* Splits EXPR into an affine combination of parts. */
+
+void
+tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+{
+ aff_tree tmp;
+ enum tree_code code;
+ tree core, toffset;
+ poly_int64 bitpos, bitsize, bytepos;
+ machine_mode mode;
+ int unsignedp, reversep, volatilep;
+
+ STRIP_NOPS (expr);
+
+ code = TREE_CODE (expr);
+ switch (code)
+ {
+ case POINTER_PLUS_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
+ TREE_OPERAND (expr, 1)))
+ return;
+ break;
+
+ case NEGATE_EXPR:
+ case BIT_NOT_EXPR:
+ CASE_CONVERT:
+ if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
+ return;
+ break;
+
+ case ADDR_EXPR:
+ /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
+ if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
+ {
+ expr = TREE_OPERAND (expr, 0);
+ tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+ aff_combination_add (comb, &tmp);
+ return;
+ }
+ core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
+ &toffset, &mode, &unsignedp, &reversep,
+ &volatilep);
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
+ break;
+ aff_combination_const (comb, type, bytepos);
+ if (TREE_CODE (core) == MEM_REF)
+ {
+ tree mem_offset = TREE_OPERAND (core, 1);
+ aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
+ core = TREE_OPERAND (core, 0);
+ }
+ else
+ core = build_fold_addr_expr (core);
+
+ if (TREE_CODE (core) == ADDR_EXPR)
+ aff_combination_add_elt (comb, core, 1);
+ else
+ {
+ tree_to_aff_combination (core, type, &tmp);
+ aff_combination_add (comb, &tmp);
+ }
+ if (toffset)
+ {
+ tree_to_aff_combination (toffset, type, &tmp);
+ aff_combination_add (comb, &tmp);
+ }
+ return;
+
default:
{
if (poly_int_tree_p (expr))
@@ -665,7 +695,7 @@ aff_combination_expand (aff_tree *comb A
{
unsigned i;
aff_tree to_add, current, curre;
- tree e, rhs;
+ tree e;
gimple *def;
widest_int scale;
struct name_expansion *exp;
@@ -715,20 +745,27 @@ aff_combination_expand (aff_tree *comb A
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
+ if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+ gimple_assign_rhs1 (def),
+ gimple_assign_rhs2 (def)))
+ continue;
+ break;
case NEGATE_EXPR:
case BIT_NOT_EXPR:
CASE_CONVERT:
- rhs = gimple_assign_rhs_to_tree (def);
+ if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+ gimple_assign_rhs1 (def)))
+ continue;
break;
case ADDR_EXPR:
case INTEGER_CST:
case POLY_INT_CST:
- rhs = gimple_assign_rhs1 (def);
+ tree_to_aff_combination (gimple_assign_rhs1 (def),
+ TREE_TYPE (name), &current);
break;
default:
continue;
}
- tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
exp = XNEW (struct name_expansion);
exp->in_progress = 1;
if (!*cache)
^ permalink raw reply [flat|nested] 2+ messages in thread
* Re: [PATCH] Rip out rhs-to-tree from tree-affine.c
2018-12-11 10:30 [PATCH] Rip out rhs-to-tree from tree-affine.c Richard Biener
@ 2018-12-21 9:46 ` Richard Biener
0 siblings, 0 replies; 2+ messages in thread
From: Richard Biener @ 2018-12-21 9:46 UTC (permalink / raw)
To: gcc-patches
On Tue, 11 Dec 2018, Richard Biener wrote:
>
> After the previous cleanup the following is more straight-forward now.
> This should make tree-affine behave wrt the "new" GIMPLE world.
>
> Bootstrapped and tested on x86_64-unknown-linux-gnu, now making
> sure there are no codegen changes as expected.
So this is the final patch. It generates the same code for gcc/*.o
besides one case where it improved code-generation slightly.
Unfortunately it regresses
FAIL: gcc.dg/tree-ssa/ivopts-lt-2.c scan-tree-dump-times ivopts "PHI" 1
FAIL: gcc.dg/tree-ssa/ivopts-lt-2.c scan-tree-dump-times ivopts "p_[0-9]* <" 1
on x86_64 where IVOPTs fails to eliminate one induction variable.
This is because we no longer expand
{
type = long unsigned int
offset = 0
elements = {
[0] = _1 * 4
}
}
to
{
type = long unsigned int
offset = 0
elements = {
[0] = (long unsigned int) i_6(D) * 1
}
}
which I think is reasonable. This has to be dealt with in
IVOPTs. On 32bit the testcase still passes.
I do not want to introduce a regression at this point
so I am deferring this patch to GCC 10.
Developing this patch also exposed IVOPTs calling
tree_to_aff_combination with essentially an outer
widening cast missing, which required doing
+ CASE_CONVERT:
+ if (expr_to_aff_combination (comb, code,
+ TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
+ {
+ aff_combination_convert (comb, type);
+ return;
which shouldn't be required. That's also something to fix
and there's now plenty of time in the GCC 10 timeframe to
do so.
Bootstrapped / tested on x86_64-unknown-linux-gnu.
Richard.
2018-12-21 Richard Biener <rguenther@suse.de>
* tree-affine.c (expr_to_aff_combination): New function split
out from...
(tree_to_aff_combination): ... here.
(aff_combination_expand): Avoid building a GENERIC tree.
Index: gcc/tree-affine.c
===================================================================
--- gcc/tree-affine.c (revision 267297)
+++ gcc/tree-affine.c (working copy)
@@ -259,104 +259,66 @@ aff_combination_convert (aff_tree *comb,
}
}
-/* Splits EXPR into an affine combination of parts. */
+/* Tries to handle OP0 CODE OP1 as affine combination of parts. Returns
+ true when that was successful and returns the combination in COMB. */
-void
-tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+static bool
+expr_to_aff_combination (aff_tree *comb, tree_code code, tree type,
+ tree op0, tree op1 = NULL_TREE)
{
aff_tree tmp;
- enum tree_code code;
- tree cst, core, toffset;
poly_int64 bitpos, bitsize, bytepos;
- machine_mode mode;
- int unsignedp, reversep, volatilep;
-
- STRIP_NOPS (expr);
- code = TREE_CODE (expr);
switch (code)
{
case POINTER_PLUS_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+ tree_to_aff_combination (op0, type, comb);
+ tree_to_aff_combination (op1, sizetype, &tmp);
aff_combination_add (comb, &tmp);
- return;
+ return true;
case PLUS_EXPR:
case MINUS_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
+ tree_to_aff_combination (op0, type, comb);
+ tree_to_aff_combination (op1, type, &tmp);
if (code == MINUS_EXPR)
aff_combination_scale (&tmp, -1);
aff_combination_add (comb, &tmp);
- return;
+ return true;
case MULT_EXPR:
- cst = TREE_OPERAND (expr, 1);
- if (TREE_CODE (cst) != INTEGER_CST)
+ if (TREE_CODE (op1) != INTEGER_CST)
break;
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- aff_combination_scale (comb, wi::to_widest (cst));
- return;
+ tree_to_aff_combination (op0, type, comb);
+ aff_combination_scale (comb, wi::to_widest (op1));
+ return true;
case NEGATE_EXPR:
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (op0, type, comb);
aff_combination_scale (comb, -1);
- return;
+ return true;
case BIT_NOT_EXPR:
/* ~x = -x - 1 */
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (op0, type, comb);
aff_combination_scale (comb, -1);
aff_combination_add_cst (comb, -1);
- return;
-
- case ADDR_EXPR:
- /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
- if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
- {
- expr = TREE_OPERAND (expr, 0);
- tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
- tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
- aff_combination_add (comb, &tmp);
- return;
- }
- core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
- &toffset, &mode, &unsignedp, &reversep,
- &volatilep);
- if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
- break;
- aff_combination_const (comb, type, bytepos);
- if (TREE_CODE (core) == MEM_REF)
- {
- tree mem_offset = TREE_OPERAND (core, 1);
- aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
- core = TREE_OPERAND (core, 0);
- }
- else
- core = build_fold_addr_expr (core);
-
- if (TREE_CODE (core) == ADDR_EXPR)
- aff_combination_add_elt (comb, core, 1);
- else
- {
- tree_to_aff_combination (core, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- if (toffset)
- {
- tree_to_aff_combination (toffset, type, &tmp);
- aff_combination_add (comb, &tmp);
- }
- return;
+ return true;
CASE_CONVERT:
{
- tree otype = TREE_TYPE (expr);
- tree inner = TREE_OPERAND (expr, 0);
+ tree otype = type;
+ tree inner = op0;
tree itype = TREE_TYPE (inner);
enum tree_code icode = TREE_CODE (inner);
+ /* STRIP_NOPS */
+ if (tree_nop_conversion_p (otype, itype))
+ {
+ tree_to_aff_combination (op0, type, comb);
+ return true;
+ }
+
/* In principle this is a valid folding, but it isn't necessarily
an optimization, so do it here and not in fold_unary. */
if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
@@ -376,9 +338,7 @@ tree_to_aff_combination (tree expr, tree
{
op0 = fold_convert (otype, op0);
op1 = fold_convert (otype, op1);
- expr = fold_build2 (icode, otype, op0, op1);
- tree_to_aff_combination (expr, type, comb);
- return;
+ return expr_to_aff_combination (comb, icode, otype, op0, op1);
}
wide_int minv, maxv;
/* If inner type has wrapping overflow behavior, fold conversion
@@ -399,15 +359,100 @@ tree_to_aff_combination (tree expr, tree
{
op0 = fold_convert (otype, op0);
op1 = fold_convert (otype, op1);
- expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
- tree_to_aff_combination (expr, type, comb);
- return;
+ return expr_to_aff_combination (comb, MINUS_EXPR, otype,
+ op0, op1);
}
}
}
}
break;
+ default:;
+ }
+
+ return false;
+}
+
+/* Splits EXPR into an affine combination of parts. */
+
+void
+tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
+{
+ aff_tree tmp;
+ enum tree_code code;
+ tree core, toffset;
+ poly_int64 bitpos, bitsize, bytepos;
+ machine_mode mode;
+ int unsignedp, reversep, volatilep;
+
+ STRIP_NOPS (expr);
+
+ code = TREE_CODE (expr);
+ switch (code)
+ {
+ case POINTER_PLUS_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0),
+ TREE_OPERAND (expr, 1)))
+ return;
+ break;
+
+ case NEGATE_EXPR:
+ case BIT_NOT_EXPR:
+ if (expr_to_aff_combination (comb, code, type, TREE_OPERAND (expr, 0)))
+ return;
+ break;
+
+ CASE_CONVERT:
+ if (expr_to_aff_combination (comb, code,
+ TREE_TYPE (expr), TREE_OPERAND (expr, 0)))
+ {
+ aff_combination_convert (comb, type);
+ return;
+ }
+ break;
+
+ case ADDR_EXPR:
+ /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR. */
+ if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
+ {
+ expr = TREE_OPERAND (expr, 0);
+ tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+ aff_combination_add (comb, &tmp);
+ return;
+ }
+ core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
+ &toffset, &mode, &unsignedp, &reversep,
+ &volatilep);
+ if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
+ break;
+ aff_combination_const (comb, type, bytepos);
+ if (TREE_CODE (core) == MEM_REF)
+ {
+ tree mem_offset = TREE_OPERAND (core, 1);
+ aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
+ core = TREE_OPERAND (core, 0);
+ }
+ else
+ core = build_fold_addr_expr (core);
+
+ if (TREE_CODE (core) == ADDR_EXPR)
+ aff_combination_add_elt (comb, core, 1);
+ else
+ {
+ tree_to_aff_combination (core, type, &tmp);
+ aff_combination_add (comb, &tmp);
+ }
+ if (toffset)
+ {
+ tree_to_aff_combination (toffset, type, &tmp);
+ aff_combination_add (comb, &tmp);
+ }
+ return;
+
default:
{
if (poly_int_tree_p (expr))
@@ -665,7 +710,7 @@ aff_combination_expand (aff_tree *comb A
{
unsigned i;
aff_tree to_add, current, curre;
- tree e, rhs;
+ tree e;
gimple *def;
widest_int scale;
struct name_expansion *exp;
@@ -715,20 +760,27 @@ aff_combination_expand (aff_tree *comb A
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
+ if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+ gimple_assign_rhs1 (def),
+ gimple_assign_rhs2 (def)))
+ continue;
+ break;
case NEGATE_EXPR:
case BIT_NOT_EXPR:
CASE_CONVERT:
- rhs = gimple_assign_rhs_to_tree (def);
+ if (!expr_to_aff_combination (&current, code, TREE_TYPE (name),
+ gimple_assign_rhs1 (def)))
+ continue;
break;
case ADDR_EXPR:
case INTEGER_CST:
case POLY_INT_CST:
- rhs = gimple_assign_rhs1 (def);
+ tree_to_aff_combination (gimple_assign_rhs1 (def),
+ TREE_TYPE (name), &current);
break;
default:
continue;
}
- tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
exp = XNEW (struct name_expansion);
exp->in_progress = 1;
if (!*cache)
^ permalink raw reply [flat|nested] 2+ messages in thread
end of thread, other threads:[~2018-12-21 9:15 UTC | newest]
Thread overview: 2+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2018-12-11 10:30 [PATCH] Rip out rhs-to-tree from tree-affine.c Richard Biener
2018-12-21 9:46 ` Richard Biener
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).