public inbox for gcc-cvs@sourceware.org
help / color / mirror / Atom feed
* [gcc(refs/users/marxin/heads/if-to-switch-v4)] Playground #1.
@ 2020-10-09 12:41 Martin Liska
  0 siblings, 0 replies; 2+ messages in thread
From: Martin Liska @ 2020-10-09 12:41 UTC (permalink / raw)
  To: gcc-cvs

https://gcc.gnu.org/g:13d65ea567061c40047d7070eb88501cb71bad2c

commit 13d65ea567061c40047d7070eb88501cb71bad2c
Author: Martin Liska <mliska@suse.cz>
Date:   Thu Oct 8 12:48:16 2020 +0200

    Playground #1.

Diff:
---
 gcc/gimple-if-to-switch.cc | 60 ++++++++++++++++++++++++++++++++++++++++++++++
 gcc/tree-ssa-reassoc.c     | 34 ++++++--------------------
 gcc/tree.h                 | 28 ++++++++++++++++++++++
 3 files changed, 95 insertions(+), 27 deletions(-)

diff --git a/gcc/gimple-if-to-switch.cc b/gcc/gimple-if-to-switch.cc
index f066432e9b7..7b547136287 100644
--- a/gcc/gimple-if-to-switch.cc
+++ b/gcc/gimple-if-to-switch.cc
@@ -548,6 +548,66 @@ analyze_condition_in_bb (basic_block bb)
   tree rhs = gimple_cond_rhs (cond);
   tree_code code = gimple_cond_code (cond);
 
+  operand_rank = new hash_map<tree, long>;
+  debug_bb(bb);
+
+  if (code == NE_EXPR)
+    {
+      gassign *def = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (lhs));
+      if (def)
+	{
+      enum tree_code rhs_code = gimple_assign_rhs_code (def);
+      if (associative_tree_code (rhs_code))
+	{
+	  auto_vec<operand_entry *> ops;
+	  if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
+	    ;
+	  else
+	    {
+	      linearize_expr_tree (&ops, def, true, false);
+	      unsigned length = ops.length ();
+	      struct range_entry *ranges = XNEWVEC (struct range_entry, length);
+	      for (unsigned i = 0; i < length; i++)
+		{
+		  operand_entry *oe = ops[i];
+		  ranges[i].idx = i;
+		  init_range_entry (ranges + i, oe->op,
+				    oe->op
+				    ? NULL
+				    : last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id)));
+		  /* For | invert it now, we will invert it again before emitting
+		     the optimized expression.  */
+		  if (rhs_code == BIT_IOR_EXPR
+		      || (rhs_code == ERROR_MARK && oe->rank == BIT_IOR_EXPR))
+		    ranges[i].in_p = !ranges[i].in_p;
+		  debug_range_entry (&ranges[i]);
+		}
+	      fprintf (stderr, "\n");
+
+	      int a = 2;
+	    }
+	}
+	}
+      else
+	{
+      struct range_entry entry;
+      init_range_entry (&entry, NULL_TREE, cond);
+      debug_range_entry (&entry);
+      fprintf (stderr, "\n");
+
+	}
+    }
+  else
+    {
+      struct range_entry entry;
+      init_range_entry (&entry, NULL_TREE, cond);
+      debug_range_entry (&entry);
+      fprintf (stderr, "\n");
+    }
+
+  delete operand_rank;
+  operand_rank = NULL;
+
   /* Situation 1.  */
   if (code == EQ_EXPR)
     {
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index a2ca1713d4b..6f83d184fb9 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -188,15 +188,6 @@ static struct
   int pows_created;
 } reassociate_stats;
 
-/* Operator, rank pair.  */
-struct operand_entry
-{
-  unsigned int rank;
-  unsigned int id;
-  tree op;
-  unsigned int count;
-  gimple *stmt_to_insert;
-};
 
 static object_allocator<operand_entry> operand_entry_pool
   ("operand entry pool");
@@ -211,7 +202,7 @@ static unsigned int next_operand_entry_id;
 static long *bb_rank;
 
 /* Operand->rank hashtable.  */
-static hash_map<tree, long> *operand_rank;
+hash_map<tree, long> *operand_rank;
 
 /* Vector of SSA_NAMEs on which after reassociate_bb is done with
    all basic blocks the CFG should be adjusted - basic blocks
@@ -226,7 +217,7 @@ static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
 /* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
    possibly added by gsi_remove.  */
 
-bool
+static bool
 reassoc_remove_stmt (gimple_stmt_iterator *gsi)
 {
   gimple *stmt = gsi_stmt (*gsi);
@@ -1060,8 +1051,6 @@ eliminate_using_constants (enum tree_code opcode,
 }
 
 
-static void linearize_expr_tree (vec<operand_entry *> *, gimple *,
-				 bool, bool);
 
 /* Structure for tracking and counting operands.  */
 struct oecount {
@@ -2406,18 +2395,7 @@ optimize_ops_list (enum tree_code opcode,
    For more information see comments above fold_test_range in fold-const.c,
    this implementation is for GIMPLE.  */
 
-struct range_entry
-{
-  tree exp;
-  tree low;
-  tree high;
-  bool in_p;
-  bool strict_overflow_p;
-  unsigned int idx, next;
-};
 
-void dump_range_entry (FILE *file, struct range_entry *r);
-void debug_range_entry (struct range_entry *r);
 
 /* Dump the range entry R to FILE, skipping its expression if SKIP_EXP.  */
 
@@ -2447,7 +2425,7 @@ debug_range_entry (struct range_entry *r)
    an SSA_NAME and STMT argument is ignored, otherwise STMT
    argument should be a GIMPLE_COND.  */
 
-static void
+void
 init_range_entry (struct range_entry *r, tree exp, gimple *stmt)
 {
   int in_p;
@@ -3805,7 +3783,9 @@ optimize_range_tests (enum tree_code opcode,
       if (opcode == BIT_IOR_EXPR
 	  || (opcode == ERROR_MARK && oe->rank == BIT_IOR_EXPR))
 	ranges[i].in_p = !ranges[i].in_p;
+//      debug_range_entry (&ranges[i]);
     }
+//  fprintf (stderr, "\n");
 
   qsort (ranges, length, sizeof (*ranges), range_entry_cmp);
   for (i = 0; i < length; i++)
@@ -4693,7 +4673,7 @@ maybe_optimize_range_tests (gimple *stmt)
       if (bb == first_bb)
 	break;
     }
-  if (ops.length () > 1)
+//  if (ops.length () > 1)
     any_changes = optimize_range_tests (ERROR_MARK, &ops, first_bb);
   if (any_changes)
     {
@@ -5605,7 +5585,7 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
 /* Recursively linearize a binary expression that is the RHS of STMT.
    Place the operands of the expression tree in the vector named OPS.  */
 
-static void
+void
 linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
 		     bool is_associative, bool set_visited)
 {
diff --git a/gcc/tree.h b/gcc/tree.h
index c0a027a650d..374166ff08c 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -6346,4 +6346,32 @@ public:
   operator location_t () const { return m_combined_loc; }
 };
 
+void dump_range_entry (FILE *file, struct range_entry *r);
+void debug_range_entry (struct range_entry *r);
+void init_range_entry (struct range_entry *r, tree exp, gimple *stmt);
+
+/* Operator, rank pair.  */
+struct operand_entry
+{
+  unsigned int rank;
+  unsigned int id;
+  tree op;
+  unsigned int count;
+  gimple *stmt_to_insert;
+};
+
+struct range_entry
+{
+  tree exp;
+  tree low;
+  tree high;
+  bool in_p;
+  bool strict_overflow_p;
+  unsigned int idx, next;
+};
+
+
+void linearize_expr_tree (vec<operand_entry *> *, gimple *, bool, bool);
+extern hash_map<tree, long> *operand_rank;
+
 #endif  /* GCC_TREE_H  */


^ permalink raw reply	[flat|nested] 2+ messages in thread

* [gcc(refs/users/marxin/heads/if-to-switch-v4)] Playground #1.
@ 2020-10-12 13:05 Martin Liska
  0 siblings, 0 replies; 2+ messages in thread
From: Martin Liska @ 2020-10-12 13:05 UTC (permalink / raw)
  To: gcc-cvs

https://gcc.gnu.org/g:0fefc48bf0a9f8e5a72784ead75df5a9adfab9d7

commit 0fefc48bf0a9f8e5a72784ead75df5a9adfab9d7
Author: Martin Liska <mliska@suse.cz>
Date:   Thu Oct 8 12:48:16 2020 +0200

    Playground #1.

Diff:
---
 gcc/gimple-if-to-switch.cc | 60 ++++++++++++++++++++++++++++++++++++++++++++++
 gcc/tree-ssa-reassoc.c     | 34 ++++++--------------------
 gcc/tree.h                 | 28 ++++++++++++++++++++++
 3 files changed, 95 insertions(+), 27 deletions(-)

diff --git a/gcc/gimple-if-to-switch.cc b/gcc/gimple-if-to-switch.cc
index f066432e9b7..7b547136287 100644
--- a/gcc/gimple-if-to-switch.cc
+++ b/gcc/gimple-if-to-switch.cc
@@ -548,6 +548,66 @@ analyze_condition_in_bb (basic_block bb)
   tree rhs = gimple_cond_rhs (cond);
   tree_code code = gimple_cond_code (cond);
 
+  operand_rank = new hash_map<tree, long>;
+  debug_bb(bb);
+
+  if (code == NE_EXPR)
+    {
+      gassign *def = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (lhs));
+      if (def)
+	{
+      enum tree_code rhs_code = gimple_assign_rhs_code (def);
+      if (associative_tree_code (rhs_code))
+	{
+	  auto_vec<operand_entry *> ops;
+	  if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
+	    ;
+	  else
+	    {
+	      linearize_expr_tree (&ops, def, true, false);
+	      unsigned length = ops.length ();
+	      struct range_entry *ranges = XNEWVEC (struct range_entry, length);
+	      for (unsigned i = 0; i < length; i++)
+		{
+		  operand_entry *oe = ops[i];
+		  ranges[i].idx = i;
+		  init_range_entry (ranges + i, oe->op,
+				    oe->op
+				    ? NULL
+				    : last_stmt (BASIC_BLOCK_FOR_FN (cfun, oe->id)));
+		  /* For | invert it now, we will invert it again before emitting
+		     the optimized expression.  */
+		  if (rhs_code == BIT_IOR_EXPR
+		      || (rhs_code == ERROR_MARK && oe->rank == BIT_IOR_EXPR))
+		    ranges[i].in_p = !ranges[i].in_p;
+		  debug_range_entry (&ranges[i]);
+		}
+	      fprintf (stderr, "\n");
+
+	      int a = 2;
+	    }
+	}
+	}
+      else
+	{
+      struct range_entry entry;
+      init_range_entry (&entry, NULL_TREE, cond);
+      debug_range_entry (&entry);
+      fprintf (stderr, "\n");
+
+	}
+    }
+  else
+    {
+      struct range_entry entry;
+      init_range_entry (&entry, NULL_TREE, cond);
+      debug_range_entry (&entry);
+      fprintf (stderr, "\n");
+    }
+
+  delete operand_rank;
+  operand_rank = NULL;
+
   /* Situation 1.  */
   if (code == EQ_EXPR)
     {
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index a2ca1713d4b..6f83d184fb9 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -188,15 +188,6 @@ static struct
   int pows_created;
 } reassociate_stats;
 
-/* Operator, rank pair.  */
-struct operand_entry
-{
-  unsigned int rank;
-  unsigned int id;
-  tree op;
-  unsigned int count;
-  gimple *stmt_to_insert;
-};
 
 static object_allocator<operand_entry> operand_entry_pool
   ("operand entry pool");
@@ -211,7 +202,7 @@ static unsigned int next_operand_entry_id;
 static long *bb_rank;
 
 /* Operand->rank hashtable.  */
-static hash_map<tree, long> *operand_rank;
+hash_map<tree, long> *operand_rank;
 
 /* Vector of SSA_NAMEs on which after reassociate_bb is done with
    all basic blocks the CFG should be adjusted - basic blocks
@@ -226,7 +217,7 @@ static bool reassoc_stmt_dominates_stmt_p (gimple *, gimple *);
 /* Wrapper around gsi_remove, which adjusts gimple_uid of debug stmts
    possibly added by gsi_remove.  */
 
-bool
+static bool
 reassoc_remove_stmt (gimple_stmt_iterator *gsi)
 {
   gimple *stmt = gsi_stmt (*gsi);
@@ -1060,8 +1051,6 @@ eliminate_using_constants (enum tree_code opcode,
 }
 
 
-static void linearize_expr_tree (vec<operand_entry *> *, gimple *,
-				 bool, bool);
 
 /* Structure for tracking and counting operands.  */
 struct oecount {
@@ -2406,18 +2395,7 @@ optimize_ops_list (enum tree_code opcode,
    For more information see comments above fold_test_range in fold-const.c,
    this implementation is for GIMPLE.  */
 
-struct range_entry
-{
-  tree exp;
-  tree low;
-  tree high;
-  bool in_p;
-  bool strict_overflow_p;
-  unsigned int idx, next;
-};
 
-void dump_range_entry (FILE *file, struct range_entry *r);
-void debug_range_entry (struct range_entry *r);
 
 /* Dump the range entry R to FILE, skipping its expression if SKIP_EXP.  */
 
@@ -2447,7 +2425,7 @@ debug_range_entry (struct range_entry *r)
    an SSA_NAME and STMT argument is ignored, otherwise STMT
    argument should be a GIMPLE_COND.  */
 
-static void
+void
 init_range_entry (struct range_entry *r, tree exp, gimple *stmt)
 {
   int in_p;
@@ -3805,7 +3783,9 @@ optimize_range_tests (enum tree_code opcode,
       if (opcode == BIT_IOR_EXPR
 	  || (opcode == ERROR_MARK && oe->rank == BIT_IOR_EXPR))
 	ranges[i].in_p = !ranges[i].in_p;
+//      debug_range_entry (&ranges[i]);
     }
+//  fprintf (stderr, "\n");
 
   qsort (ranges, length, sizeof (*ranges), range_entry_cmp);
   for (i = 0; i < length; i++)
@@ -4693,7 +4673,7 @@ maybe_optimize_range_tests (gimple *stmt)
       if (bb == first_bb)
 	break;
     }
-  if (ops.length () > 1)
+//  if (ops.length () > 1)
     any_changes = optimize_range_tests (ERROR_MARK, &ops, first_bb);
   if (any_changes)
     {
@@ -5605,7 +5585,7 @@ try_special_add_to_ops (vec<operand_entry *> *ops,
 /* Recursively linearize a binary expression that is the RHS of STMT.
    Place the operands of the expression tree in the vector named OPS.  */
 
-static void
+void
 linearize_expr_tree (vec<operand_entry *> *ops, gimple *stmt,
 		     bool is_associative, bool set_visited)
 {
diff --git a/gcc/tree.h b/gcc/tree.h
index c0a027a650d..374166ff08c 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -6346,4 +6346,32 @@ public:
   operator location_t () const { return m_combined_loc; }
 };
 
+void dump_range_entry (FILE *file, struct range_entry *r);
+void debug_range_entry (struct range_entry *r);
+void init_range_entry (struct range_entry *r, tree exp, gimple *stmt);
+
+/* Operator, rank pair.  */
+struct operand_entry
+{
+  unsigned int rank;
+  unsigned int id;
+  tree op;
+  unsigned int count;
+  gimple *stmt_to_insert;
+};
+
+struct range_entry
+{
+  tree exp;
+  tree low;
+  tree high;
+  bool in_p;
+  bool strict_overflow_p;
+  unsigned int idx, next;
+};
+
+
+void linearize_expr_tree (vec<operand_entry *> *, gimple *, bool, bool);
+extern hash_map<tree, long> *operand_rank;
+
 #endif  /* GCC_TREE_H  */


^ permalink raw reply	[flat|nested] 2+ messages in thread

end of thread, other threads:[~2020-10-12 13:05 UTC | newest]

Thread overview: 2+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2020-10-09 12:41 [gcc(refs/users/marxin/heads/if-to-switch-v4)] Playground #1 Martin Liska
2020-10-12 13:05 Martin Liska

This is a public inbox; see mirroring instructions
for how to clone and mirror all data and code used for this inbox,
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).