public inbox for gcc-patches@gcc.gnu.org
* [PATCH 10/13] make a member an auto_sbitmap
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:26   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap tbsaunde+gcc
                   ` (11 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* tree-ssa-dse.c (dse_dom_walker): Make m_live_bytes an
	auto_sbitmap.
---
 gcc/tree-ssa-dse.c | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 90230abe822..3ebc19948e1 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -601,16 +601,14 @@ class dse_dom_walker : public dom_walker
 {
 public:
   dse_dom_walker (cdi_direction direction)
-    : dom_walker (direction), m_byte_tracking_enabled (false)
-
-  { m_live_bytes = sbitmap_alloc (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)); }
-
-  ~dse_dom_walker () { sbitmap_free (m_live_bytes); }
+    : dom_walker (direction),
+    m_live_bytes (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)),
+    m_byte_tracking_enabled (false) {}
 
   virtual edge before_dom_children (basic_block);
 
 private:
-  sbitmap m_live_bytes;
+  auto_sbitmap m_live_bytes;
   bool m_byte_tracking_enabled;
   void dse_optimize_stmt (gimple_stmt_iterator *);
 };
-- 
2.11.0


* [PATCH 09/13] use auto_bitmap more with alternate obstacks
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (2 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 07/13] use auto_bitmap more tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:31   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 13/13] make inverted_post_order_compute() operate on a vec tbsaunde+gcc
                   ` (8 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* df-core.c (df_set_blocks): Start using auto_bitmap.
	(df_compact_blocks): Likewise.
	* df-problems.c (df_rd_confluence_n): Likewise.
	* df-scan.c (df_insn_rescan_all): Likewise.
	(df_process_deferred_rescans): Likewise.
	(df_update_entry_block_defs): Likewise.
	(df_update_exit_block_uses): Likewise.
	(df_entry_block_bitmap_verify): Likewise.
	(df_exit_block_bitmap_verify): Likewise.
	(df_scan_verify): Likewise.
	* lra-constraints.c (lra_constraints): Likewise.
	(undo_optional_reloads): Likewise.
	(lra_undo_inheritance): Likewise.
	* lra-remat.c (calculate_gen_cands): Likewise.
	(do_remat): Likewise.
	* lra-spills.c (assign_spill_hard_regs): Likewise.
	(spill_pseudos): Likewise.
	* tree-ssa-pre.c (bitmap_set_and): Likewise.
	(bitmap_set_subtract_values): Likewise.
---
 gcc/df-core.c         | 30 +++++++----------
 gcc/df-problems.c     | 10 +++---
 gcc/df-scan.c         | 93 ++++++++++++++++++++-------------------------------
 gcc/lra-constraints.c | 42 ++++++++++-------------
 gcc/lra-remat.c       | 43 ++++++++++--------------
 gcc/lra-spills.c      | 25 ++++++--------
 gcc/tree-ssa-pre.c    | 17 ++++------
 7 files changed, 104 insertions(+), 156 deletions(-)

diff --git a/gcc/df-core.c b/gcc/df-core.c
index 98787a768c6..1b270d417aa 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -497,9 +497,8 @@ df_set_blocks (bitmap blocks)
 	  /* This block is called to change the focus from one subset
 	     to another.  */
 	  int p;
-	  bitmap_head diff;
-	  bitmap_initialize (&diff, &df_bitmap_obstack);
-	  bitmap_and_compl (&diff, df->blocks_to_analyze, blocks);
+	  auto_bitmap diff (&df_bitmap_obstack);
+	  bitmap_and_compl (diff, df->blocks_to_analyze, blocks);
 	  for (p = 0; p < df->num_problems_defined; p++)
 	    {
 	      struct dataflow *dflow = df->problems_in_order[p];
@@ -510,7 +509,7 @@ df_set_blocks (bitmap blocks)
 		  bitmap_iterator bi;
 		  unsigned int bb_index;
 
-		  EXECUTE_IF_SET_IN_BITMAP (&diff, 0, bb_index, bi)
+		  EXECUTE_IF_SET_IN_BITMAP (diff, 0, bb_index, bi)
 		    {
 		      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
 		      if (bb)
@@ -522,8 +521,6 @@ df_set_blocks (bitmap blocks)
 		    }
 		}
 	    }
-
-	   bitmap_clear (&diff);
 	}
       else
 	{
@@ -1652,9 +1649,8 @@ df_compact_blocks (void)
   int i, p;
   basic_block bb;
   void *problem_temps;
-  bitmap_head tmp;
 
-  bitmap_initialize (&tmp, &df_bitmap_obstack);
+  auto_bitmap tmp (&df_bitmap_obstack);
   for (p = 0; p < df->num_problems_defined; p++)
     {
       struct dataflow *dflow = df->problems_in_order[p];
@@ -1663,17 +1659,17 @@ df_compact_blocks (void)
 	 dflow problem.  */
       if (dflow->out_of_date_transfer_functions)
 	{
-	  bitmap_copy (&tmp, dflow->out_of_date_transfer_functions);
+	  bitmap_copy (tmp, dflow->out_of_date_transfer_functions);
 	  bitmap_clear (dflow->out_of_date_transfer_functions);
-	  if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
+	  if (bitmap_bit_p (tmp, ENTRY_BLOCK))
 	    bitmap_set_bit (dflow->out_of_date_transfer_functions, ENTRY_BLOCK);
-	  if (bitmap_bit_p (&tmp, EXIT_BLOCK))
+	  if (bitmap_bit_p (tmp, EXIT_BLOCK))
 	    bitmap_set_bit (dflow->out_of_date_transfer_functions, EXIT_BLOCK);
 
 	  i = NUM_FIXED_BLOCKS;
 	  FOR_EACH_BB_FN (bb, cfun)
 	    {
-	      if (bitmap_bit_p (&tmp, bb->index))
+	      if (bitmap_bit_p (tmp, bb->index))
 		bitmap_set_bit (dflow->out_of_date_transfer_functions, i);
 	      i++;
 	    }
@@ -1711,23 +1707,21 @@ df_compact_blocks (void)
 
   if (df->blocks_to_analyze)
     {
-      if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
+      if (bitmap_bit_p (tmp, ENTRY_BLOCK))
 	bitmap_set_bit (df->blocks_to_analyze, ENTRY_BLOCK);
-      if (bitmap_bit_p (&tmp, EXIT_BLOCK))
+      if (bitmap_bit_p (tmp, EXIT_BLOCK))
 	bitmap_set_bit (df->blocks_to_analyze, EXIT_BLOCK);
-      bitmap_copy (&tmp, df->blocks_to_analyze);
+      bitmap_copy (tmp, df->blocks_to_analyze);
       bitmap_clear (df->blocks_to_analyze);
       i = NUM_FIXED_BLOCKS;
       FOR_EACH_BB_FN (bb, cfun)
 	{
-	  if (bitmap_bit_p (&tmp, bb->index))
+	  if (bitmap_bit_p (tmp, bb->index))
 	    bitmap_set_bit (df->blocks_to_analyze, i);
 	  i++;
 	}
     }
 
-  bitmap_clear (&tmp);
-
   i = NUM_FIXED_BLOCKS;
   FOR_EACH_BB_FN (bb, cfun)
     {
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 92323a39d8a..755aecf46df 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -461,19 +461,17 @@ df_rd_confluence_n (edge e)
       bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
       bitmap_iterator bi;
       unsigned int regno;
-      bitmap_head tmp;
 
-      bitmap_initialize (&tmp, &df_bitmap_obstack);
-      bitmap_and_compl (&tmp, op2, dense_invalidated);
+      auto_bitmap tmp (&df_bitmap_obstack);
+      bitmap_and_compl (tmp, op2, dense_invalidated);
 
       EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
  	{
- 	  bitmap_clear_range (&tmp,
+	  bitmap_clear_range (tmp,
  			      DF_DEFS_BEGIN (regno),
  			      DF_DEFS_COUNT (regno));
 	}
-      changed |= bitmap_ior_into (op1, &tmp);
-      bitmap_clear (&tmp);
+      changed |= bitmap_ior_into (op1, tmp);
       return changed;
     }
   else
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index f75098c2bec..4884608b167 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -1161,9 +1161,6 @@ df_insn_rescan_all (void)
   basic_block bb;
   bitmap_iterator bi;
   unsigned int uid;
-  bitmap_head tmp;
-
-  bitmap_initialize (&tmp, &df_bitmap_obstack);
 
   if (df->changeable_flags & DF_NO_INSN_RESCAN)
     {
@@ -1177,15 +1174,15 @@ df_insn_rescan_all (void)
       defer_insn_rescan = true;
     }
 
-  bitmap_copy (&tmp, &df->insns_to_delete);
-  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
+  auto_bitmap tmp (&df_bitmap_obstack);
+  bitmap_copy (tmp, &df->insns_to_delete);
+  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
     {
       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
       if (insn_info)
 	df_insn_info_delete (uid);
     }
 
-  bitmap_clear (&tmp);
   bitmap_clear (&df->insns_to_delete);
   bitmap_clear (&df->insns_to_rescan);
   bitmap_clear (&df->insns_to_notes_rescan);
@@ -1215,9 +1212,6 @@ df_process_deferred_rescans (void)
   bool defer_insn_rescan = false;
   bitmap_iterator bi;
   unsigned int uid;
-  bitmap_head tmp;
-
-  bitmap_initialize (&tmp, &df_bitmap_obstack);
 
   if (df->changeable_flags & DF_NO_INSN_RESCAN)
     {
@@ -1234,24 +1228,25 @@ df_process_deferred_rescans (void)
   if (dump_file)
     fprintf (dump_file, "starting the processing of deferred insns\n");
 
-  bitmap_copy (&tmp, &df->insns_to_delete);
-  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
+  auto_bitmap tmp (&df_bitmap_obstack);
+  bitmap_copy (tmp, &df->insns_to_delete);
+  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
     {
       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
       if (insn_info)
 	df_insn_info_delete (uid);
     }
 
-  bitmap_copy (&tmp, &df->insns_to_rescan);
-  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
+  bitmap_copy (tmp, &df->insns_to_rescan);
+  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
     {
       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
       if (insn_info)
 	df_insn_rescan (insn_info->insn);
     }
 
-  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
-  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
+  bitmap_copy (tmp, &df->insns_to_notes_rescan);
+  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
     {
       struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
       if (insn_info)
@@ -1261,7 +1256,6 @@ df_process_deferred_rescans (void)
   if (dump_file)
     fprintf (dump_file, "ending the processing of deferred insns\n");
 
-  bitmap_clear (&tmp);
   bitmap_clear (&df->insns_to_delete);
   bitmap_clear (&df->insns_to_rescan);
   bitmap_clear (&df->insns_to_notes_rescan);
@@ -3628,14 +3622,13 @@ df_record_entry_block_defs (bitmap entry_block_defs)
 void
 df_update_entry_block_defs (void)
 {
-  bitmap_head refs;
   bool changed = false;
 
-  bitmap_initialize (&refs, &df_bitmap_obstack);
-  df_get_entry_block_def_set (&refs);
+  auto_bitmap refs (&df_bitmap_obstack);
+  df_get_entry_block_def_set (refs);
   if (df->entry_block_defs)
     {
-      if (!bitmap_equal_p (df->entry_block_defs, &refs))
+      if (!bitmap_equal_p (df->entry_block_defs, refs))
 	{
 	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
 	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
@@ -3655,11 +3648,10 @@ df_update_entry_block_defs (void)
 
   if (changed)
     {
-      df_record_entry_block_defs (&refs);
-      bitmap_copy (df->entry_block_defs, &refs);
+      df_record_entry_block_defs (refs);
+      bitmap_copy (df->entry_block_defs, refs);
       df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
     }
-  bitmap_clear (&refs);
 }
 
 
@@ -3804,14 +3796,13 @@ df_record_exit_block_uses (bitmap exit_block_uses)
 void
 df_update_exit_block_uses (void)
 {
-  bitmap_head refs;
   bool changed = false;
 
-  bitmap_initialize (&refs, &df_bitmap_obstack);
-  df_get_exit_block_use_set (&refs);
+  auto_bitmap refs (&df_bitmap_obstack);
+  df_get_exit_block_use_set (refs);
   if (df->exit_block_uses)
     {
-      if (!bitmap_equal_p (df->exit_block_uses, &refs))
+      if (!bitmap_equal_p (df->exit_block_uses, refs))
 	{
 	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
 	  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
@@ -3831,11 +3822,10 @@ df_update_exit_block_uses (void)
 
   if (changed)
     {
-      df_record_exit_block_uses (&refs);
-      bitmap_copy (df->exit_block_uses,& refs);
+      df_record_exit_block_uses (refs);
+      bitmap_copy (df->exit_block_uses, refs);
       df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
     }
-  bitmap_clear (&refs);
 }
 
 static bool initialized = false;
@@ -4171,25 +4161,22 @@ df_bb_verify (basic_block bb)
 static bool
 df_entry_block_bitmap_verify (bool abort_if_fail)
 {
-  bitmap_head entry_block_defs;
   bool is_eq;
 
-  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
-  df_get_entry_block_def_set (&entry_block_defs);
+  auto_bitmap entry_block_defs (&df_bitmap_obstack);
+  df_get_entry_block_def_set (entry_block_defs);
 
-  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
+  is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
 
   if (!is_eq && abort_if_fail)
     {
       fprintf (stderr, "entry_block_defs = ");
-      df_print_regset (stderr, &entry_block_defs);
+      df_print_regset (stderr, entry_block_defs);
       fprintf (stderr, "df->entry_block_defs = ");
       df_print_regset (stderr, df->entry_block_defs);
       gcc_assert (0);
     }
 
-  bitmap_clear (&entry_block_defs);
-
   return is_eq;
 }
 
@@ -4200,25 +4187,22 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
 static bool
 df_exit_block_bitmap_verify (bool abort_if_fail)
 {
-  bitmap_head exit_block_uses;
   bool is_eq;
 
-  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
-  df_get_exit_block_use_set (&exit_block_uses);
+  auto_bitmap exit_block_uses (&df_bitmap_obstack);
+  df_get_exit_block_use_set (exit_block_uses);
 
-  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
+  is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
 
   if (!is_eq && abort_if_fail)
     {
       fprintf (stderr, "exit_block_uses = ");
-      df_print_regset (stderr, &exit_block_uses);
+      df_print_regset (stderr, exit_block_uses);
       fprintf (stderr, "df->exit_block_uses = ");
       df_print_regset (stderr, df->exit_block_uses);
       gcc_assert (0);
     }
 
-  bitmap_clear (&exit_block_uses);
-
   return is_eq;
 }
 
@@ -4231,8 +4215,6 @@ df_scan_verify (void)
 {
   unsigned int i;
   basic_block bb;
-  bitmap_head regular_block_artificial_uses;
-  bitmap_head eh_block_artificial_uses;
 
   if (!df)
     return;
@@ -4253,24 +4235,21 @@ df_scan_verify (void)
   /* (2) There are various bitmaps whose value may change over the
      course of the compilation.  This step recomputes them to make
      sure that they have not slipped out of date.  */
-  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
-  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
+  auto_bitmap regular_block_artificial_uses (&df_bitmap_obstack);
+  auto_bitmap eh_block_artificial_uses (&df_bitmap_obstack);
 
-  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
-  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
+  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
+  df_get_eh_block_artificial_uses (eh_block_artificial_uses);
 
-  bitmap_ior_into (&eh_block_artificial_uses,
-		   &regular_block_artificial_uses);
+  bitmap_ior_into (eh_block_artificial_uses,
+		   regular_block_artificial_uses);
 
   /* Check artificial_uses bitmaps didn't change. */
-  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
+  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
 			      &df->regular_block_artificial_uses));
-  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
+  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
 			      &df->eh_block_artificial_uses));
 
-  bitmap_clear (&regular_block_artificial_uses);
-  bitmap_clear (&eh_block_artificial_uses);
-
   /* Verify entry block and exit block. These only verify the bitmaps,
      the refs are verified in df_bb_verify.  */
   df_entry_block_bitmap_verify (true);
diff --git a/gcc/lra-constraints.c b/gcc/lra-constraints.c
index c8bc9b9a66f..ed4fdc49d7c 100644
--- a/gcc/lra-constraints.c
+++ b/gcc/lra-constraints.c
@@ -4644,7 +4644,6 @@ lra_constraints (bool first_p)
   unsigned int min_len, new_min_len, uid;
   rtx set, x, reg, dest_reg;
   basic_block last_bb;
-  bitmap_head equiv_insn_bitmap;
   bitmap_iterator bi;
 
   lra_constraint_iter++;
@@ -4676,7 +4675,7 @@ lra_constraints (bool first_p)
   /* Do elimination before the equivalence processing as we can spill
      some pseudos during elimination.  */
   lra_eliminate (false, first_p);
-  bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
+  auto_bitmap equiv_insn_bitmap (&reg_obstack);
   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
     if (lra_reg_info[i].nrefs != 0)
       {
@@ -4737,14 +4736,14 @@ lra_constraints (bool first_p)
 	    if (contains_reg_p (x, false, true))
 	      ira_reg_equiv[i].profitable_p = false;
 	    if (get_equiv (reg) != reg)
-	      bitmap_ior_into (&equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
+	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
 	  }
       }
   for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
     update_equiv (i);
   /* We should add all insns containing pseudos which should be
      substituted by their equivalences.  */
-  EXECUTE_IF_SET_IN_BITMAP (&equiv_insn_bitmap, 0, uid, bi)
+  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
     lra_push_insn_by_uid (uid);
   min_len = lra_insn_stack_length ();
   new_insns_num = 0;
@@ -4775,7 +4774,7 @@ lra_constraints (bool first_p)
 	  /* We need to check equivalence in debug insn and change
 	     pseudo to the equivalent value if necessary.  */
 	  curr_id = lra_get_insn_recog_data (curr_insn);
-	  if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn)))
+	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
 	    {
 	      rtx old = *curr_id->operand_loc[0];
 	      *curr_id->operand_loc[0]
@@ -4849,7 +4848,7 @@ lra_constraints (bool first_p)
 	  /* Check non-transformed insns too for equiv change as USE
 	     or CLOBBER don't need reloads but can contain pseudos
 	     being changed on their equivalences.  */
-	  else if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn))
+	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
 		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
 	    {
 	      lra_update_insn_regno_info (curr_insn);
@@ -4857,7 +4856,7 @@ lra_constraints (bool first_p)
 	    }
 	}
     }
-  bitmap_clear (&equiv_insn_bitmap);
+
   /* If we used a new hard regno, changed_p should be true because the
      hard reg is assigned to a new pseudo.  */
   if (flag_checking && !changed_p)
@@ -6761,10 +6760,9 @@ undo_optional_reloads (void)
   bitmap_iterator bi, bi2;
   rtx_insn *insn;
   rtx set, src, dest;
-  bitmap_head removed_optional_reload_pseudos, insn_bitmap;
+  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);
 
-  bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
-  bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
+  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
   EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
     {
       keep_p = false;
@@ -6799,19 +6797,19 @@ undo_optional_reloads (void)
 	  }
       if (keep_p)
 	{
-	  bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
+	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
 	  if (lra_dump_file != NULL)
 	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
 	}
     }
-  change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
-  bitmap_initialize (&insn_bitmap, &reg_obstack);
-  EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
+  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
+  auto_bitmap insn_bitmap (&reg_obstack);
+  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
     {
       if (lra_dump_file != NULL)
 	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
-      bitmap_copy (&insn_bitmap, &lra_reg_info[regno].insn_bitmap);
-      EXECUTE_IF_SET_IN_BITMAP (&insn_bitmap, 0, uid, bi2)
+      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
+      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
 	{
 	  insn = lra_insn_recog_data[uid]->insn;
 	  if ((set = single_set (insn)) != NULL_RTX)
@@ -6855,8 +6853,6 @@ undo_optional_reloads (void)
   /* Clear restore_regnos.  */
   EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
     lra_reg_info[regno].restore_rtx = NULL_RTX;
-  bitmap_clear (&insn_bitmap);
-  bitmap_clear (&removed_optional_reload_pseudos);
   return change_p;
 }
 
@@ -6869,7 +6865,6 @@ lra_undo_inheritance (void)
   int hard_regno;
   int n_all_inherit, n_inherit, n_all_split, n_split;
   rtx restore_rtx;
-  bitmap_head remove_pseudos;
   bitmap_iterator bi;
   bool change_p;
 
@@ -6880,7 +6875,7 @@ lra_undo_inheritance (void)
     fprintf (lra_dump_file,
 	     "\n********** Undoing inheritance #%d: **********\n\n",
 	     lra_undo_inheritance_iter);
-  bitmap_initialize (&remove_pseudos, &reg_obstack);
+  auto_bitmap remove_pseudos (&reg_obstack);
   n_inherit = n_all_inherit = 0;
   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
     if (lra_reg_info[regno].restore_rtx != NULL_RTX)
@@ -6892,7 +6887,7 @@ lra_undo_inheritance (void)
 	       allocation we used shorter live-ranges.  */
 	    && (! REG_P (lra_reg_info[regno].restore_rtx)
 		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
-	  bitmap_set_bit (&remove_pseudos, regno);
+	  bitmap_set_bit (remove_pseudos, regno);
 	else
 	  n_inherit++;
       }
@@ -6910,7 +6905,7 @@ lra_undo_inheritance (void)
 	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
 		      ? reg_renumber[restore_regno] : restore_regno);
 	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
-	  bitmap_set_bit (&remove_pseudos, regno);
+	  bitmap_set_bit (remove_pseudos, regno);
 	else
 	  {
 	    n_split++;
@@ -6923,8 +6918,7 @@ lra_undo_inheritance (void)
     fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
 	     n_split, n_all_split,
 	     (double) n_split / n_all_split * 100);
-  change_p = remove_inheritance_pseudos (&remove_pseudos);
-  bitmap_clear (&remove_pseudos);
+  change_p = remove_inheritance_pseudos (remove_pseudos);
   /* Clear restore_regnos.  */
   EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
     lra_reg_info[regno].restore_rtx = NULL_RTX;
diff --git a/gcc/lra-remat.c b/gcc/lra-remat.c
index 2c51481374a..fb294edf368 100644
--- a/gcc/lra-remat.c
+++ b/gcc/lra-remat.c
@@ -746,14 +746,12 @@ calculate_gen_cands (void)
 {
   basic_block bb;
   bitmap gen_cands;
-  bitmap_head gen_insns;
   rtx_insn *insn;
 
-  bitmap_initialize (&gen_insns, &reg_obstack);
   FOR_EACH_BB_FN (bb, cfun)
     {
       gen_cands = &get_remat_bb_data (bb)->gen_cands;
-      bitmap_clear (&gen_insns);
+      auto_bitmap gen_insns (&reg_obstack);
       FOR_BB_INSNS (bb, insn)
 	if (INSN_P (insn))
 	  {
@@ -782,7 +780,7 @@ calculate_gen_cands (void)
 		   reg = reg->next)
 		if (reg->type != OP_IN
 		    || find_regno_note (insn, REG_DEAD, reg->regno) != NULL)
-		  EXECUTE_IF_SET_IN_BITMAP (&gen_insns, 0, uid, bi)
+		  EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
 		    {
 		      rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
 		      
@@ -801,7 +799,7 @@ calculate_gen_cands (void)
 		    }
 	    
 	    if (CALL_P (insn))
-	      EXECUTE_IF_SET_IN_BITMAP (&gen_insns, 0, uid, bi)
+	      EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
 		{
 		  rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
 		  
@@ -813,17 +811,16 @@ calculate_gen_cands (void)
 		      bitmap_set_bit (&temp_bitmap, uid);
 		    }
 		}
-	    bitmap_and_compl_into (&gen_insns, &temp_bitmap);
+	    bitmap_and_compl_into (gen_insns, &temp_bitmap);
 
 	    cand = insn_to_cand[INSN_UID (insn)];
 	    if (cand != NULL)
 	      {
 		bitmap_set_bit (gen_cands, cand->index);
-		bitmap_set_bit (&gen_insns, INSN_UID (insn));
+		bitmap_set_bit (gen_insns, INSN_UID (insn));
 	      }
 	  }
     }  
-  bitmap_clear (&gen_insns);
 }
 
 \f
@@ -1059,15 +1056,13 @@ do_remat (void)
   unsigned regno;
   rtx_insn *insn;
   basic_block bb;
-  bitmap_head avail_cands;
-  bitmap_head active_cands;
   bool changed_p = false;
   /* Living hard regs and hard registers of living pseudos.  */
   HARD_REG_SET live_hard_regs;
   bitmap_iterator bi;
 
-  bitmap_initialize (&avail_cands, &reg_obstack);
-  bitmap_initialize (&active_cands, &reg_obstack);
+  auto_bitmap avail_cands (&reg_obstack);
+  auto_bitmap active_cands (&reg_obstack);
   FOR_EACH_BB_FN (bb, cfun)
     {
       CLEAR_HARD_REG_SET (live_hard_regs);
@@ -1079,11 +1074,11 @@ do_remat (void)
 	  if (hard_regno >= 0)
 	    SET_HARD_REG_BIT (live_hard_regs, hard_regno);
 	}
-      bitmap_and (&avail_cands, &get_remat_bb_data (bb)->avin_cands,
+      bitmap_and (avail_cands, &get_remat_bb_data (bb)->avin_cands,
 		  &get_remat_bb_data (bb)->livein_cands);
       /* Activating insns are always in the same block as their corresponding
 	 remat insn, so at the start of a block the two bitsets are equal.  */
-      bitmap_copy (&active_cands, &avail_cands);
+      bitmap_copy (active_cands, avail_cands);
       FOR_BB_INSNS (bb, insn)
 	{
 	  if (!NONDEBUG_INSN_P (insn))
@@ -1117,8 +1112,8 @@ do_remat (void)
 	      for (cand = regno_cands[src_regno];
 		   cand != NULL;
 		   cand = cand->next_regno_cand)
-		if (bitmap_bit_p (&avail_cands, cand->index)
-		    && bitmap_bit_p (&active_cands, cand->index))
+		if (bitmap_bit_p (avail_cands, cand->index)
+		    && bitmap_bit_p (active_cands, cand->index))
 		  break;
 	    }
 	  int i, hard_regno, nregs;
@@ -1189,7 +1184,7 @@ do_remat (void)
 		 reg = reg->next)
 	      if (reg->type != OP_IN
 		  || find_regno_note (insn, REG_DEAD, reg->regno) != NULL)
-		EXECUTE_IF_SET_IN_BITMAP (&avail_cands, 0, cid, bi)
+		EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
 		  {
 		    cand = all_cands[cid];
 		    
@@ -1203,7 +1198,7 @@ do_remat (void)
 		  }
 
 	  if (CALL_P (insn))
-	    EXECUTE_IF_SET_IN_BITMAP (&avail_cands, 0, cid, bi)
+	    EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
 	      {
 		cand = all_cands[cid];
 		
@@ -1211,22 +1206,22 @@ do_remat (void)
 		  bitmap_set_bit (&temp_bitmap, cand->index);
 	      }
 
-	  bitmap_and_compl_into (&avail_cands, &temp_bitmap);
+	  bitmap_and_compl_into (avail_cands, &temp_bitmap);
 
 	  /* Now see whether a candidate is made active or available
 	     by this insn.  */
 	  cand = insn_to_cand_activation[INSN_UID (insn)];
 	  if (cand)
-	    bitmap_set_bit (&active_cands, cand->index);
+	    bitmap_set_bit (active_cands, cand->index);
 
 	  cand = insn_to_cand[INSN_UID (insn)];
 	  if (cand != NULL)
 	    {
-	      bitmap_set_bit (&avail_cands, cand->index);
+	      bitmap_set_bit (avail_cands, cand->index);
 	      if (cand->reload_regno == -1)
-		bitmap_set_bit (&active_cands, cand->index);
+		bitmap_set_bit (active_cands, cand->index);
 	      else
-		bitmap_clear_bit (&active_cands, cand->index);
+		bitmap_clear_bit (active_cands, cand->index);
 	    }
 
 	  if (remat_insn != NULL)
@@ -1274,8 +1269,6 @@ do_remat (void)
 	      SET_HARD_REG_BIT (live_hard_regs, reg->regno);
 	}
     }
-  bitmap_clear (&avail_cands);
-  bitmap_clear (&active_cands);
   return changed_p;
 }
 
diff --git a/gcc/lra-spills.c b/gcc/lra-spills.c
index 492fc182cf0..3df6f6786a3 100644
--- a/gcc/lra-spills.c
+++ b/gcc/lra-spills.c
@@ -223,7 +223,6 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
   rtx set;
   basic_block bb;
   HARD_REG_SET conflict_hard_regs;
-  bitmap_head ok_insn_bitmap;
   bitmap setjump_crosses = regstat_get_setjmp_crosses ();
   /* Hard registers which can not be used for any purpose at given
      program point because they are unallocatable or already allocated
@@ -243,13 +242,13 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
 	for (p = r->start; p <= r->finish; p++)
 	  add_to_hard_reg_set (&reserved_hard_regs[p],
 			       lra_reg_info[i].biggest_mode, hard_regno);
-  bitmap_initialize (&ok_insn_bitmap, &reg_obstack);
+  auto_bitmap ok_insn_bitmap (&reg_obstack);
   FOR_EACH_BB_FN (bb, cfun)
     FOR_BB_INSNS (bb, insn)
       if (DEBUG_INSN_P (insn)
 	  || ((set = single_set (insn)) != NULL_RTX
 	      && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
-	bitmap_set_bit (&ok_insn_bitmap, INSN_UID (insn));
+	bitmap_set_bit (ok_insn_bitmap, INSN_UID (insn));
   for (res = i = 0; i < n; i++)
     {
       regno = pseudo_regnos[i];
@@ -260,7 +259,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
 		 targetm.spill_class ((reg_class_t) rclass,
 				      PSEUDO_REGNO_MODE (regno)))) == NO_REGS
 	  || bitmap_intersect_compl_p (&lra_reg_info[regno].insn_bitmap,
-				       &ok_insn_bitmap))
+				       ok_insn_bitmap))
 	{
 	  pseudo_regnos[res++] = regno;
 	  continue;
@@ -300,7 +299,6 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
 	/* Just loop.  */
 	df_set_regs_ever_live (hard_regno + nr, true);
     }
-  bitmap_clear (&ok_insn_bitmap);
   free (reserved_hard_regs);
   return res;
 }
@@ -449,17 +447,16 @@ spill_pseudos (void)
   basic_block bb;
   rtx_insn *insn, *curr;
   int i;
-  bitmap_head spilled_pseudos, changed_insns;
 
-  bitmap_initialize (&spilled_pseudos, &reg_obstack);
-  bitmap_initialize (&changed_insns, &reg_obstack);
+  auto_bitmap spilled_pseudos (&reg_obstack);
+  auto_bitmap changed_insns (&reg_obstack);
   for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
     {
       if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
 	  && ! lra_former_scratch_p (i))
 	{
-	  bitmap_set_bit (&spilled_pseudos, i);
-	  bitmap_ior_into (&changed_insns, &lra_reg_info[i].insn_bitmap);
+	  bitmap_set_bit (spilled_pseudos, i);
+	  bitmap_ior_into (changed_insns, &lra_reg_info[i].insn_bitmap);
 	}
     }
   FOR_EACH_BB_FN (bb, cfun)
@@ -468,7 +465,7 @@ spill_pseudos (void)
 	{
 	  bool removed_pseudo_p = false;
 	  
-	  if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
+	  if (bitmap_bit_p (changed_insns, INSN_UID (insn)))
 	    {
 	      rtx *link_loc, link;
 
@@ -526,12 +523,10 @@ spill_pseudos (void)
 			 "Debug insn #%u is reset because it referenced "
 			 "removed pseudo\n", INSN_UID (insn));
 	    }
-	  bitmap_and_compl_into (df_get_live_in (bb), &spilled_pseudos);
-	  bitmap_and_compl_into (df_get_live_out (bb), &spilled_pseudos);
+	  bitmap_and_compl_into (df_get_live_in (bb), spilled_pseudos);
+	  bitmap_and_compl_into (df_get_live_out (bb), spilled_pseudos);
 	}
     }
-  bitmap_clear (&spilled_pseudos);
-  bitmap_clear (&changed_insns);
 }
 
 /* Return true if we need to change some pseudos into memory.  */
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 8175d2599ed..ca212daee62 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -817,19 +817,17 @@ bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
 
   if (dest != orig)
     {
-      bitmap_head temp;
-      bitmap_initialize (&temp, &grand_bitmap_obstack);
+      auto_bitmap temp (&grand_bitmap_obstack);
 
       bitmap_and_into (&dest->values, &orig->values);
-      bitmap_copy (&temp, &dest->expressions);
-      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
+      bitmap_copy (temp, &dest->expressions);
+      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
 	{
 	  pre_expr expr = expression_for_id (i);
 	  unsigned int value_id = get_expr_value_id (expr);
 	  if (!bitmap_bit_p (&dest->values, value_id))
 	    bitmap_clear_bit (&dest->expressions, i);
 	}
-      bitmap_clear (&temp);
     }
 }
 
@@ -862,18 +860,15 @@ bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
 {
   unsigned int i;
   bitmap_iterator bi;
-  bitmap_head temp;
+  auto_bitmap temp (&grand_bitmap_obstack);
 
-  bitmap_initialize (&temp, &grand_bitmap_obstack);
-
-  bitmap_copy (&temp, &a->expressions);
-  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
+  bitmap_copy (temp, &a->expressions);
+  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
     {
       pre_expr expr = expression_for_id (i);
       if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
 	bitmap_remove_from_set (a, expr);
     }
-  bitmap_clear (&temp);
 }
 
 
-- 
2.11.0


* [PATCH 06/13] replace some manual stacks with auto_vec
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (8 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 05/13] allow constructing an auto_vec with a preallocation, and a possibly larger actual allocation size tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:26   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 03/13] store the bitmap_head within the auto_bitmap tbsaunde+gcc
                   ` (2 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* cfganal.c (mark_dfs_back_edges): Replace manual stack with
	auto_vec.
	(post_order_compute): Likewise.
	(inverted_post_order_compute): Likewise.
	(pre_and_rev_post_order_compute_fn): Likewise.
---
 gcc/cfganal.c | 92 +++++++++++++++++++++++------------------------------------
 1 file changed, 36 insertions(+), 56 deletions(-)

diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 7377a7a0434..1b01564e8c7 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -61,10 +61,8 @@ static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
 bool
 mark_dfs_back_edges (void)
 {
-  edge_iterator *stack;
   int *pre;
   int *post;
-  int sp;
   int prenum = 1;
   int postnum = 1;
   bool found = false;
@@ -74,8 +72,7 @@ mark_dfs_back_edges (void)
   post = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
-  sp = 0;
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
 
   /* Allocate bitmap to track nodes that have been visited.  */
   auto_sbitmap visited (last_basic_block_for_fn (cfun));
@@ -84,16 +81,15 @@ mark_dfs_back_edges (void)
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
+  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
 
-  while (sp)
+  while (!stack.is_empty ())
     {
-      edge_iterator ei;
       basic_block src;
       basic_block dest;
 
       /* Look at the edge on the top of the stack.  */
-      ei = stack[sp - 1];
+      edge_iterator ei = stack.last ();
       src = ei_edge (ei)->src;
       dest = ei_edge (ei)->dest;
       ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
@@ -110,7 +106,7 @@ mark_dfs_back_edges (void)
 	    {
 	      /* Since the DEST node has been visited for the first
 		 time, check its successors.  */
-	      stack[sp++] = ei_start (dest->succs);
+	      stack.quick_push (ei_start (dest->succs));
 	    }
 	  else
 	    post[dest->index] = postnum++;
@@ -128,15 +124,14 @@ mark_dfs_back_edges (void)
 	    post[src->index] = postnum++;
 
 	  if (!ei_one_before_end_p (ei))
-	    ei_next (&stack[sp - 1]);
+	    ei_next (&stack.last ());
 	  else
-	    sp--;
+	    stack.pop ();
 	}
     }
 
   free (pre);
   free (post);
-  free (stack);
 
   return found;
 }
@@ -637,8 +632,6 @@ int
 post_order_compute (int *post_order, bool include_entry_exit,
 		    bool delete_unreachable)
 {
-  edge_iterator *stack;
-  int sp;
   int post_order_num = 0;
   int count;
 
@@ -646,8 +639,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
     post_order[post_order_num++] = EXIT_BLOCK;
 
   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
-  sp = 0;
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
 
   /* Allocate bitmap to track nodes that have been visited.  */
   auto_sbitmap visited (last_basic_block_for_fn (cfun));
@@ -656,16 +648,15 @@ post_order_compute (int *post_order, bool include_entry_exit,
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
+  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
 
-  while (sp)
+  while (!stack.is_empty ())
     {
-      edge_iterator ei;
       basic_block src;
       basic_block dest;
 
       /* Look at the edge on the top of the stack.  */
-      ei = stack[sp - 1];
+      edge_iterator ei = stack.last ();
       src = ei_edge (ei)->src;
       dest = ei_edge (ei)->dest;
 
@@ -679,7 +670,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
 	  if (EDGE_COUNT (dest->succs) > 0)
 	    /* Since the DEST node has been visited for the first
 	       time, check its successors.  */
-	    stack[sp++] = ei_start (dest->succs);
+	    stack.quick_push (ei_start (dest->succs));
 	  else
 	    post_order[post_order_num++] = dest->index;
 	}
@@ -690,9 +681,9 @@ post_order_compute (int *post_order, bool include_entry_exit,
 	    post_order[post_order_num++] = src->index;
 
 	  if (!ei_one_before_end_p (ei))
-	    ei_next (&stack[sp - 1]);
+	    ei_next (&stack.last ());
 	  else
-	    sp--;
+	    stack.pop ();
 	}
     }
 
@@ -722,7 +713,6 @@ post_order_compute (int *post_order, bool include_entry_exit,
       tidy_fallthru_edges ();
     }
 
-  free (stack);
   return post_order_num;
 }
 
@@ -813,16 +803,13 @@ inverted_post_order_compute (int *post_order,
 			     sbitmap *start_points)
 {
   basic_block bb;
-  edge_iterator *stack;
-  int sp;
   int post_order_num = 0;
 
   if (flag_checking)
     verify_no_unreachable_blocks ();
 
   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
-  sp = 0;
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
 
   /* Allocate bitmap to track nodes that have been visited.  */
   auto_sbitmap visited (last_basic_block_for_fn (cfun));
@@ -836,12 +823,12 @@ inverted_post_order_compute (int *post_order,
         if (bitmap_bit_p (*start_points, bb->index)
 	    && EDGE_COUNT (bb->preds) > 0)
 	  {
-            stack[sp++] = ei_start (bb->preds);
+	    stack.quick_push (ei_start (bb->preds));
             bitmap_set_bit (visited, bb->index);
 	  }
       if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
 	{
-          stack[sp++] = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
+	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
           bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
 	}
     }
@@ -853,7 +840,7 @@ inverted_post_order_compute (int *post_order,
         /* Push the initial edge on to the stack.  */
         if (EDGE_COUNT (bb->preds) > 0)
           {
-            stack[sp++] = ei_start (bb->preds);
+	    stack.quick_push (ei_start (bb->preds));
             bitmap_set_bit (visited, bb->index);
           }
       }
@@ -863,13 +850,13 @@ inverted_post_order_compute (int *post_order,
       bool has_unvisited_bb = false;
 
       /* The inverted traversal loop. */
-      while (sp)
+      while (!stack.is_empty ())
         {
           edge_iterator ei;
           basic_block pred;
 
           /* Look at the edge on the top of the stack.  */
-          ei = stack[sp - 1];
+	  ei = stack.last ();
           bb = ei_edge (ei)->dest;
           pred = ei_edge (ei)->src;
 
@@ -882,7 +869,7 @@ inverted_post_order_compute (int *post_order,
               if (EDGE_COUNT (pred->preds) > 0)
                 /* Since the predecessor node has been visited for the first
                    time, check its predecessors.  */
-                stack[sp++] = ei_start (pred->preds);
+		stack.quick_push (ei_start (pred->preds));
               else
                 post_order[post_order_num++] = pred->index;
             }
@@ -893,15 +880,15 @@ inverted_post_order_compute (int *post_order,
                 post_order[post_order_num++] = bb->index;
 
               if (!ei_one_before_end_p (ei))
-                ei_next (&stack[sp - 1]);
+		ei_next (&stack.last ());
               else
-                sp--;
+		stack.pop ();
             }
         }
 
       /* Detect any infinite loop and activate the kludge.
          Note that this doesn't check EXIT_BLOCK itself
-         since EXIT_BLOCK is always added after the outer do-while loop.  */
+	 since EXIT_BLOCK is always added after the outer do-while loop.  */
       FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
 		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
         if (!bitmap_bit_p (visited, bb->index))
@@ -926,31 +913,30 @@ inverted_post_order_compute (int *post_order,
                     basic_block be = dfs_find_deadend (bb);
                     gcc_assert (be != NULL);
                     bitmap_set_bit (visited, be->index);
-                    stack[sp++] = ei_start (be->preds);
+		    stack.quick_push (ei_start (be->preds));
                     break;
                   }
               }
           }
 
-      if (has_unvisited_bb && sp == 0)
+      if (has_unvisited_bb && stack.is_empty ())
         {
-          /* No blocks are reachable from EXIT at all.
+	  /* No blocks are reachable from EXIT at all.
              Find a dead-end from the ENTRY, and restart the iteration. */
 	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
           gcc_assert (be != NULL);
           bitmap_set_bit (visited, be->index);
-          stack[sp++] = ei_start (be->preds);
+	  stack.quick_push (ei_start (be->preds));
         }
 
       /* The only case the below while fires is
          when there's an infinite loop.  */
     }
-  while (sp);
+  while (!stack.is_empty ());
 
   /* EXIT_BLOCK is always included.  */
   post_order[post_order_num++] = EXIT_BLOCK;
 
-  free (stack);
   return post_order_num;
 }
 
@@ -971,14 +957,11 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
 				   int *pre_order, int *rev_post_order,
 				   bool include_entry_exit)
 {
-  edge_iterator *stack;
-  int sp;
   int pre_order_num = 0;
   int rev_post_order_num = n_basic_blocks_for_fn (cfun) - 1;
 
   /* Allocate stack for back-tracking up CFG.  */
-  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
-  sp = 0;
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
 
   if (include_entry_exit)
     {
@@ -998,16 +981,15 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
   bitmap_clear (visited);
 
   /* Push the first edge on to the stack.  */
-  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);
+  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));
 
-  while (sp)
+  while (!stack.is_empty ())
     {
-      edge_iterator ei;
       basic_block src;
       basic_block dest;
 
       /* Look at the edge on the top of the stack.  */
-      ei = stack[sp - 1];
+      edge_iterator ei = stack.last ();
       src = ei_edge (ei)->src;
       dest = ei_edge (ei)->dest;
 
@@ -1026,7 +1008,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
 	  if (EDGE_COUNT (dest->succs) > 0)
 	    /* Since the DEST node has been visited for the first
 	       time, check its successors.  */
-	    stack[sp++] = ei_start (dest->succs);
+	    stack.quick_push (ei_start (dest->succs));
 	  else if (rev_post_order)
 	    /* There are no successors for the DEST node so assign
 	       its reverse completion number.  */
@@ -1042,14 +1024,12 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
 	    rev_post_order[rev_post_order_num--] = src->index;
 
 	  if (!ei_one_before_end_p (ei))
-	    ei_next (&stack[sp - 1]);
+	    ei_next (&stack.last ());
 	  else
-	    sp--;
+	    stack.pop ();
 	}
     }
 
-  free (stack);
-
   if (include_entry_exit)
     {
       if (pre_order)
-- 
2.11.0


* [PATCH 00/13] misc data structure stuff
@ 2017-05-09 20:53 tbsaunde+gcc
  2017-05-09 20:53 ` [PATCH 10/13] make a member an auto_sbitmap tbsaunde+gcc
                   ` (12 more replies)
  0 siblings, 13 replies; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

Hi,

This is sort of a grab bag, but mostly it improves bitmaps and auto_vec, and
then uses them in more places.  The individual patches' commit messages should
explain things in more detail where needed.

Patches individually bootstrapped and regtested on x86_64-linux-gnu, ok?

Thanks!

Trev


Trevor Saunders (13):
  improve safety of freeing bitmaps
  improve bitmap / sbitmap compatibility of bitmap_set_bit
  store the bitmap_head within the auto_bitmap
  allow auto_bitmap to use other bitmap obstacks
  allow constructing an auto_vec with a preallocation, and a possibly
    larger actual allocation size
  replace some manual stacks with auto_vec
  use auto_bitmap more
  move several bitmaps from gc memory to the default obstack and use
    auto_bitmap
  use auto_bitmap more with alternate obstacks
  make a member an auto_sbitmap
  make more vars auto_sbitmaps
  make depth_first_search_ds a class
  make inverted_post_order_compute() operate on a vec

 gcc/bitmap.h                 |  30 +++++--
 gcc/bt-load.c                |   8 +-
 gcc/cfganal.c                | 202 ++++++++++++++++---------------------------
 gcc/cfganal.h                |   2 +-
 gcc/cfgloop.c                |   4 +-
 gcc/ddg.c                    |  26 ++----
 gcc/df-core.c                |  94 +++++++++-----------
 gcc/df-problems.c            |  10 +--
 gcc/df-scan.c                |  93 ++++++++------------
 gcc/df.h                     |   4 +-
 gcc/genrecog.c               |   8 +-
 gcc/haifa-sched.c            |  23 ++---
 gcc/hsa-common.h             |   4 +-
 gcc/hsa-gen.c                |  14 +--
 gcc/init-regs.c              |   4 +-
 gcc/ipa-inline.c             |   6 +-
 gcc/ipa-reference.c          |   3 +-
 gcc/ira.c                    |  97 +++++++--------------
 gcc/lcm.c                    |  14 ++-
 gcc/loop-invariant.c         |  12 +--
 gcc/lower-subreg.c           |   8 +-
 gcc/lra-constraints.c        |  42 ++++-----
 gcc/lra-lives.c              |   9 +-
 gcc/lra-remat.c              |  43 ++++-----
 gcc/lra-spills.c             |  25 +++---
 gcc/predict.c                |  19 ++--
 gcc/print-rtl.c              |   5 +-
 gcc/print-rtl.h              |   2 +-
 gcc/sbitmap.h                |  16 +++-
 gcc/shrink-wrap.c            |  48 +++-------
 gcc/tree-cfg.c               |   7 +-
 gcc/tree-loop-distribution.c |   4 +-
 gcc/tree-predcom.c           |   4 +-
 gcc/tree-ssa-coalesce.c      |   4 +-
 gcc/tree-ssa-dce.c           |  10 +--
 gcc/tree-ssa-dse.c           |  10 +--
 gcc/tree-ssa-phionlycprop.c  |  15 +---
 gcc/tree-ssa-pre.c           |  30 +++----
 gcc/tree-ssa-sink.c          |   9 +-
 gcc/tree-ssa-threadupdate.c  |  13 +--
 gcc/tree-ssa.c               |  12 +--
 gcc/tree-ssanames.c          |  10 +--
 gcc/vec.h                    |  12 +++
 43 files changed, 393 insertions(+), 622 deletions(-)

-- 
2.11.0


* [PATCH 05/13] allow constructing an auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (7 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 12/13] make depth_first_search_ds a class tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  6:58   ` Richard Sandiford
  2017-05-09 20:53 ` [PATCH 06/13] replace some manual stacks with auto_vec tbsaunde+gcc
                   ` (3 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

This allows us to set the capacity of the vector when we construct it,
and still use a stack buffer when the size is small enough.
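
As a rough usage sketch (a hypothetical caller, not part of the patch;
"n_elems" stands for some run-time count):

  /* 20 embedded slots as before, but the capacity is chosen at run time.
     When n_elems <= 20 the embedded buffer is used and nothing is
     heap-allocated; otherwise storage for n_elems elements is created up
     front, so the quick_push calls below never need to reallocate.  */
  auto_vec<int, 20> stack (n_elems);
  for (size_t i = 0; i < n_elems; i++)
    stack.quick_push (i);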

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* genrecog.c (int_set::int_set): Explicitly construct our
	auto_vec base class.
	* vec.h (auto_vec::auto_vec): New constructor.
---
 gcc/genrecog.c |  8 +++++---
 gcc/vec.h      | 12 ++++++++++++
 2 files changed, 17 insertions(+), 3 deletions(-)

diff --git a/gcc/genrecog.c b/gcc/genrecog.c
index 6a9e610e7a0..b69043f0d02 100644
--- a/gcc/genrecog.c
+++ b/gcc/genrecog.c
@@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
   iterator end ();
 };
 
-int_set::int_set () {}
+int_set::int_set () : auto_vec<uint64_t, 1> () {}
 
-int_set::int_set (uint64_t label)
+int_set::int_set (uint64_t label) :
+  auto_vec<uint64_t, 1> ()
 {
   safe_push (label);
 }
 
-int_set::int_set (const int_set &other)
+int_set::int_set (const int_set &other) :
+  auto_vec<uint64_t, 1> ()
 {
   safe_splice (other);
 }
diff --git a/gcc/vec.h b/gcc/vec.h
index fee46164b01..914f89c350c 100644
--- a/gcc/vec.h
+++ b/gcc/vec.h
@@ -1272,6 +1272,18 @@ public:
     this->m_vec = &m_auto;
   }
 
+  auto_vec (size_t s)
+  {
+    if (s > N)
+      {
+	this->create (s);
+	return;
+      }
+
+    m_auto.embedded_init (MAX (N, 2), 0, 1);
+    this->m_vec = &m_auto;
+  }
+
   ~auto_vec ()
   {
     this->release ();
-- 
2.11.0


* [PATCH 01/13] improve safety of freeing bitmaps
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (5 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:15   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 12/13] make depth_first_search_ds a class tbsaunde+gcc
                   ` (5 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

There are two groups of changes here.  First, sbitmap_free now takes an
sbitmap &, so that it can assign NULL to the pointer after freeing the
sbitmap, preventing use-after-free through that pointer.  Second, we
declare overloads of sbitmap_free and BITMAP_FREE taking auto_sbitmap and
auto_bitmap respectively, so that you can't double free the bitmap owned
by an auto_{s,}bitmap.
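
A minimal sketch of the effect (the calling code is invented for
illustration; the declarations are the ones added below):

  sbitmap live = sbitmap_alloc (128);
  /* ... */
  sbitmap_free (live);   /* The new reference overload sets LIVE to NULL,
                            so a stale use now fails loudly instead of
                            silently touching freed memory.  */

  auto_sbitmap owned (128);
  /* sbitmap_free (owned); */   /* Rejected at build time: the
                                   auto_sbitmap overload is declared but
                                   intentionally left unimplemented, so
                                   the bitmap OWNED owns can't be freed
                                   behind its back.  */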

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* bitmap.h (BITMAP_FREE): Convert from macro to inline function
	and add overloaded decl for auto_bitmap.
	* sbitmap.h (sbitmap_free): Add an overload for auto_sbitmap,
	and take the sbitmap argument by reference so it can be set to
	NULL after freeing.
---
 gcc/bitmap.h  | 21 +++++++++++++++++++--
 gcc/sbitmap.h |  7 ++++++-
 2 files changed, 25 insertions(+), 3 deletions(-)

diff --git a/gcc/bitmap.h b/gcc/bitmap.h
index f158b447357..7508239cff9 100644
--- a/gcc/bitmap.h
+++ b/gcc/bitmap.h
@@ -129,6 +129,8 @@ along with GCC; see the file COPYING3.  If not see
 
 #include "obstack.h"
 
+   class auto_bitmap;
+
 /* Bitmap memory usage.  */
 struct bitmap_usage: public mem_usage
 {
@@ -372,8 +374,23 @@ extern hashval_t bitmap_hash (const_bitmap);
 #define BITMAP_GGC_ALLOC() bitmap_gc_alloc ()
 
 /* Do any cleanup needed on a bitmap when it is no longer used.  */
-#define BITMAP_FREE(BITMAP) \
-       ((void) (bitmap_obstack_free ((bitmap) BITMAP), (BITMAP) = (bitmap) NULL))
+inline void
+BITMAP_FREE (bitmap &b)
+{
+  bitmap_obstack_free ((bitmap) b);
+  b = NULL;
+}
+
+inline void
+BITMAP_FREE (void *&b)
+{
+  bitmap_obstack_free ((bitmap) b);
+  b = NULL;
+}
+
+/* Intentionally unimplemented to ensure it is never called with an
+   auto_bitmap argument.  */
+void BITMAP_FREE (auto_bitmap);
 
 /* Iterator for bitmaps.  */
 
diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
index ce4d27d927c..cba0452cdb9 100644
--- a/gcc/sbitmap.h
+++ b/gcc/sbitmap.h
@@ -82,6 +82,8 @@ along with GCC; see the file COPYING3.  If not see
 #define SBITMAP_ELT_BITS (HOST_BITS_PER_WIDEST_FAST_INT * 1u)
 #define SBITMAP_ELT_TYPE unsigned HOST_WIDEST_FAST_INT
 
+class auto_sbitmap;
+
 struct simple_bitmap_def
 {
   unsigned int n_bits;		/* Number of bits.  */
@@ -208,11 +210,14 @@ bmp_iter_next (sbitmap_iterator *i, unsigned *bit_no ATTRIBUTE_UNUSED)
        bmp_iter_next (&(ITER), &(BITNUM)))
 #endif
 
-inline void sbitmap_free (sbitmap map)
+inline void sbitmap_free (sbitmap &map)
 {
   free (map);
+  map = NULL;
 }
 
+void sbitmap_free (auto_sbitmap);
+
 inline void sbitmap_vector_free (sbitmap * vec)
 {
   free (vec);
-- 
2.11.0


* [PATCH 07/13] use auto_bitmap more
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
  2017-05-09 20:53 ` [PATCH 10/13] make a member an auto_sbitmap tbsaunde+gcc
  2017-05-09 20:53 ` [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:28   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 09/13] use auto_bitmap more with alternate obstacks tbsaunde+gcc
                   ` (9 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* bt-load.c (combine_btr_defs): Use auto_bitmap to manage bitmap
	lifetime.
	(migrate_btr_def): Likewise.
	* cfgloop.c (get_loop_body_in_bfs_order): Likewise.
	* df-core.c (loop_post_order_compute): Likewise.
	(loop_inverted_post_order_compute): Likewise.
	* hsa-common.h: Likewise.
	* hsa-gen.c (hsa_bb::~hsa_bb): Likewise.
	* init-regs.c (initialize_uninitialized_regs): Likewise.
	* ipa-inline.c (resolve_noninline_speculation): Likewise.
	(inline_small_functions): Likewise.
	* ipa-reference.c (ipa_reference_write_optimization_summary):
	Likewise.
	* ira.c (combine_and_move_insns): Likewise.
	(build_insn_chain): Likewise.
	* loop-invariant.c (find_invariants): Likewise.
	* lower-subreg.c (propagate_pseudo_copies): Likewise.
	* predict.c (tree_predict_by_opcode): Likewise.
	(predict_paths_leading_to): Likewise.
	(predict_paths_leading_to_edge): Likewise.
	(estimate_loops_at_level): Likewise.
	(estimate_loops): Likewise.
	* shrink-wrap.c (try_shrink_wrapping): Likewise.
	(spread_components): Likewise.
	* tree-cfg.c (remove_edge_and_dominated_blocks): Likewise.
	* tree-loop-distribution.c (rdg_build_partitions): Likewise.
	* tree-predcom.c (tree_predictive_commoning_loop): Likewise.
	* tree-ssa-coalesce.c (coalesce_ssa_name): Likewise.
	* tree-ssa-phionlycprop.c (pass_phi_only_cprop::execute):
	Likewise.
	* tree-ssa-pre.c (remove_dead_inserted_code): Likewise.
	* tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
	* tree-ssa-threadupdate.c (compute_path_counts): Likewise.
	(mark_threaded_blocks): Likewise.
	(thread_through_all_blocks): Likewise.
	* tree-ssa.c (verify_ssa): Likewise.
	(execute_update_addresses_taken): Likewise.
	* tree-ssanames.c (verify_ssaname_freelists): Likewise.
---
 gcc/bt-load.c                |  8 +++-----
 gcc/cfgloop.c                |  4 +---
 gcc/df-core.c                |  8 ++------
 gcc/hsa-common.h             |  4 ++--
 gcc/hsa-gen.c                | 14 ++------------
 gcc/init-regs.c              |  4 +---
 gcc/ipa-inline.c             |  6 ++----
 gcc/ipa-reference.c          |  3 +--
 gcc/ira.c                    | 13 ++++---------
 gcc/loop-invariant.c         | 12 ++++--------
 gcc/lower-subreg.c           |  8 +-------
 gcc/predict.c                | 19 +++++--------------
 gcc/shrink-wrap.c            | 10 +++-------
 gcc/tree-cfg.c               |  7 +------
 gcc/tree-loop-distribution.c |  4 +---
 gcc/tree-predcom.c           |  4 +---
 gcc/tree-ssa-coalesce.c      |  4 +---
 gcc/tree-ssa-phionlycprop.c  | 15 ++++-----------
 gcc/tree-ssa-pre.c           |  4 +---
 gcc/tree-ssa-sink.c          |  9 +++------
 gcc/tree-ssa-threadupdate.c  | 13 +++----------
 gcc/tree-ssa.c               | 12 ++++--------
 gcc/tree-ssanames.c          | 10 +++-------
 23 files changed, 53 insertions(+), 142 deletions(-)

diff --git a/gcc/bt-load.c b/gcc/bt-load.c
index 27be6a382c4..32924e2ecc5 100644
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -1058,7 +1058,7 @@ combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
 	     target registers live over the merged range.  */
 	  int btr;
 	  HARD_REG_SET combined_btrs_live;
-	  bitmap combined_live_range = BITMAP_ALLOC (NULL);
+	  auto_bitmap combined_live_range;
 	  btr_user *user;
 
 	  if (other_def->live_range == NULL)
@@ -1116,7 +1116,6 @@ combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
 	      delete_insn (other_def->insn);
 
 	    }
-	  BITMAP_FREE (combined_live_range);
 	}
     }
 }
@@ -1255,7 +1254,6 @@ can_move_up (const_basic_block bb, const rtx_insn *insn, int n_insns)
 static int
 migrate_btr_def (btr_def *def, int min_cost)
 {
-  bitmap live_range;
   HARD_REG_SET btrs_live_in_range;
   int btr_used_near_def = 0;
   int def_basic_block_freq;
@@ -1289,7 +1287,7 @@ migrate_btr_def (btr_def *def, int min_cost)
     }
 
   btr_def_live_range (def, &btrs_live_in_range);
-  live_range = BITMAP_ALLOC (NULL);
+  auto_bitmap live_range;
   bitmap_copy (live_range, def->live_range);
 
 #ifdef INSN_SCHEDULING
@@ -1373,7 +1371,7 @@ migrate_btr_def (btr_def *def, int min_cost)
       if (dump_file)
 	fprintf (dump_file, "failed to move\n");
     }
-  BITMAP_FREE (live_range);
+
   return !give_up;
 }
 
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index afd56bb8cf7..654d188e8b5 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -923,7 +923,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
 {
   basic_block *blocks;
   basic_block bb;
-  bitmap visited;
   unsigned int i = 1;
   unsigned int vc = 0;
 
@@ -931,7 +930,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
   gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   blocks = XNEWVEC (basic_block, loop->num_nodes);
-  visited = BITMAP_ALLOC (NULL);
+  auto_bitmap visited;
   blocks[0] = loop->header;
   bitmap_set_bit (visited, loop->header->index);
   while (i < loop->num_nodes)
@@ -952,7 +951,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
 	}
     }
 
-  BITMAP_FREE (visited);
   return blocks;
 }
 
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 19f4d3dae8c..98787a768c6 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -1303,14 +1303,13 @@ loop_post_order_compute (int *post_order, struct loop *loop)
   edge_iterator *stack;
   int sp;
   int post_order_num = 0;
-  bitmap visited;
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
   sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = BITMAP_ALLOC (NULL);
+  auto_bitmap visited;
 
   /* Push the first edge on to the stack.  */
   stack[sp++] = ei_start (loop_preheader_edge (loop)->src->succs);
@@ -1352,7 +1351,6 @@ loop_post_order_compute (int *post_order, struct loop *loop)
     }
 
   free (stack);
-  BITMAP_FREE (visited);
 
   return post_order_num;
 }
@@ -1367,14 +1365,13 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
   edge_iterator *stack;
   int sp;
   int post_order_num = 0;
-  bitmap visited;
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
   sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = BITMAP_ALLOC (NULL);
+  auto_bitmap visited;
 
   /* Put all latches into the initial work list.  In theory we'd want
      to start from loop exits but then we'd have the special case of
@@ -1420,7 +1417,6 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
     }
 
   free (stack);
-  BITMAP_FREE (visited);
   return post_order_num;
 }
 
diff --git a/gcc/hsa-common.h b/gcc/hsa-common.h
index a24bf6e5ad1..810624e4e1c 100644
--- a/gcc/hsa-common.h
+++ b/gcc/hsa-common.h
@@ -27,6 +27,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "vec.h"
 #include "hash-table.h"
 #include "basic-block.h"
+#include "bitmap.h"
 
 
 /* Return true if the compiler should produce HSAIL.  */
@@ -1027,7 +1028,6 @@ class hsa_bb
 public:
   hsa_bb (basic_block cfg_bb);
   hsa_bb (basic_block cfg_bb, int idx);
-  ~hsa_bb ();
 
   /* Append an instruction INSN into the basic block.  */
   void append_insn (hsa_insn_basic *insn);
@@ -1049,7 +1049,7 @@ public:
   /* Just a number to construct names from.  */
   int m_index;
 
-  bitmap m_liveout, m_livein;
+  auto_bitmap m_liveout, m_livein;
 private:
   /* Make the default constructor inaccessible.  */
   hsa_bb ();
diff --git a/gcc/hsa-gen.c b/gcc/hsa-gen.c
index 4b85e0b2678..c5d8a6e1f44 100644
--- a/gcc/hsa-gen.c
+++ b/gcc/hsa-gen.c
@@ -5716,8 +5716,7 @@ gen_hsa_phi_from_gimple_phi (gimple *phi_stmt, hsa_bb *hbb)
 
 hsa_bb::hsa_bb (basic_block cfg_bb, int idx)
   : m_bb (cfg_bb), m_first_insn (NULL), m_last_insn (NULL), m_first_phi (NULL),
-    m_last_phi (NULL), m_index (idx), m_liveout (BITMAP_ALLOC (NULL)),
-    m_livein (BITMAP_ALLOC (NULL))
+    m_last_phi (NULL), m_index (idx)
 {
   gcc_assert (!cfg_bb->aux);
   cfg_bb->aux = this;
@@ -5728,21 +5727,12 @@ hsa_bb::hsa_bb (basic_block cfg_bb, int idx)
 
 hsa_bb::hsa_bb (basic_block cfg_bb)
   : m_bb (cfg_bb), m_first_insn (NULL), m_last_insn (NULL), m_first_phi (NULL),
-    m_last_phi (NULL), m_index (hsa_cfun->m_hbb_count++),
-    m_liveout (BITMAP_ALLOC (NULL)), m_livein (BITMAP_ALLOC (NULL))
+    m_last_phi (NULL), m_index (hsa_cfun->m_hbb_count++)
 {
   gcc_assert (!cfg_bb->aux);
   cfg_bb->aux = this;
 }
 
-/* Destructor of class representing HSA BB.  */
-
-hsa_bb::~hsa_bb ()
-{
-  BITMAP_FREE (m_livein);
-  BITMAP_FREE (m_liveout);
-}
-
 /* Create and initialize and return a new hsa_bb structure for a given CFG
    basic block BB.  */
 
diff --git a/gcc/init-regs.c b/gcc/init-regs.c
index 2c69991c9e1..15d77467149 100644
--- a/gcc/init-regs.c
+++ b/gcc/init-regs.c
@@ -48,7 +48,7 @@ static void
 initialize_uninitialized_regs (void)
 {
   basic_block bb;
-  bitmap already_genned = BITMAP_ALLOC (NULL);
+  auto_bitmap already_genned;
 
   if (optimize == 1)
     {
@@ -125,8 +125,6 @@ initialize_uninitialized_regs (void)
 	df_dump (dump_file);
       df_remove_problem (df_live);
     }
-
-  BITMAP_FREE (already_genned);
 }
 
 namespace {
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index ce79af5719e..7337e0a2fe5 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1674,7 +1674,7 @@ resolve_noninline_speculation (edge_heap_t *edge_heap, struct cgraph_edge *edge)
       struct cgraph_node *node = edge->caller;
       struct cgraph_node *where = node->global.inlined_to
 				  ? node->global.inlined_to : node;
-      bitmap updated_nodes = BITMAP_ALLOC (NULL);
+      auto_bitmap updated_nodes;
 
       spec_rem += edge->count;
       edge->resolve_speculation ();
@@ -1684,7 +1684,6 @@ resolve_noninline_speculation (edge_heap_t *edge_heap, struct cgraph_edge *edge)
 			  updated_nodes, NULL);
       update_callee_keys (edge_heap, where,
 			  updated_nodes);
-      BITMAP_FREE (updated_nodes);
     }
 }
 
@@ -1726,7 +1725,7 @@ inline_small_functions (void)
   struct cgraph_node *node;
   struct cgraph_edge *edge;
   edge_heap_t edge_heap (sreal::min ());
-  bitmap updated_nodes = BITMAP_ALLOC (NULL);
+  auto_bitmap updated_nodes;
   int min_size, max_size;
   auto_vec<cgraph_edge *> new_indirect_edges;
   int initial_size = 0;
@@ -2072,7 +2071,6 @@ inline_small_functions (void)
 	     "Unit growth for small function inlining: %i->%i (%i%%)\n",
 	     initial_size, overall_size,
 	     initial_size ? overall_size * 100 / (initial_size) - 100: 0);
-  BITMAP_FREE (updated_nodes);
   symtab->remove_edge_removal_hook (edge_removal_hook_holder);
 }
 
diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
index f47d0cc51e1..222253920fd 100644
--- a/gcc/ipa-reference.c
+++ b/gcc/ipa-reference.c
@@ -992,7 +992,7 @@ ipa_reference_write_optimization_summary (void)
   unsigned int count = 0;
   int ltrans_statics_bitcount = 0;
   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
-  bitmap ltrans_statics = BITMAP_ALLOC (NULL);
+  auto_bitmap ltrans_statics;
   int i;
 
   reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
@@ -1052,7 +1052,6 @@ ipa_reference_write_optimization_summary (void)
 			       ltrans_statics_bitcount);
 	  }
       }
-  BITMAP_FREE (ltrans_statics);
   lto_destroy_simple_output_block (ob);
   splay_tree_delete (reference_vars_to_consider);
 }
diff --git a/gcc/ira.c b/gcc/ira.c
index bfb05080de1..c9751ce81ba 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -3698,7 +3698,7 @@ add_store_equivs (void)
 static void
 combine_and_move_insns (void)
 {
-  bitmap cleared_regs = BITMAP_ALLOC (NULL);
+  auto_bitmap cleared_regs;
   int max = max_reg_num ();
 
   for (int regno = FIRST_PSEUDO_REGISTER; regno < max; regno++)
@@ -3856,8 +3856,6 @@ combine_and_move_insns (void)
 		df_insn_rescan (insn);
 	    }
     }
-
-  BITMAP_FREE (cleared_regs);
 }
 
 /* A pass over indirect jumps, converting simple cases to direct jumps.
@@ -4083,8 +4081,8 @@ build_insn_chain (void)
   basic_block bb;
   struct insn_chain *c = NULL;
   struct insn_chain *next = NULL;
-  bitmap live_relevant_regs = BITMAP_ALLOC (NULL);
-  bitmap elim_regset = BITMAP_ALLOC (NULL);
+  auto_bitmap live_relevant_regs;
+  auto_bitmap elim_regset;
   /* live_subregs is a vector used to keep accurate information about
      which hardregs are live in multiword pseudos.  live_subregs and
      live_subregs_used are indexed by pseudo number.  The live_subreg
@@ -4093,7 +4091,7 @@ build_insn_chain (void)
      live_subreg[allocno] is number of bytes that the pseudo can
      occupy.  */
   sbitmap *live_subregs = XCNEWVEC (sbitmap, max_regno);
-  bitmap live_subregs_used = BITMAP_ALLOC (NULL);
+  auto_bitmap live_subregs_used;
 
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
     if (TEST_HARD_REG_BIT (eliminable_regset, i))
@@ -4336,9 +4334,6 @@ build_insn_chain (void)
     if (live_subregs[i] != NULL)
       sbitmap_free (live_subregs[i]);
   free (live_subregs);
-  BITMAP_FREE (live_subregs_used);
-  BITMAP_FREE (live_relevant_regs);
-  BITMAP_FREE (elim_regset);
 
   if (dump_file)
     print_insn_chains (dump_file);
diff --git a/gcc/loop-invariant.c b/gcc/loop-invariant.c
index 8292cea3e52..cda42efd13e 100644
--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c
@@ -1219,10 +1219,10 @@ find_invariants_body (struct loop *loop, basic_block *body,
 static void
 find_invariants (struct loop *loop)
 {
-  bitmap may_exit = BITMAP_ALLOC (NULL);
-  bitmap always_reached = BITMAP_ALLOC (NULL);
-  bitmap has_exit = BITMAP_ALLOC (NULL);
-  bitmap always_executed = BITMAP_ALLOC (NULL);
+  auto_bitmap may_exit;
+  auto_bitmap always_reached;
+  auto_bitmap has_exit;
+  auto_bitmap always_executed;
   basic_block *body = get_loop_body_in_dom_order (loop);
 
   find_exits (loop, body, may_exit, has_exit);
@@ -1233,10 +1233,6 @@ find_invariants (struct loop *loop)
   find_invariants_body (loop, body, always_reached, always_executed);
   merge_identical_invariants ();
 
-  BITMAP_FREE (always_reached);
-  BITMAP_FREE (always_executed);
-  BITMAP_FREE (may_exit);
-  BITMAP_FREE (has_exit);
   free (body);
 }
 
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index a4dcec51bb5..1ab1c71211f 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -405,10 +405,7 @@ find_pseudo_copy (rtx set)
 static void
 propagate_pseudo_copies (void)
 {
-  bitmap queue, propagate;
-
-  queue = BITMAP_ALLOC (NULL);
-  propagate = BITMAP_ALLOC (NULL);
+  auto_bitmap queue, propagate;
 
   bitmap_copy (queue, decomposable_context);
   do
@@ -429,9 +426,6 @@ propagate_pseudo_copies (void)
       bitmap_ior_into (decomposable_context, propagate);
     }
   while (!bitmap_empty_p (queue));
-
-  BITMAP_FREE (queue);
-  BITMAP_FREE (propagate);
 }
 
 /* A pointer to one of these values is passed to
diff --git a/gcc/predict.c b/gcc/predict.c
index fa4e626fab8..0fc9fc5c466 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -2396,7 +2396,6 @@ tree_predict_by_opcode (basic_block bb)
   tree type;
   tree val;
   enum tree_code cmp;
-  bitmap visited;
   edge_iterator ei;
   enum br_predictor predictor;
 
@@ -2409,10 +2408,8 @@ tree_predict_by_opcode (basic_block bb)
   op1 = gimple_cond_rhs (stmt);
   cmp = gimple_cond_code (stmt);
   type = TREE_TYPE (op0);
-  visited = BITMAP_ALLOC (NULL);
-  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
+  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
 			       &predictor);
-  BITMAP_FREE (visited);
   if (val && TREE_CODE (val) == INTEGER_CST)
     {
       if (predictor == PRED_BUILTIN_EXPECT)
@@ -2917,9 +2914,7 @@ static void
 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
 			  enum prediction taken, struct loop *in_loop)
 {
-  bitmap visited = BITMAP_ALLOC (NULL);
-  predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
-  BITMAP_FREE (visited);
+  predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
 }
 
 /* Like predict_paths_leading_to but take edge instead of basic block.  */
@@ -2943,9 +2938,7 @@ predict_paths_leading_to_edge (edge e, enum br_predictor pred,
       }
   if (!has_nonloop_edge)
     {
-      bitmap visited = BITMAP_ALLOC (NULL);
-      predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
-      BITMAP_FREE (visited);
+      predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
     }
   else
     predict_edge_def (e, pred, taken);
@@ -3119,7 +3112,7 @@ estimate_loops_at_level (struct loop *first_loop)
       edge e;
       basic_block *bbs;
       unsigned i;
-      bitmap tovisit = BITMAP_ALLOC (NULL);
+      auto_bitmap tovisit;
 
       estimate_loops_at_level (loop->inner);
 
@@ -3132,7 +3125,6 @@ estimate_loops_at_level (struct loop *first_loop)
 	bitmap_set_bit (tovisit, bbs[i]->index);
       free (bbs);
       propagate_freq (loop->header, tovisit);
-      BITMAP_FREE (tovisit);
     }
 }
 
@@ -3141,7 +3133,7 @@ estimate_loops_at_level (struct loop *first_loop)
 static void
 estimate_loops (void)
 {
-  bitmap tovisit = BITMAP_ALLOC (NULL);
+  auto_bitmap tovisit;
   basic_block bb;
 
   /* Start by estimating the frequencies in the loops.  */
@@ -3154,7 +3146,6 @@ estimate_loops (void)
       bitmap_set_bit (tovisit, bb->index);
     }
   propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
-  BITMAP_FREE (tovisit);
 }
 
 /* Drop the profile for NODE to guessed, and update its frequency based on
diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
index 35eb85ba63c..492376d949b 100644
--- a/gcc/shrink-wrap.c
+++ b/gcc/shrink-wrap.c
@@ -758,7 +758,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
      reachable from PRO that we already found, and in VEC a stack of
      those we still need to consider (to find successors).  */
 
-  bitmap bb_with = BITMAP_ALLOC (NULL);
+  auto_bitmap bb_with;
   bitmap_set_bit (bb_with, pro->index);
 
   vec<basic_block> vec;
@@ -822,7 +822,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
     {
       calculate_dominance_info (CDI_POST_DOMINATORS);
 
-      bitmap bb_tmp = BITMAP_ALLOC (NULL);
+      auto_bitmap bb_tmp;
       bitmap_copy (bb_tmp, bb_with);
       basic_block last_ok = pro;
       vec.truncate (0);
@@ -859,7 +859,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
 
       pro = last_ok;
 
-      BITMAP_FREE (bb_tmp);
       free_dominance_info (CDI_POST_DOMINATORS);
     }
 
@@ -871,7 +870,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
 
   if (pro == entry)
     {
-      BITMAP_FREE (bb_with);
       free_dominance_info (CDI_DOMINATORS);
       return;
     }
@@ -1006,7 +1004,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
   *entry_edge = make_single_succ_edge (new_bb, pro, EDGE_FALLTHRU);
   force_nonfallthru (*entry_edge);
 
-  BITMAP_FREE (bb_with);
   free_dominance_info (CDI_DOMINATORS);
 }
 \f
@@ -1265,7 +1262,7 @@ spread_components (sbitmap components)
      on that stack.  */
   vec<basic_block> todo;
   todo.create (n_basic_blocks_for_fn (cfun));
-  bitmap seen = BITMAP_ALLOC (NULL);
+  auto_bitmap seen;
 
   sbitmap old = sbitmap_alloc (SBITMAP_SIZE (components));
 
@@ -1395,7 +1392,6 @@ spread_components (sbitmap components)
     }
 
   sbitmap_free (old);
-  BITMAP_FREE (seen);
 }
 
 /* If we cannot handle placing some component's prologues or epilogues where
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index a540416cbb5..8fdfdd6f1bc 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -8178,7 +8178,6 @@ remove_edge_and_dominated_blocks (edge e)
 {
   vec<basic_block> bbs_to_remove = vNULL;
   vec<basic_block> bbs_to_fix_dom = vNULL;
-  bitmap df, df_idom;
   edge f;
   edge_iterator ei;
   bool none_removed = false;
@@ -8227,9 +8226,7 @@ remove_edge_and_dominated_blocks (edge e)
 	}
     }
 
-  df = BITMAP_ALLOC (NULL);
-  df_idom = BITMAP_ALLOC (NULL);
-
+  auto_bitmap df, df_idom;
   if (none_removed)
     bitmap_set_bit (df_idom,
 		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
@@ -8296,8 +8293,6 @@ remove_edge_and_dominated_blocks (edge e)
 
   iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
 
-  BITMAP_FREE (df);
-  BITMAP_FREE (df_idom);
   bbs_to_remove.release ();
   bbs_to_fix_dom.release ();
 }
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index 1b9950eca8e..a60454b5218 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -1251,7 +1251,7 @@ rdg_build_partitions (struct graph *rdg,
 		      vec<gimple *> starting_stmts,
 		      vec<partition *> *partitions)
 {
-  bitmap processed = BITMAP_ALLOC (NULL);
+  auto_bitmap processed;
   int i;
   gimple *stmt;
 
@@ -1282,8 +1282,6 @@ rdg_build_partitions (struct graph *rdg,
 
   /* All vertices should have been assigned to at least one partition now,
      other than vertices belonging to dead code.  */
-
-  BITMAP_FREE (processed);
 }
 
 /* Dump to FILE the PARTITIONS.  */
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index 57d8f7d207c..23e7870dd2d 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -2498,7 +2498,6 @@ tree_predictive_commoning_loop (struct loop *loop)
   struct tree_niter_desc desc;
   bool unroll = false;
   edge exit;
-  bitmap tmp_vars;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "Processing loop %d\n",  loop->num);
@@ -2549,7 +2548,7 @@ tree_predictive_commoning_loop (struct loop *loop)
   /* Find the suitable components and split them into chains.  */
   components = filter_suitable_components (loop, components);
 
-  tmp_vars = BITMAP_ALLOC (NULL);
+  auto_bitmap tmp_vars;
   looparound_phis = BITMAP_ALLOC (NULL);
   determine_roots (loop, components, &chains);
   release_components (components);
@@ -2617,7 +2616,6 @@ tree_predictive_commoning_loop (struct loop *loop)
 end: ;
   release_chains (chains);
   free_data_refs (datarefs);
-  BITMAP_FREE (tmp_vars);
   BITMAP_FREE (looparound_phis);
 
   free_affine_expand_cache (&name_expansions);
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index 1b78d66456e..e166314ed4d 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -1791,7 +1791,7 @@ coalesce_ssa_name (void)
   tree_live_info_p liveinfo;
   ssa_conflicts *graph;
   coalesce_list *cl;
-  bitmap used_in_copies = BITMAP_ALLOC (NULL);
+  auto_bitmap used_in_copies;
   var_map map;
   unsigned int i;
   tree a;
@@ -1848,8 +1848,6 @@ coalesce_ssa_name (void)
   else
     compute_samebase_partition_bases (map);
 
-  BITMAP_FREE (used_in_copies);
-
   if (num_var_partitions (map) < 1)
     {
       delete_coalesce_list (cl);
diff --git a/gcc/tree-ssa-phionlycprop.c b/gcc/tree-ssa-phionlycprop.c
index f61b269cd63..aa0f50256cd 100644
--- a/gcc/tree-ssa-phionlycprop.c
+++ b/gcc/tree-ssa-phionlycprop.c
@@ -497,13 +497,11 @@ public:
 unsigned int
 pass_phi_only_cprop::execute (function *fun)
 {
-  bitmap interesting_names;
-  bitmap interesting_names1;
   bool cfg_altered = false;
 
   /* Bitmap of blocks which need EH information updated.  We can not
      update it on-the-fly as doing so invalidates the dominator tree.  */
-  bitmap need_eh_cleanup = BITMAP_ALLOC (NULL);
+  auto_bitmap need_eh_cleanup;
 
   /* INTERESTING_NAMES is effectively our worklist, indexed by
      SSA_NAME_VERSION.
@@ -515,8 +513,8 @@ pass_phi_only_cprop::execute (function *fun)
 
      Experiments have show we generally get better compilation
      time behavior with bitmaps rather than sbitmaps.  */
-  interesting_names = BITMAP_ALLOC (NULL);
-  interesting_names1 = BITMAP_ALLOC (NULL);
+  auto_bitmap interesting_names;
+  auto_bitmap interesting_names1;
 
   calculate_dominance_info (CDI_DOMINATORS);
   cfg_altered = false;
@@ -570,13 +568,8 @@ pass_phi_only_cprop::execute (function *fun)
   /* Propagation of const and copies may make some EH edges dead.  Purge
      such edges from the CFG as needed.  */
   if (!bitmap_empty_p (need_eh_cleanup))
-    {
-      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
-      BITMAP_FREE (need_eh_cleanup);
-    }
+    gimple_purge_all_dead_eh_edges (need_eh_cleanup);
 
-  BITMAP_FREE (interesting_names);
-  BITMAP_FREE (interesting_names1);
   return 0;
 }
 
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index b4095bfdeed..8175d2599ed 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -4903,12 +4903,11 @@ mark_operand_necessary (tree op)
 static void
 remove_dead_inserted_code (void)
 {
-  bitmap worklist;
   unsigned i;
   bitmap_iterator bi;
   gimple *t;
 
-  worklist = BITMAP_ALLOC (NULL);
+  auto_bitmap worklist;
   EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
     {
       t = SSA_NAME_DEF_STMT (ssa_name (i));
@@ -4984,7 +4983,6 @@ remove_dead_inserted_code (void)
 	    }
 	}
     }
-  BITMAP_FREE (worklist);
 }
 
 
diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
index eb8b36095d8..acf832d66f6 100644
--- a/gcc/tree-ssa-sink.c
+++ b/gcc/tree-ssa-sink.c
@@ -128,7 +128,7 @@ static basic_block
 nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
 {
   tree var = DEF_FROM_PTR (def_p);
-  bitmap blocks = BITMAP_ALLOC (NULL);
+  auto_bitmap blocks;
   basic_block commondom;
   unsigned int j;
   bitmap_iterator bi;
@@ -158,17 +158,14 @@ nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
 
       /* Short circuit. Nothing dominates the entry block.  */
       if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
-	{
-	  BITMAP_FREE (blocks);
-	  return NULL;
-	}
+	return NULL;
+
       bitmap_set_bit (blocks, useblock->index);
     }
   commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
     commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
 					  BASIC_BLOCK_FOR_FN (cfun, j));
-  BITMAP_FREE (blocks);
   return commondom;
 }
 
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index e30ddd821ed..319826861dc 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -723,7 +723,7 @@ compute_path_counts (struct redirection_data *rd,
      below to add up the counts of the other edges not included in this jump
      threading path.  */
   struct el *next, *el;
-  bitmap in_edge_srcs = BITMAP_ALLOC (NULL);
+  auto_bitmap in_edge_srcs;
   for (el = rd->incoming_edges; el; el = next)
     {
       next = el->next;
@@ -759,8 +759,6 @@ compute_path_counts (struct redirection_data *rd,
   if (path_in_freq > BB_FREQ_MAX)
     path_in_freq = BB_FREQ_MAX;
 
-  BITMAP_FREE (in_edge_srcs);
-
   /* Now compute the fraction of the total count coming into the first
      path bb that is from the current threading path.  */
   gcov_type total_count = e->dest->count;
@@ -1958,7 +1956,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
 {
   unsigned int i;
   bitmap_iterator bi;
-  bitmap tmp = BITMAP_ALLOC (NULL);
+  auto_bitmap tmp;
   basic_block bb;
   edge e;
   edge_iterator ei;
@@ -2169,8 +2167,6 @@ mark_threaded_blocks (bitmap threaded_blocks)
 	    }
 	}
     }
-
-  BITMAP_FREE (tmp);
 }
 
 
@@ -2436,8 +2432,8 @@ thread_through_all_blocks (bool may_peel_loop_headers)
   bool retval = false;
   unsigned int i;
   bitmap_iterator bi;
-  bitmap threaded_blocks;
   struct loop *loop;
+  auto_bitmap threaded_blocks;
 
   if (!paths.exists ())
     {
@@ -2445,7 +2441,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
       goto out;
     }
 
-  threaded_blocks = BITMAP_ALLOC (NULL);
   memset (&thread_stats, 0, sizeof (thread_stats));
 
   /* Remove any paths that referenced removed edges.  */
@@ -2578,8 +2573,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
 
   free_original_copy_tables ();
 
-  BITMAP_FREE (threaded_blocks);
-  threaded_blocks = NULL;
   paths.release ();
 
   if (retval)
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index 42e708ed673..11f5a2bac92 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -1022,7 +1022,7 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
   ssa_op_iter iter;
   tree op;
   enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
-  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
+  auto_bitmap names_defined_in_bb;
 
   gcc_assert (!need_ssa_update_p (cfun));
 
@@ -1176,7 +1176,6 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
   else
     set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
 
-  BITMAP_FREE (names_defined_in_bb);
   timevar_pop (TV_TREE_SSA_VERIFY);
   return;
 
@@ -1622,9 +1621,9 @@ void
 execute_update_addresses_taken (void)
 {
   basic_block bb;
-  bitmap addresses_taken = BITMAP_ALLOC (NULL);
-  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
-  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
+  auto_bitmap addresses_taken;
+  auto_bitmap not_reg_needs;
+  auto_bitmap suitable_for_renaming;
   tree var;
   unsigned i;
 
@@ -2005,9 +2004,6 @@ execute_update_addresses_taken (void)
 	update_ssa (TODO_update_ssa);
     }
 
-  BITMAP_FREE (not_reg_needs);
-  BITMAP_FREE (addresses_taken);
-  BITMAP_FREE (suitable_for_renaming);
   timevar_pop (TV_ADDRESS_TAKEN);
 }
 
diff --git a/gcc/tree-ssanames.c b/gcc/tree-ssanames.c
index 6487542eb8f..353c7b1906a 100644
--- a/gcc/tree-ssanames.c
+++ b/gcc/tree-ssanames.c
@@ -127,7 +127,7 @@ verify_ssaname_freelists (struct function *fun)
   if (!gimple_in_ssa_p (fun))
     return;
 
-  bitmap names_in_il = BITMAP_ALLOC (NULL);
+  auto_bitmap names_in_il;
 
   /* Walk the entire IL noting every SSA_NAME we see.  */
   basic_block bb;
@@ -165,7 +165,7 @@ verify_ssaname_freelists (struct function *fun)
 
   /* Now walk the free list noting what we find there and verifying
      there are no duplicates.  */
-  bitmap names_in_freelists = BITMAP_ALLOC (NULL);
+  auto_bitmap names_in_freelists;
   if (FREE_SSANAMES (fun))
     {
       for (unsigned int i = 0; i < FREE_SSANAMES (fun)->length (); i++)
@@ -221,7 +221,7 @@ verify_ssaname_freelists (struct function *fun)
 
   unsigned int i;
   bitmap_iterator bi;
-  bitmap all_names = BITMAP_ALLOC (NULL);
+  auto_bitmap all_names;
   bitmap_set_range (all_names, UNUSED_NAME_VERSION + 1, num_ssa_names - 1);
   bitmap_ior_into (names_in_il, names_in_freelists);
 
@@ -230,10 +230,6 @@ verify_ssaname_freelists (struct function *fun)
   EXECUTE_IF_AND_COMPL_IN_BITMAP(all_names, names_in_il,
 				 UNUSED_NAME_VERSION + 1, i, bi)
     gcc_assert (!ssa_name (i));
-
-  BITMAP_FREE (all_names);
-  BITMAP_FREE (names_in_freelists);
-  BITMAP_FREE (names_in_il);
 }
 
 /* Move all SSA_NAMEs from FREE_SSA_NAMES_QUEUE to FREE_SSA_NAMES.
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (4 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 13/13] make inverted_post_order_compute() operate on a vec tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:27   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 01/13] improve safety of freeing bitmaps tbsaunde+gcc
                   ` (6 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-07  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* bitmap.h (class auto_bitmap): New constructor taking
	bitmap_obstack * argument.
---
 gcc/bitmap.h | 1 +
 1 file changed, 1 insertion(+)

diff --git a/gcc/bitmap.h b/gcc/bitmap.h
index 49aec001cb0..2ddeee6bc10 100644
--- a/gcc/bitmap.h
+++ b/gcc/bitmap.h
@@ -824,6 +824,7 @@ class auto_bitmap
 {
  public:
   auto_bitmap () { bitmap_initialize (&m_bits, &bitmap_default_obstack); }
+  explicit auto_bitmap (bitmap_obstack *o) { bitmap_initialize (&m_bits, o); }
   ~auto_bitmap () { bitmap_clear (&m_bits); }
   // Allow calling bitmap functions on our bitmap.
   operator bitmap () { return &m_bits; }
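
As a usage illustration (not part of the patch), a caller that wants the
bitmap's elements allocated from a specific obstack, say df_bitmap_obstack,
while keeping the automatic cleanup could then write something like:

  auto_bitmap visited (&df_bitmap_obstack);
  bitmap_set_bit (visited, bb->index);  /* "visited" and "bb" are made-up names.  */
  /* No BITMAP_FREE needed; the bitmap is cleared when it goes out of scope.  */
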
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 12/13] make depth_first_search_ds a class
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (6 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 01/13] improve safety of freeing bitmaps tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:29   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size tbsaunde+gcc
                   ` (4 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* cfganal.c (connect_infinite_loops_to_exit): Adjust.
	(depth_first_search::depth_first_search): Change structure init
	function to this constructor.
	(depth_first_search::add_bb): Rename function to this member.
	(depth_first_search::execute): Likewise.
	(flow_dfs_compute_reverse_finish): Adjust.
---
 gcc/cfganal.c | 96 +++++++++++++++++++++--------------------------------------
 1 file changed, 34 insertions(+), 62 deletions(-)

diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 1b01564e8c7..27b453ca3f7 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -28,25 +28,24 @@ along with GCC; see the file COPYING3.  If not see
 #include "cfganal.h"
 #include "cfgloop.h"
 
+namespace {
 /* Store the data structures necessary for depth-first search.  */
-struct depth_first_search_ds {
-  /* stack for backtracking during the algorithm */
-  basic_block *stack;
+class depth_first_search
+{
+public:
+  depth_first_search ();
+
+  basic_block execute (basic_block);
+  void add_bb (basic_block);
 
-  /* number of edges in the stack.  That is, positions 0, ..., sp-1
-     have edges.  */
-  unsigned int sp;
+private:
+  /* stack for backtracking during the algorithm */
+  auto_vec<basic_block, 20> m_stack;
 
   /* record of basic blocks already seen by depth-first search */
-  sbitmap visited_blocks;
+  auto_sbitmap m_visited_blocks;
 };
-
-static void flow_dfs_compute_reverse_init (depth_first_search_ds *);
-static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds *,
-					     basic_block);
-static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds *,
-						     basic_block);
-static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
+}
 \f
 /* Mark the back edges in DFS traversal.
    Return nonzero if a loop (natural or otherwise) is present.
@@ -597,30 +596,23 @@ add_noreturn_fake_exit_edges (void)
 void
 connect_infinite_loops_to_exit (void)
 {
-  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
-  basic_block deadend_block;
-  depth_first_search_ds dfs_ds;
-
   /* Perform depth-first search in the reverse graph to find nodes
      reachable from the exit block.  */
-  flow_dfs_compute_reverse_init (&dfs_ds);
-  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
+  depth_first_search dfs;
+  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
 
   /* Repeatedly add fake edges, updating the unreachable nodes.  */
+  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
   while (1)
     {
-      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
-							  unvisited_block);
+      unvisited_block = dfs.execute (unvisited_block);
       if (!unvisited_block)
 	break;
 
-      deadend_block = dfs_find_deadend (unvisited_block);
+      basic_block deadend_block = dfs_find_deadend (unvisited_block);
       make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
-      flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
+      dfs.add_bb (deadend_block);
     }
-
-  flow_dfs_compute_reverse_finish (&dfs_ds);
-  return;
 }
 \f
 /* Compute reverse top sort order.  This is computing a post order
@@ -1094,31 +1086,22 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
    search context.  If INITIALIZE_STACK is nonzero, there is an
    element on the stack.  */
 
-static void
-flow_dfs_compute_reverse_init (depth_first_search_ds *data)
+depth_first_search::depth_first_search () :
+  m_stack (n_basic_blocks_for_fn (cfun)),
+  m_visited_blocks (last_basic_block_for_fn (cfun))
 {
-  /* Allocate stack for back-tracking up CFG.  */
-  data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
-  data->sp = 0;
-
-  /* Allocate bitmap to track nodes that have been visited.  */
-  data->visited_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
-
-  /* None of the nodes in the CFG have been visited yet.  */
-  bitmap_clear (data->visited_blocks);
-
-  return;
+  bitmap_clear (m_visited_blocks);
 }
 
 /* Add the specified basic block to the top of the dfs data
    structures.  When the search continues, it will start at the
    block.  */
 
-static void
-flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
+void
+depth_first_search::add_bb (basic_block bb)
 {
-  data->stack[data->sp++] = bb;
-  bitmap_set_bit (data->visited_blocks, bb->index);
+  m_stack.quick_push (bb);
+  bitmap_set_bit (m_visited_blocks, bb->index);
 }
 
 /* Continue the depth-first search through the reverse graph starting with the
@@ -1126,42 +1109,31 @@ flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
    are marked.  Returns an unvisited basic block, or NULL if there is none
    available.  */
 
-static basic_block
-flow_dfs_compute_reverse_execute (depth_first_search_ds *data,
-				  basic_block last_unvisited)
+basic_block
+depth_first_search::execute (basic_block last_unvisited)
 {
   basic_block bb;
   edge e;
   edge_iterator ei;
 
-  while (data->sp > 0)
+  while (!m_stack.is_empty ())
     {
-      bb = data->stack[--data->sp];
+      bb = m_stack.pop ();
 
       /* Perform depth-first search on adjacent vertices.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
-	if (!bitmap_bit_p (data->visited_blocks, e->src->index))
-	  flow_dfs_compute_reverse_add_bb (data, e->src);
+	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
+	  add_bb (e->src);
     }
 
   /* Determine if there are unvisited basic blocks.  */
   FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
-    if (!bitmap_bit_p (data->visited_blocks, bb->index))
+    if (!bitmap_bit_p (m_visited_blocks, bb->index))
       return bb;
 
   return NULL;
 }
 
-/* Destroy the data structures needed for depth-first search on the
-   reverse graph.  */
-
-static void
-flow_dfs_compute_reverse_finish (depth_first_search_ds *data)
-{
-  free (data->stack);
-  sbitmap_free (data->visited_blocks);
-}
-
 /* Performs dfs search from BB over vertices satisfying PREDICATE;
    if REVERSE, go against direction of edges.  Returns number of blocks
    found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 13/13] make inverted_post_order_compute() operate on a vec
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (3 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 09/13] use auto_bitmap more with alternate obstacks tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:44   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks tbsaunde+gcc
                   ` (7 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* cfganal.c (inverted_post_order_compute): Change argument type
	to vec *.
	* cfganal.h (inverted_post_order_compute): Adjust prototype.
	* df-core.c (rest_of_handle_df_initialize): Adjust.
	(rest_of_handle_df_finish): Likewise.
	(df_analyze_1): Likewise.
	(df_analyze): Likewise.
	(loop_inverted_post_order_compute): Change argument to be a vec *.
	(df_analyze_loop): Adjust.
	(df_get_n_blocks): Likewise.
	(df_get_postorder): Likewise.
	* df.h (struct df_d): Change field to be a vec.
	* lcm.c (compute_laterin): Adjust.
	(compute_available): Likewise.
	* lra-lives.c (lra_create_live_ranges_1): Likewise.
	* tree-ssa-dce.c (remove_dead_stmt): Likewise.
	* tree-ssa-pre.c (compute_antic): Likewise.
---
 gcc/cfganal.c      | 14 ++++++--------
 gcc/cfganal.h      |  2 +-
 gcc/df-core.c      | 56 +++++++++++++++++++++++++-----------------------------
 gcc/df.h           |  4 +---
 gcc/lcm.c          | 14 ++++++--------
 gcc/lra-lives.c    |  9 ++++-----
 gcc/tree-ssa-dce.c | 10 ++++------
 gcc/tree-ssa-pre.c |  9 ++++-----
 8 files changed, 52 insertions(+), 66 deletions(-)

diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 27b453ca3f7..a3a6ea86994 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -790,12 +790,12 @@ dfs_find_deadend (basic_block bb)
    and start looking for a "dead end" from that block
    and do another inverted traversal from that block.  */
 
-int
-inverted_post_order_compute (int *post_order,
+void
+inverted_post_order_compute (vec<int> *post_order,
 			     sbitmap *start_points)
 {
   basic_block bb;
-  int post_order_num = 0;
+  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));
 
   if (flag_checking)
     verify_no_unreachable_blocks ();
@@ -863,13 +863,13 @@ inverted_post_order_compute (int *post_order,
                    time, check its predecessors.  */
 		stack.quick_push (ei_start (pred->preds));
               else
-                post_order[post_order_num++] = pred->index;
+		post_order->quick_push (pred->index);
             }
           else
             {
 	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
 		  && ei_one_before_end_p (ei))
-                post_order[post_order_num++] = bb->index;
+		post_order->quick_push (bb->index);
 
               if (!ei_one_before_end_p (ei))
 		ei_next (&stack.last ());
@@ -927,9 +927,7 @@ inverted_post_order_compute (int *post_order,
   while (!stack.is_empty ());
 
   /* EXIT_BLOCK is always included.  */
-  post_order[post_order_num++] = EXIT_BLOCK;
-
-  return post_order_num;
+  post_order->quick_push (EXIT_BLOCK);
 }
 
 /* Compute the depth first search order of FN and store in the array
diff --git a/gcc/cfganal.h b/gcc/cfganal.h
index 7df484b8441..39bb5e547a5 100644
--- a/gcc/cfganal.h
+++ b/gcc/cfganal.h
@@ -63,7 +63,7 @@ extern void add_noreturn_fake_exit_edges (void);
 extern void connect_infinite_loops_to_exit (void);
 extern int post_order_compute (int *, bool, bool);
 extern basic_block dfs_find_deadend (basic_block);
-extern int inverted_post_order_compute (int *, sbitmap *start_points = 0);
+extern void inverted_post_order_compute (vec<int> *postorder, sbitmap *start_points = 0);
 extern int pre_and_rev_post_order_compute_fn (struct function *,
 					      int *, int *, bool);
 extern int pre_and_rev_post_order_compute (int *, int *, bool);
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 1b270d417aa..1e84d4d948f 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -702,10 +702,9 @@ rest_of_handle_df_initialize (void)
     df_live_add_problem ();
 
   df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
-  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
-  gcc_assert (df->n_blocks == df->n_blocks_inverted);
+  inverted_post_order_compute (&df->postorder_inverted);
+  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
 
   df->hard_regs_live_count = XCNEWVEC (unsigned int, FIRST_PSEUDO_REGISTER);
 
@@ -816,7 +815,7 @@ rest_of_handle_df_finish (void)
     }
 
   free (df->postorder);
-  free (df->postorder_inverted);
+  df->postorder_inverted.release ();
   free (df->hard_regs_live_count);
   free (df);
   df = NULL;
@@ -1198,7 +1197,7 @@ df_analyze_1 (void)
   int i;
 
   /* These should be the same.  */
-  gcc_assert (df->n_blocks == df->n_blocks_inverted);
+  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
 
   /* We need to do this before the df_verify_all because this is
      not kept incrementally up to date.  */
@@ -1222,8 +1221,8 @@ df_analyze_1 (void)
           if (dflow->problem->dir == DF_FORWARD)
             df_analyze_problem (dflow,
                                 df->blocks_to_analyze,
-                                df->postorder_inverted,
-                                df->n_blocks_inverted);
+				df->postorder_inverted.address (),
+				df->postorder_inverted.length ());
           else
             df_analyze_problem (dflow,
                                 df->blocks_to_analyze,
@@ -1249,23 +1248,21 @@ void
 df_analyze (void)
 {
   bitmap current_all_blocks = BITMAP_ALLOC (&df_bitmap_obstack);
-  int i;
 
   free (df->postorder);
-  free (df->postorder_inverted);
   df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
-  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
+  df->postorder_inverted.truncate (0);
+  inverted_post_order_compute (&df->postorder_inverted);
 
-  for (i = 0; i < df->n_blocks; i++)
+  for (int i = 0; i < df->n_blocks; i++)
     bitmap_set_bit (current_all_blocks, df->postorder[i]);
 
   if (flag_checking)
     {
       /* Verify that POSTORDER_INVERTED only contains blocks reachable from
 	 the ENTRY block.  */
-      for (i = 0; i < df->n_blocks_inverted; i++)
+      for (unsigned int i = 0; i < df->postorder_inverted.length (); i++)
 	gcc_assert (bitmap_bit_p (current_all_blocks,
 				  df->postorder_inverted[i]));
     }
@@ -1277,9 +1274,10 @@ df_analyze (void)
       bitmap_and_into (df->blocks_to_analyze, current_all_blocks);
       df->n_blocks = df_prune_to_subcfg (df->postorder,
 					 df->n_blocks, df->blocks_to_analyze);
-      df->n_blocks_inverted = df_prune_to_subcfg (df->postorder_inverted,
-						  df->n_blocks_inverted,
+      unsigned int newlen = df_prune_to_subcfg (df->postorder_inverted.address (),
+						df->postorder_inverted.length (),
 						  df->blocks_to_analyze);
+      df->postorder_inverted.truncate (newlen);
       BITMAP_FREE (current_all_blocks);
     }
   else
@@ -1355,13 +1353,14 @@ loop_post_order_compute (int *post_order, struct loop *loop)
 /* Compute the reverse top sort order of the inverted sub-CFG specified
    by LOOP.  Returns the number of blocks which is always loop->num_nodes.  */
 
-static int
-loop_inverted_post_order_compute (int *post_order, struct loop *loop)
+static void
+loop_inverted_post_order_compute (vec<int> *post_order, struct loop *loop)
 {
   basic_block bb;
   edge_iterator *stack;
   int sp;
-  int post_order_num = 0;
+
+  post_order->reserve_exact (loop->num_nodes);
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
@@ -1398,13 +1397,13 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
 	       time, check its predecessors.  */
 	    stack[sp++] = ei_start (pred->preds);
 	  else
-	    post_order[post_order_num++] = pred->index;
+	    post_order->quick_push (pred->index);
 	}
       else
 	{
 	  if (flow_bb_inside_loop_p (loop, bb)
 	      && ei_one_before_end_p (ei))
-	    post_order[post_order_num++] = bb->index;
+	    post_order->quick_push (bb->index);
 
 	  if (!ei_one_before_end_p (ei))
 	    ei_next (&stack[sp - 1]);
@@ -1414,7 +1413,6 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
     }
 
   free (stack);
-  return post_order_num;
 }
 
 
@@ -1424,15 +1422,13 @@ void
 df_analyze_loop (struct loop *loop)
 {
   free (df->postorder);
-  free (df->postorder_inverted);
 
   df->postorder = XNEWVEC (int, loop->num_nodes);
-  df->postorder_inverted = XNEWVEC (int, loop->num_nodes);
+  df->postorder_inverted.truncate (0);
   df->n_blocks = loop_post_order_compute (df->postorder, loop);
-  df->n_blocks_inverted
-    = loop_inverted_post_order_compute (df->postorder_inverted, loop);
+  loop_inverted_post_order_compute (&df->postorder_inverted, loop);
   gcc_assert ((unsigned) df->n_blocks == loop->num_nodes);
-  gcc_assert ((unsigned) df->n_blocks_inverted == loop->num_nodes);
+  gcc_assert (df->postorder_inverted.length () == loop->num_nodes);
 
   bitmap blocks = BITMAP_ALLOC (&df_bitmap_obstack);
   for (int i = 0; i < df->n_blocks; ++i)
@@ -1453,8 +1449,8 @@ df_get_n_blocks (enum df_flow_dir dir)
 
   if (dir == DF_FORWARD)
     {
-      gcc_assert (df->postorder_inverted);
-      return df->n_blocks_inverted;
+      gcc_assert (df->postorder_inverted.length ());
+      return df->postorder_inverted.length ();
     }
 
   gcc_assert (df->postorder);
@@ -1473,8 +1469,8 @@ df_get_postorder (enum df_flow_dir dir)
 
   if (dir == DF_FORWARD)
     {
-      gcc_assert (df->postorder_inverted);
-      return df->postorder_inverted;
+      gcc_assert (df->postorder_inverted.length ());
+      return df->postorder_inverted.address ();
     }
   gcc_assert (df->postorder);
   return df->postorder;
diff --git a/gcc/df.h b/gcc/df.h
index 681ff32098e..07fd3345d9d 100644
--- a/gcc/df.h
+++ b/gcc/df.h
@@ -582,11 +582,9 @@ struct df_d
   bitmap_head insns_to_notes_rescan;
   int *postorder;                /* The current set of basic blocks
                                     in reverse postorder.  */
-  int *postorder_inverted;       /* The current set of basic blocks
+  vec<int> postorder_inverted;       /* The current set of basic blocks
                                     in reverse postorder of inverted CFG.  */
   int n_blocks;                  /* The number of blocks in reverse postorder.  */
-  int n_blocks_inverted;         /* The number of blocks
-                                    in reverse postorder of inverted CFG.  */
 
   /* An array [FIRST_PSEUDO_REGISTER], indexed by regno, of the number
      of refs that qualify as being real hard regs uses.  Artificial
diff --git a/gcc/lcm.c b/gcc/lcm.c
index edc86b57009..e8666274211 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -270,9 +270,9 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
 
   /* Add all the blocks to the worklist.  This prevents an early exit from
      the loop given our optimistic initialization of LATER above.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
-  for (int i = 0; i < postorder_num; ++i)
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
+  for (unsigned int i = 0; i < postorder.length (); ++i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
@@ -281,7 +281,6 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
       *qin++ = bb;
       bb->aux = bb;
     }
-  free (postorder);
 
   /* Note that we do not use the last allocated element for our queue,
      as EXIT_BLOCK is never inserted into it. */
@@ -512,9 +511,9 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of AVOUT above.  Use inverted postorder
      to make the dataflow problem require less iterations.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
-  for (int i = 0; i < postorder_num; ++i)
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
+  for (unsigned int i = 0; i < postorder.length (); ++i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
@@ -523,7 +522,6 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
       *qin++ = bb;
       bb->aux = bb;
     }
-  free (postorder);
 
   qin = worklist;
   qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
index 5d4015b5ab9..e728e348215 100644
--- a/gcc/lra-lives.c
+++ b/gcc/lra-lives.c
@@ -1287,11 +1287,11 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
   point_freq_vec.truncate (0);
   point_freq_vec.reserve_exact (new_length);
   lra_point_freq = point_freq_vec.address ();
-  int *post_order_rev_cfg = XNEWVEC (int, last_basic_block_for_fn (cfun));
-  int n_blocks_inverted = inverted_post_order_compute (post_order_rev_cfg);
-  lra_assert (n_blocks_inverted == n_basic_blocks_for_fn (cfun));
+  auto_vec<int, 20> post_order_rev_cfg;
+  inverted_post_order_compute (&post_order_rev_cfg);
+  lra_assert (post_order_rev_cfg.length () == (unsigned) n_basic_blocks_for_fn (cfun));
   bb_live_change_p = false;
-  for (i = n_blocks_inverted - 1; i >= 0; --i)
+  for (i = post_order_rev_cfg.length () - 1; i >= 0; --i)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, post_order_rev_cfg[i]);
       if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
@@ -1338,7 +1338,6 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
 	    }
 	}
     }
-  free (post_order_rev_cfg);
   lra_live_max_point = curr_point;
   if (lra_dump_file != NULL)
     print_live_ranges (lra_dump_file);
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index e17659df91f..150e4f73185 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -1042,14 +1042,12 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
 	{
 	  if (!bb_postorder)
 	    {
-	      int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-	      int postorder_num
-		 = inverted_post_order_compute (postorder,
-						&bb_contains_live_stmts);
+	      auto_vec<int, 20> postorder;
+		 inverted_post_order_compute (&postorder,
+					      &bb_contains_live_stmts);
 	      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
-	      for (int i = 0; i < postorder_num; ++i)
+	      for (unsigned int i = 0; i < postorder.length (); ++i)
 		 bb_postorder[postorder[i]] = i;
-	      free (postorder);
 	    }
           FOR_EACH_EDGE (e2, ei, bb->succs)
 	    if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index ca212daee62..6ffcd7b8eb4 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2388,8 +2388,8 @@ compute_antic (void)
   /* For ANTIC computation we need a postorder that also guarantees that
      a block with a single successor is visited after its successor.
      RPO on the inverted CFG has this property.  */
-  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  int postorder_num = inverted_post_order_compute (postorder);
+  auto_vec<int, 20> postorder;
+  inverted_post_order_compute (&postorder);
 
   auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
   bitmap_ones (worklist);
@@ -2403,7 +2403,7 @@ compute_antic (void)
 	 for PA ANTIC computation.  */
       num_iterations++;
       changed = false;
-      for (i = postorder_num - 1; i >= 0; i--)
+      for (i = postorder.length () - 1; i >= 0; i--)
 	{
 	  if (bitmap_bit_p (worklist, postorder[i]))
 	    {
@@ -2430,7 +2430,7 @@ compute_antic (void)
     {
       /* For partial antic we ignore backedges and thus we do not need
          to perform any iteration when we process blocks in postorder.  */
-      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
+      int postorder_num = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
       for (i = postorder_num - 1 ; i >= 0; i--)
 	{
 	  basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
@@ -2441,7 +2441,6 @@ compute_antic (void)
     }
 
   sbitmap_free (has_abnormal_preds);
-  free (postorder);
 }
 
 
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
  2017-05-09 20:53 ` [PATCH 10/13] make a member an auto_sbitmap tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:26   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 07/13] use auto_bitmap more tbsaunde+gcc
                   ` (10 subsequent siblings)
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

These places were probably trying to use the default bitmap obstack,
but passing 0 to bitmap_initialize actually uses GC allocation.  In any
case they are all cleaned up before going out of scope, so using
auto_bitmap should be fine.
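
For reference, a minimal sketch of the two idioms (going by auto_bitmap's
constructor and destructor in bitmap.h; illustrative only, not part of the
patch):

  /* Old pattern: the 0 obstack argument selects GC allocation rather than
     the default obstack, and the bitmap must be cleared by hand.  */
  bitmap_head processed;
  bitmap_initialize (&processed, 0);
  /* ... use the bitmap ... */
  bitmap_clear (&processed);

  /* New pattern: auto_bitmap initializes against bitmap_default_obstack
     and clears itself when it goes out of scope.  */
  auto_bitmap processed;
  /* ... use the bitmap ... */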

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* haifa-sched.c (estimate_shadow_tick): Replace manual bitmap
	management with auto_bitmap.
	(fix_inter_tick): Likewise.
	(fix_recovery_deps): Likewise.
	* ira.c (add_store_equivs): Likewise.
	(find_moveable_pseudos): Likewise.
	(split_live_ranges_for_shrink_wrap): Likewise.
	* print-rtl.c (rtx_reuse_manager::rtx_reuse_manager): Likewise.
	(rtx_reuse_manager::seen_def_p): Likewise.
	(rtx_reuse_manager::set_seen_def): Likewise.
	* print-rtl.h (class rtx_reuse_manager): Likewise.
---
 gcc/haifa-sched.c | 23 +++++----------
 gcc/ira.c         | 84 +++++++++++++++++++------------------------------------
 gcc/print-rtl.c   |  5 ++--
 gcc/print-rtl.h   |  2 +-
 4 files changed, 38 insertions(+), 76 deletions(-)

diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index 0ebf110471c..1fcc01d04ae 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -4843,14 +4843,12 @@ estimate_insn_tick (bitmap processed, rtx_insn *insn, int budget)
 static int
 estimate_shadow_tick (struct delay_pair *p)
 {
-  bitmap_head processed;
+  auto_bitmap processed;
   int t;
   bool cutoff;
-  bitmap_initialize (&processed, 0);
 
-  cutoff = !estimate_insn_tick (&processed, p->i2,
+  cutoff = !estimate_insn_tick (processed, p->i2,
 				max_insn_queue_index + pair_delay (p));
-  bitmap_clear (&processed);
   if (cutoff)
     return max_insn_queue_index;
   t = INSN_TICK_ESTIMATE (p->i2) - (clock_var + pair_delay (p) + 1);
@@ -7515,15 +7513,13 @@ static void
 fix_inter_tick (rtx_insn *head, rtx_insn *tail)
 {
   /* Set of instructions with corrected INSN_TICK.  */
-  bitmap_head processed;
+  auto_bitmap processed;
   /* ??? It is doubtful if we should assume that cycle advance happens on
      basic block boundaries.  Basically insns that are unconditionally ready
      on the start of the block are more preferable then those which have
      a one cycle dependency over insn from the previous block.  */
   int next_clock = clock_var + 1;
 
-  bitmap_initialize (&processed, 0);
-
   /* Iterates over scheduled instructions and fix their INSN_TICKs and
      INSN_TICKs of dependent instructions, so that INSN_TICKs are consistent
      across different blocks.  */
@@ -7539,7 +7535,7 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
 	  gcc_assert (tick >= MIN_TICK);
 
 	  /* Fix INSN_TICK of instruction from just scheduled block.  */
-	  if (bitmap_set_bit (&processed, INSN_LUID (head)))
+	  if (bitmap_set_bit (processed, INSN_LUID (head)))
 	    {
 	      tick -= next_clock;
 
@@ -7563,7 +7559,7 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
 		  /* If NEXT has its INSN_TICK calculated, fix it.
 		     If not - it will be properly calculated from
 		     scratch later in fix_tick_ready.  */
-		  && bitmap_set_bit (&processed, INSN_LUID (next)))
+		  && bitmap_set_bit (processed, INSN_LUID (next)))
 		{
 		  tick -= next_clock;
 
@@ -7580,7 +7576,6 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
 	    }
 	}
     }
-  bitmap_clear (&processed);
 }
 
 /* Check if NEXT is ready to be added to the ready or queue list.
@@ -8617,9 +8612,7 @@ fix_recovery_deps (basic_block rec)
 {
   rtx_insn *note, *insn, *jump;
   auto_vec<rtx_insn *, 10> ready_list;
-  bitmap_head in_ready;
-
-  bitmap_initialize (&in_ready, 0);
+  auto_bitmap in_ready;
 
   /* NOTE - a basic block note.  */
   note = NEXT_INSN (BB_HEAD (rec));
@@ -8642,7 +8635,7 @@ fix_recovery_deps (basic_block rec)
 	    {
 	      sd_delete_dep (sd_it);
 
-	      if (bitmap_set_bit (&in_ready, INSN_LUID (consumer)))
+	      if (bitmap_set_bit (in_ready, INSN_LUID (consumer)))
 		ready_list.safe_push (consumer);
 	    }
 	  else
@@ -8657,8 +8650,6 @@ fix_recovery_deps (basic_block rec)
     }
   while (insn != note);
 
-  bitmap_clear (&in_ready);
-
   /* Try to add instructions to the ready or queue list.  */
   unsigned int i;
   rtx_insn *temp;
diff --git a/gcc/ira.c b/gcc/ira.c
index c9751ce81ba..36a779bd37f 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -3635,16 +3635,15 @@ update_equiv_regs (void)
 static void
 add_store_equivs (void)
 {
-  bitmap_head seen_insns;
+  auto_bitmap seen_insns;
 
-  bitmap_initialize (&seen_insns, NULL);
   for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
     {
       rtx set, src, dest;
       unsigned regno;
       rtx_insn *init_insn;
 
-      bitmap_set_bit (&seen_insns, INSN_UID (insn));
+      bitmap_set_bit (seen_insns, INSN_UID (insn));
 
       if (! INSN_P (insn))
 	continue;
@@ -3665,7 +3664,7 @@ add_store_equivs (void)
 	  && ! reg_equiv[regno].pdx_subregs
 	  && reg_equiv[regno].init_insns != NULL
 	  && (init_insn = reg_equiv[regno].init_insns->insn ()) != 0
-	  && bitmap_bit_p (&seen_insns, INSN_UID (init_insn))
+	  && bitmap_bit_p (seen_insns, INSN_UID (init_insn))
 	  && ! find_reg_note (init_insn, REG_EQUIV, NULL_RTX)
 	  && validate_equiv_mem (init_insn, src, dest) == valid_reload
 	  && ! memref_used_between_p (dest, init_insn, insn)
@@ -3685,7 +3684,6 @@ add_store_equivs (void)
 		     INSN_UID (insn));
 	}
     }
-  bitmap_clear (&seen_insns);
 }
 
 /* Scan all regs killed in an insn to see if any of them are registers
@@ -4485,9 +4483,8 @@ find_moveable_pseudos (void)
      moved freely downwards, but are otherwise transparent to a block.  */
   bitmap_head *bb_moveable_reg_sets = XNEWVEC (bitmap_head,
 					       last_basic_block_for_fn (cfun));
-  bitmap_head live, used, set, interesting, unusable_as_input;
+  auto_bitmap live, used, set, interesting, unusable_as_input;
   bitmap_iterator bi;
-  bitmap_initialize (&interesting, 0);
 
   first_moveable_pseudo = max_regs;
   pseudo_replaced_reg.release ();
@@ -4497,10 +4494,6 @@ find_moveable_pseudos (void)
   calculate_dominance_info (CDI_DOMINATORS);
 
   i = 0;
-  bitmap_initialize (&live, 0);
-  bitmap_initialize (&used, 0);
-  bitmap_initialize (&set, 0);
-  bitmap_initialize (&unusable_as_input, 0);
   FOR_EACH_BB_FN (bb, cfun)
     {
       rtx_insn *insn;
@@ -4511,13 +4504,13 @@ find_moveable_pseudos (void)
       bitmap_initialize (local, 0);
       bitmap_initialize (transp, 0);
       bitmap_initialize (moveable, 0);
-      bitmap_copy (&live, df_get_live_out (bb));
-      bitmap_and_into (&live, df_get_live_in (bb));
-      bitmap_copy (transp, &live);
+      bitmap_copy (live, df_get_live_out (bb));
+      bitmap_and_into (live, df_get_live_in (bb));
+      bitmap_copy (transp, live);
       bitmap_clear (moveable);
-      bitmap_clear (&live);
-      bitmap_clear (&used);
-      bitmap_clear (&set);
+      bitmap_clear (live);
+      bitmap_clear (used);
+      bitmap_clear (set);
       FOR_BB_INSNS (bb, insn)
 	if (NONDEBUG_INSN_P (insn))
 	  {
@@ -4531,20 +4524,20 @@ find_moveable_pseudos (void)
 	    if (use
 		&& def
 		&& DF_REF_REGNO (use) == DF_REF_REGNO (def)
-		&& !bitmap_bit_p (&set, DF_REF_REGNO (use))
+		&& !bitmap_bit_p (set, DF_REF_REGNO (use))
 		&& rtx_moveable_p (&PATTERN (insn), OP_IN))
 	      {
 		unsigned regno = DF_REF_REGNO (use);
 		bitmap_set_bit (moveable, regno);
-		bitmap_set_bit (&set, regno);
-		bitmap_set_bit (&used, regno);
+		bitmap_set_bit (set, regno);
+		bitmap_set_bit (used, regno);
 		bitmap_clear_bit (transp, regno);
 		continue;
 	      }
 	    FOR_EACH_INSN_INFO_USE (use, insn_info)
 	      {
 		unsigned regno = DF_REF_REGNO (use);
-		bitmap_set_bit (&used, regno);
+		bitmap_set_bit (used, regno);
 		if (bitmap_clear_bit (moveable, regno))
 		  bitmap_clear_bit (transp, regno);
 	      }
@@ -4552,17 +4545,13 @@ find_moveable_pseudos (void)
 	    FOR_EACH_INSN_INFO_DEF (def, insn_info)
 	      {
 		unsigned regno = DF_REF_REGNO (def);
-		bitmap_set_bit (&set, regno);
+		bitmap_set_bit (set, regno);
 		bitmap_clear_bit (transp, regno);
 		bitmap_clear_bit (moveable, regno);
 	      }
 	  }
     }
 
-  bitmap_clear (&live);
-  bitmap_clear (&used);
-  bitmap_clear (&set);
-
   FOR_EACH_BB_FN (bb, cfun)
     {
       bitmap local = bb_local + bb->index;
@@ -4605,7 +4594,7 @@ find_moveable_pseudos (void)
 		if (dump_file)
 		  fprintf (dump_file, "Ignoring reg %d, has equiv memory\n",
 			   regno);
-		bitmap_set_bit (&unusable_as_input, regno);
+		bitmap_set_bit (unusable_as_input, regno);
 		continue;
 	      }
 
@@ -4665,7 +4654,7 @@ find_moveable_pseudos (void)
 		continue;
 	      }
 
-	    bitmap_set_bit (&interesting, regno);
+	    bitmap_set_bit (interesting, regno);
 	    /* If we get here, we know closest_use is a non-NULL insn
 	       (as opposed to const_0_rtx).  */
 	    closest_uses[regno] = as_a <rtx_insn *> (closest_use);
@@ -4684,7 +4673,7 @@ find_moveable_pseudos (void)
 	  }
     }
 
-  EXECUTE_IF_SET_IN_BITMAP (&interesting, 0, i, bi)
+  EXECUTE_IF_SET_IN_BITMAP (interesting, 0, i, bi)
     {
       df_ref def = DF_REG_DEF_CHAIN (i);
       rtx_insn *def_insn = DF_REF_INSN (def);
@@ -4728,7 +4717,7 @@ find_moveable_pseudos (void)
       FOR_EACH_INSN_USE (use, def_insn)
 	{
 	  unsigned regno = DF_REF_REGNO (use);
-	  if (bitmap_bit_p (&unusable_as_input, regno))
+	  if (bitmap_bit_p (unusable_as_input, regno))
 	    {
 	      all_ok = false;
 	      if (dump_file)
@@ -4794,8 +4783,6 @@ find_moveable_pseudos (void)
       bitmap_clear (bb_transp_live + bb->index);
       bitmap_clear (bb_moveable_reg_sets + bb->index);
     }
-  bitmap_clear (&interesting);
-  bitmap_clear (&unusable_as_input);
   free (uid_luid);
   free (closest_uses);
   free (bb_local);
@@ -4875,14 +4862,12 @@ split_live_ranges_for_shrink_wrap (void)
   basic_block bb, call_dom = NULL;
   basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
   rtx_insn *insn, *last_interesting_insn = NULL;
-  bitmap_head need_new, reachable;
+  auto_bitmap need_new, reachable;
   vec<basic_block> queue;
 
   if (!SHRINK_WRAPPING_ENABLED)
     return false;
 
-  bitmap_initialize (&need_new, 0);
-  bitmap_initialize (&reachable, 0);
   queue.create (n_basic_blocks_for_fn (cfun));
 
   FOR_EACH_BB_FN (bb, cfun)
@@ -4891,22 +4876,18 @@ split_live_ranges_for_shrink_wrap (void)
 	{
 	  if (bb == first)
 	    {
-	      bitmap_clear (&need_new);
-	      bitmap_clear (&reachable);
 	      queue.release ();
 	      return false;
 	    }
 
-	  bitmap_set_bit (&need_new, bb->index);
-	  bitmap_set_bit (&reachable, bb->index);
+	  bitmap_set_bit (need_new, bb->index);
+	  bitmap_set_bit (reachable, bb->index);
 	  queue.quick_push (bb);
 	  break;
 	}
 
   if (queue.is_empty ())
     {
-      bitmap_clear (&need_new);
-      bitmap_clear (&reachable);
       queue.release ();
       return false;
     }
@@ -4919,7 +4900,7 @@ split_live_ranges_for_shrink_wrap (void)
       bb = queue.pop ();
       FOR_EACH_EDGE (e, ei, bb->succs)
 	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
-	    && bitmap_set_bit (&reachable, e->dest->index))
+	    && bitmap_set_bit (reachable, e->dest->index))
 	  queue.quick_push (e->dest);
     }
   queue.release ();
@@ -4931,32 +4912,23 @@ split_live_ranges_for_shrink_wrap (void)
 	continue;
 
       if (DF_REG_DEF_COUNT (REGNO (dest)) > 1)
-	{
-	  bitmap_clear (&need_new);
-	  bitmap_clear (&reachable);
-	  return false;
-	}
+	return false;
 
       for (df_ref use = DF_REG_USE_CHAIN (REGNO(dest));
 	   use;
 	   use = DF_REF_NEXT_REG (use))
 	{
 	  int ubbi = DF_REF_BB (use)->index;
-	  if (bitmap_bit_p (&reachable, ubbi))
-	    bitmap_set_bit (&need_new, ubbi);
+	  if (bitmap_bit_p (reachable, ubbi))
+	    bitmap_set_bit (need_new, ubbi);
 	}
       last_interesting_insn = insn;
     }
 
-  bitmap_clear (&reachable);
   if (!last_interesting_insn)
-    {
-      bitmap_clear (&need_new);
-      return false;
-    }
+    return false;
 
-  call_dom = nearest_common_dominator_for_set (CDI_DOMINATORS, &need_new);
-  bitmap_clear (&need_new);
+  call_dom = nearest_common_dominator_for_set (CDI_DOMINATORS, need_new);
   if (call_dom == first)
     return false;
 
diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c
index 30fd7597450..20bdafdb580 100644
--- a/gcc/print-rtl.c
+++ b/gcc/print-rtl.c
@@ -90,7 +90,6 @@ rtx_writer::rtx_writer (FILE *outf, int ind, bool simple, bool compact,
 rtx_reuse_manager::rtx_reuse_manager ()
 : m_next_id (0)
 {
-  bitmap_initialize (&m_defs_seen, NULL);
 }
 
 /* Determine if X is of a kind suitable for dumping via reuse_rtx.  */
@@ -158,7 +157,7 @@ rtx_reuse_manager::has_reuse_id (const_rtx x, int *out)
 bool
 rtx_reuse_manager::seen_def_p (int reuse_id)
 {
-  return bitmap_bit_p (&m_defs_seen, reuse_id);
+  return bitmap_bit_p (m_defs_seen, reuse_id);
 }
 
 /* Record that the definition of the given reuse ID has been seen.  */
@@ -166,7 +165,7 @@ rtx_reuse_manager::seen_def_p (int reuse_id)
 void
 rtx_reuse_manager::set_seen_def (int reuse_id)
 {
-  bitmap_set_bit (&m_defs_seen, reuse_id);
+  bitmap_set_bit (m_defs_seen, reuse_id);
 }
 
 #endif /* #ifndef GENERATOR_FILE */
diff --git a/gcc/print-rtl.h b/gcc/print-rtl.h
index 81dfcba62cf..eee949a1792 100644
--- a/gcc/print-rtl.h
+++ b/gcc/print-rtl.h
@@ -153,7 +153,7 @@ class rtx_reuse_manager
  private:
   hash_map<const_rtx, int> m_rtx_occurrence_count;
   hash_map<const_rtx, int> m_rtx_reuse_ids;
-  bitmap_head m_defs_seen;
+  auto_bitmap m_defs_seen;
   int m_next_id;
 };
 
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 02/13] improve bitmap / sbitmap compatibility of bitmap_set_bit
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (10 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 03/13] store the bitmap_head within the auto_bitmap tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  6:54   ` Richard Sandiford
  2017-05-09 20:55 ` [PATCH 11/13] make more vars auto_sbitmaps tbsaunde+gcc
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

This makes the sbitmap version return true if the bit was previously
unset, so that it behaves like the bitmap version.
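
For example, the test-and-set idiom already used with bitmap (here the
fix_recovery_deps hunk from patch 08, shown only as an illustration)
now works the same way when the map is an sbitmap:

  if (bitmap_set_bit (in_ready, INSN_LUID (consumer)))
    /* The bit was previously unset, i.e. this is the first time we
       see CONSUMER, so queue it exactly once.  */
    ready_list.safe_push (consumer);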

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* sbitmap.h (bitmap_set_bit): Return bool similar to bitmap
	version of this function.
---
 gcc/sbitmap.h | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
index cba0452cdb9..d4e3177d495 100644
--- a/gcc/sbitmap.h
+++ b/gcc/sbitmap.h
@@ -108,11 +108,14 @@ bitmap_bit_p (const_sbitmap map, int bitno)
 
 /* Set bit number BITNO in the sbitmap MAP.  */
 
-static inline void
+static inline bool
 bitmap_set_bit (sbitmap map, int bitno)
 {
-  map->elms[bitno / SBITMAP_ELT_BITS]
-    |= (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
+  SBITMAP_ELT_TYPE &word = map->elms[bitno / SBITMAP_ELT_BITS];
+    SBITMAP_ELT_TYPE mask = (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
+    bool ret = (word & mask) == 0;
+    word |= mask;
+    return ret;
 }
 
 /* Reset bit number BITNO in the sbitmap MAP.  */
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 03/13] store the bitmap_head within the auto_bitmap
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (9 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 06/13] replace some manual stacks with auto_vec tbsaunde+gcc
@ 2017-05-09 20:53 ` tbsaunde+gcc
  2017-05-10  8:25   ` Richard Biener
  2017-05-09 20:53 ` [PATCH 02/13] improve bitmap / sbitmap compatibility of bitmap_set_bit tbsaunde+gcc
  2017-05-09 20:55 ` [PATCH 11/13] make more vars auto_sbitmaps tbsaunde+gcc
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:53 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

This gets rid of one allocation per bitmap.  Often the bitmap_head is
now on the stack; when it isn't, it's part of some other struct on the
heap instead of being referred to by that struct.  On 64 bit platforms
this will increase the size of such structs by 24 bytes, but it's an
overall win since we don't need an 8 byte pointer pointing at the
bitmap_head.  Given that the auto_bitmap owns the bitmap_head anyway we
know there would never be a place where two auto_bitmaps would refer to
the same bitmap_head object.
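
A condensed sketch of the layout change (see the diff below for the
real definitions):

  /* Before: the auto_bitmap holds a pointer to a separately allocated
     bitmap_head.  */
  class auto_bitmap { bitmap bits; };

  /* After: the bitmap_head is embedded in the object itself, saving
     that allocation.  */
  class auto_bitmap { bitmap_head m_bits; };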

gcc/ChangeLog:

2017-05-07  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* bitmap.h (class auto_bitmap): Change type of m_bits to
	bitmap_head, and adjust ctor / dtor and member operators.
---
 gcc/bitmap.h | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/gcc/bitmap.h b/gcc/bitmap.h
index 7508239cff9..49aec001cb0 100644
--- a/gcc/bitmap.h
+++ b/gcc/bitmap.h
@@ -823,10 +823,10 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
 class auto_bitmap
 {
  public:
-  auto_bitmap () { bits = BITMAP_ALLOC (NULL); }
-  ~auto_bitmap () { BITMAP_FREE (bits); }
+  auto_bitmap () { bitmap_initialize (&m_bits, &bitmap_default_obstack); }
+  ~auto_bitmap () { bitmap_clear (&m_bits); }
   // Allow calling bitmap functions on our bitmap.
-  operator bitmap () { return bits; }
+  operator bitmap () { return &m_bits; }
 
  private:
   // Prevent making a copy that references our bitmap.
@@ -837,7 +837,7 @@ class auto_bitmap
   auto_bitmap &operator = (auto_bitmap &&);
 #endif
 
-  bitmap bits;
+  bitmap_head m_bits;
 };
 
 #endif /* GCC_BITMAP_H */
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* [PATCH 11/13] make more vars auto_sbitmaps
  2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
                   ` (11 preceding siblings ...)
  2017-05-09 20:53 ` [PATCH 02/13] improve bitmap / sbitmap compatibility of bitmap_set_bit tbsaunde+gcc
@ 2017-05-09 20:55 ` tbsaunde+gcc
  2017-05-10  8:27   ` Richard Biener
  12 siblings, 1 reply; 34+ messages in thread
From: tbsaunde+gcc @ 2017-05-09 20:55 UTC (permalink / raw)
  To: gcc-patches

From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>

gcc/ChangeLog:

2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* ddg.c (find_nodes_on_paths): Use auto_sbitmap.
	(longest_simple_path): Likewise.
	* shrink-wrap.c (spread_components): Likewise.
	(disqualify_problematic_components): Likewise.
	(emit_common_heads_for_components): Likewise.
	(emit_common_tails_for_components): Likewise.
	(insert_prologue_epilogue_for_components): Likewise.
---
 gcc/ddg.c         | 26 ++++++++------------------
 gcc/shrink-wrap.c | 38 +++++++++++---------------------------
 2 files changed, 19 insertions(+), 45 deletions(-)

diff --git a/gcc/ddg.c b/gcc/ddg.c
index 9ea98d6f40f..8aaed80dec4 100644
--- a/gcc/ddg.c
+++ b/gcc/ddg.c
@@ -1081,16 +1081,15 @@ free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
 int
 find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
 {
-  int answer;
   int change;
   unsigned int u = 0;
   int num_nodes = g->num_nodes;
   sbitmap_iterator sbi;
 
-  sbitmap workset = sbitmap_alloc (num_nodes);
-  sbitmap reachable_from = sbitmap_alloc (num_nodes);
-  sbitmap reach_to = sbitmap_alloc (num_nodes);
-  sbitmap tmp = sbitmap_alloc (num_nodes);
+  auto_sbitmap workset (num_nodes);
+  auto_sbitmap reachable_from (num_nodes);
+  auto_sbitmap reach_to (num_nodes);
+  auto_sbitmap tmp (num_nodes);
 
   bitmap_copy (reachable_from, from);
   bitmap_copy (tmp, from);
@@ -1150,12 +1149,7 @@ find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
 	}
     }
 
-  answer = bitmap_and (result, reachable_from, reach_to);
-  sbitmap_free (workset);
-  sbitmap_free (reachable_from);
-  sbitmap_free (reach_to);
-  sbitmap_free (tmp);
-  return answer;
+  return bitmap_and (result, reachable_from, reach_to);
 }
 
 
@@ -1195,10 +1189,9 @@ longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
   int i;
   unsigned int u = 0;
   int change = 1;
-  int result;
   int num_nodes = g->num_nodes;
-  sbitmap workset = sbitmap_alloc (num_nodes);
-  sbitmap tmp = sbitmap_alloc (num_nodes);
+  auto_sbitmap workset (num_nodes);
+  auto_sbitmap tmp (num_nodes);
 
 
   /* Data will hold the distance of the longest path found so far from
@@ -1224,10 +1217,7 @@ longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
 	  change |= update_dist_to_successors (u_node, nodes, tmp);
 	}
     }
-  result = g->nodes[dest].aux.count;
-  sbitmap_free (workset);
-  sbitmap_free (tmp);
-  return result;
+  return g->nodes[dest].aux.count;
 }
 
 #endif /* INSN_SCHEDULING */
diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
index 492376d949b..1ac4ea3b054 100644
--- a/gcc/shrink-wrap.c
+++ b/gcc/shrink-wrap.c
@@ -1264,7 +1264,7 @@ spread_components (sbitmap components)
   todo.create (n_basic_blocks_for_fn (cfun));
   auto_bitmap seen;
 
-  sbitmap old = sbitmap_alloc (SBITMAP_SIZE (components));
+  auto_sbitmap old (SBITMAP_SIZE (components));
 
   /* Find for every block the components that are *not* needed on some path
      from the entry to that block.  Do this with a flood fill from the entry
@@ -1390,8 +1390,6 @@ spread_components (sbitmap components)
 	  fprintf (dump_file, "\n");
 	}
     }
-
-  sbitmap_free (old);
 }
 
 /* If we cannot handle placing some component's prologues or epilogues where
@@ -1400,8 +1398,8 @@ spread_components (sbitmap components)
 static void
 disqualify_problematic_components (sbitmap components)
 {
-  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
+  auto_sbitmap pro (SBITMAP_SIZE (components));
+  auto_sbitmap epi (SBITMAP_SIZE (components));
 
   basic_block bb;
   FOR_EACH_BB_FN (bb, cfun)
@@ -1466,9 +1464,6 @@ disqualify_problematic_components (sbitmap components)
 	    }
 	}
     }
-
-  sbitmap_free (pro);
-  sbitmap_free (epi);
 }
 
 /* Place code for prologues and epilogues for COMPONENTS where we can put
@@ -1476,9 +1471,9 @@ disqualify_problematic_components (sbitmap components)
 static void
 emit_common_heads_for_components (sbitmap components)
 {
-  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (components));
+  auto_sbitmap pro (SBITMAP_SIZE (components));
+  auto_sbitmap epi (SBITMAP_SIZE (components));
+  auto_sbitmap tmp (SBITMAP_SIZE (components));
 
   basic_block bb;
   FOR_ALL_BB_FN (bb, cfun)
@@ -1554,10 +1549,6 @@ emit_common_heads_for_components (sbitmap components)
 	  bitmap_ior (SW (bb)->head_components, SW (bb)->head_components, epi);
 	}
     }
-
-  sbitmap_free (pro);
-  sbitmap_free (epi);
-  sbitmap_free (tmp);
 }
 
 /* Place code for prologues and epilogues for COMPONENTS where we can put
@@ -1565,9 +1556,9 @@ emit_common_heads_for_components (sbitmap components)
 static void
 emit_common_tails_for_components (sbitmap components)
 {
-  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (components));
+  auto_sbitmap pro (SBITMAP_SIZE (components));
+  auto_sbitmap epi (SBITMAP_SIZE (components));
+  auto_sbitmap tmp (SBITMAP_SIZE (components));
 
   basic_block bb;
   FOR_ALL_BB_FN (bb, cfun)
@@ -1664,10 +1655,6 @@ emit_common_tails_for_components (sbitmap components)
 	  bitmap_ior (SW (bb)->tail_components, SW (bb)->tail_components, pro);
 	}
     }
-
-  sbitmap_free (pro);
-  sbitmap_free (epi);
-  sbitmap_free (tmp);
 }
 
 /* Place prologues and epilogues for COMPONENTS on edges, if we haven't already
@@ -1675,8 +1662,8 @@ emit_common_tails_for_components (sbitmap components)
 static void
 insert_prologue_epilogue_for_components (sbitmap components)
 {
-  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
-  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
+  auto_sbitmap pro (SBITMAP_SIZE (components));
+  auto_sbitmap epi (SBITMAP_SIZE (components));
 
   basic_block bb;
   FOR_EACH_BB_FN (bb, cfun)
@@ -1754,9 +1741,6 @@ insert_prologue_epilogue_for_components (sbitmap components)
 	}
     }
 
-  sbitmap_free (pro);
-  sbitmap_free (epi);
-
   commit_edge_insertions ();
 }
 
-- 
2.11.0

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 02/13] improve bitmap / sbitmap compatibility of bitmap_set_bit
  2017-05-09 20:53 ` [PATCH 02/13] improve bitmap / sbitmap compatibility of bitmap_set_bit tbsaunde+gcc
@ 2017-05-10  6:54   ` Richard Sandiford
  2017-05-11  8:01     ` Trevor Saunders
  0 siblings, 1 reply; 34+ messages in thread
From: Richard Sandiford @ 2017-05-10  6:54 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: gcc-patches

tbsaunde+gcc@tbsaunde.org writes:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> This makes the sbitmap version return true if the bit was previously
> unset, so that it behaves like the bitmap version.
>
> gcc/ChangeLog:
>
> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
> 	* sbitmap.h (bitmap_set_bit): Return bool similar to bitmap
> version of this function.
> ---
>  gcc/sbitmap.h | 9 ++++++---
>  1 file changed, 6 insertions(+), 3 deletions(-)
>
> diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
> index cba0452cdb9..d4e3177d495 100644
> --- a/gcc/sbitmap.h
> +++ b/gcc/sbitmap.h
> @@ -108,11 +108,14 @@ bitmap_bit_p (const_sbitmap map, int bitno)
>  
>  /* Set bit number BITNO in the sbitmap MAP.  */
>  
> -static inline void
> +static inline bool
>  bitmap_set_bit (sbitmap map, int bitno)
>  {
> -  map->elms[bitno / SBITMAP_ELT_BITS]
> -    |= (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
> +  SBITMAP_ELT_TYPE &word = map->elms[bitno / SBITMAP_ELT_BITS];
> +    SBITMAP_ELT_TYPE mask = (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
> +    bool ret = (word & mask) == 0;
> +    word |= mask;
> +    return ret;
>  }

Indentation looks off (maybe it's a mailer thing?).  Think the function
comment should be updated too -- personally I can never remember whether
true means "I just set it" or "it was already set" :-)

What's the current position on the use of references?  IMO a pointer
is clearer here.

Thanks,
Richard

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 05/13] allow constructing an auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-09 20:53 ` [PATCH 05/13] allow constructing an auto_vec with a preallocation, and a possibly larger actual allocation size tbsaunde+gcc
@ 2017-05-10  6:58   ` Richard Sandiford
  2017-05-11  7:50     ` Trevor Saunders
  0 siblings, 1 reply; 34+ messages in thread
From: Richard Sandiford @ 2017-05-10  6:58 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: gcc-patches

tbsaunde+gcc@tbsaunde.org writes:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> This allows us to set the capacity of the vector when we construct it,
> and still use a stack buffer when the size is small enough.
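
For concreteness, this is the kind of use the new constructor enables
later in the series (e.g. in cfganal.c): the capacity is requested up
front, and the N-element stack buffer is still used whenever the
request fits.  A sketch, not part of this patch:

  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));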
>
> gcc/ChangeLog:
>
> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
> 	* genrecog.c (int_set::int_set): Explicitly construct our
> auto_vec base class.
> 	* vec.h (auto_vec::auto_vec): New constructor.
> ---
>  gcc/genrecog.c |  8 +++++---
>  gcc/vec.h      | 12 ++++++++++++
>  2 files changed, 17 insertions(+), 3 deletions(-)
>
> diff --git a/gcc/genrecog.c b/gcc/genrecog.c
> index 6a9e610e7a0..b69043f0d02 100644
> --- a/gcc/genrecog.c
> +++ b/gcc/genrecog.c
> @@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
>    iterator end ();
>  };
>  
> -int_set::int_set () {}
> +int_set::int_set () : auto_vec<uint64_t, 1> () {}
>  
> -int_set::int_set (uint64_t label)
> +int_set::int_set (uint64_t label) :
> +  auto_vec<uint64_t, 1> ()
>  {
>    safe_push (label);
>  }
>  
> -int_set::int_set (const int_set &other)
> +int_set::int_set (const int_set &other) :
> +  auto_vec<uint64_t, 1> ()
>  {
>    safe_splice (other);
>  }

Is this part of the patch necessary?  Won't the default constructor
be used anyway?

Thanks,
Richard

> diff --git a/gcc/vec.h b/gcc/vec.h
> index fee46164b01..914f89c350c 100644
> --- a/gcc/vec.h
> +++ b/gcc/vec.h
> @@ -1272,6 +1272,18 @@ public:
>      this->m_vec = &m_auto;
>    }
>  
> +  auto_vec (size_t s)
> +  {
> +    if (s > N)
> +      {
> +	this->create (s);
> +	return;
> +      }
> +
> +    m_auto.embedded_init (MAX (N, 2), 0, 1);
> +    this->m_vec = &m_auto;
> +  }
> +
>    ~auto_vec ()
>    {
>      this->release ();

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 01/13] improve safety of freeing bitmaps
  2017-05-09 20:53 ` [PATCH 01/13] improve safety of freeing bitmaps tbsaunde+gcc
@ 2017-05-10  8:15   ` Richard Biener
  2017-05-10 10:55     ` Trevor Saunders
  0 siblings, 1 reply; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:15 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> There's two groups of changes here, first taking a sbitmap &, so that we
> can assign null to the pointer after freeing the sbitmap to prevent use
> after free through that pointer.  Second we define overloads of
> sbitmap_free and bitmap_free taking auto_sbitmap and auto_bitmap
> respectively, so that you can't double free the bitmap owned by a
> auto_{s,}bitmap.

Looks good - but what do you need the void *& overload for?!  That at least
needs a comment.

Richard.

> gcc/ChangeLog:
>
> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * bitmap.h (BITMAP_FREE): Convert from macro to inline function
>         and add overloaded decl for auto_bitmap.
>         * sbitmap.h (inline void sbitmap_free): Add overload for
>         auto_sbitmap, and change sbitmap to  point to null.
> ---
>  gcc/bitmap.h  | 21 +++++++++++++++++++--
>  gcc/sbitmap.h |  7 ++++++-
>  2 files changed, 25 insertions(+), 3 deletions(-)
>
> diff --git a/gcc/bitmap.h b/gcc/bitmap.h
> index f158b447357..7508239cff9 100644
> --- a/gcc/bitmap.h
> +++ b/gcc/bitmap.h
> @@ -129,6 +129,8 @@ along with GCC; see the file COPYING3.  If not see
>
>  #include "obstack.h"
>
> +   class auto_bitmap;
> +
>  /* Bitmap memory usage.  */
>  struct bitmap_usage: public mem_usage
>  {
> @@ -372,8 +374,23 @@ extern hashval_t bitmap_hash (const_bitmap);
>  #define BITMAP_GGC_ALLOC() bitmap_gc_alloc ()
>
>  /* Do any cleanup needed on a bitmap when it is no longer used.  */
> -#define BITMAP_FREE(BITMAP) \
> -       ((void) (bitmap_obstack_free ((bitmap) BITMAP), (BITMAP) = (bitmap) NULL))
> +inline void
> +BITMAP_FREE (bitmap &b)
> +{
> +  bitmap_obstack_free ((bitmap) b);
> +  b = NULL;
> +}
> +
> +inline void
> +BITMAP_FREE (void *&b)
> +{
> +  bitmap_obstack_free ((bitmap) b);
> +  b = NULL;
> +}
> +
> +/* Intentionally unimplemented to ensure it is never called with an
> +   auto_bitmap argument.  */
> +void BITMAP_FREE (auto_bitmap);
>
>  /* Iterator for bitmaps.  */
>
> diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
> index ce4d27d927c..cba0452cdb9 100644
> --- a/gcc/sbitmap.h
> +++ b/gcc/sbitmap.h
> @@ -82,6 +82,8 @@ along with GCC; see the file COPYING3.  If not see
>  #define SBITMAP_ELT_BITS (HOST_BITS_PER_WIDEST_FAST_INT * 1u)
>  #define SBITMAP_ELT_TYPE unsigned HOST_WIDEST_FAST_INT
>
> +class auto_sbitmap;
> +
>  struct simple_bitmap_def
>  {
>    unsigned int n_bits;         /* Number of bits.  */
> @@ -208,11 +210,14 @@ bmp_iter_next (sbitmap_iterator *i, unsigned *bit_no ATTRIBUTE_UNUSED)
>         bmp_iter_next (&(ITER), &(BITNUM)))
>  #endif
>
> -inline void sbitmap_free (sbitmap map)
> +inline void sbitmap_free (sbitmap &map)
>  {
>    free (map);
> +  map = NULL;
>  }
>
> +void sbitmap_free (auto_sbitmap);
> +
>  inline void sbitmap_vector_free (sbitmap * vec)
>  {
>    free (vec);
> --
> 2.11.0
>
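
To make the intended effect of the quoted hunks concrete, a sketch (not
taken from the patch; n_bits is only a placeholder):

  sbitmap live = sbitmap_alloc (n_bits);
  /* ... */
  sbitmap_free (live);                  /* sets LIVE back to NULL ...  */
  gcc_checking_assert (live == NULL);   /* ... so stale uses are caught.  */

  auto_sbitmap tmp (n_bits);
  /* sbitmap_free (tmp); would resolve to the auto_sbitmap overload,
     which is intentionally left unimplemented, so the accidental
     double free is rejected at build time.  */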

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 03/13] store the bitmap_head within the auto_bitmap
  2017-05-09 20:53 ` [PATCH 03/13] store the bitmap_head within the auto_bitmap tbsaunde+gcc
@ 2017-05-10  8:25   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:25 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> This gets rid of one allocation per bitmap.  Often the bitmap_head is
> now on the stack; when it isn't, it's part of some other struct on the
> heap instead of being referred to by that struct.  On 64 bit platforms
> this will increase the size of such structs by 24 bytes, but it's an
> overall win since we don't need an 8 byte pointer pointing at the
> bitmap_head.  Given that the auto_bitmap owns the bitmap_head anyway we
> know there would never be a place where two auto_bitmaps would refer to
> the same bitmap_head object.

Ok.

Richard.

> gcc/ChangeLog:
>
> 2017-05-07  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * bitmap.h (class auto_bitmap): Change type of m_bits to
> bitmap_head, and adjust ctor / dtor and member operators.
> ---
>  gcc/bitmap.h | 8 ++++----
>  1 file changed, 4 insertions(+), 4 deletions(-)
>
> diff --git a/gcc/bitmap.h b/gcc/bitmap.h
> index 7508239cff9..49aec001cb0 100644
> --- a/gcc/bitmap.h
> +++ b/gcc/bitmap.h
> @@ -823,10 +823,10 @@ bmp_iter_and_compl (bitmap_iterator *bi, unsigned *bit_no)
>  class auto_bitmap
>  {
>   public:
> -  auto_bitmap () { bits = BITMAP_ALLOC (NULL); }
> -  ~auto_bitmap () { BITMAP_FREE (bits); }
> +  auto_bitmap () { bitmap_initialize (&m_bits, &bitmap_default_obstack); }
> +  ~auto_bitmap () { bitmap_clear (&m_bits); }
>    // Allow calling bitmap functions on our bitmap.
> -  operator bitmap () { return bits; }
> +  operator bitmap () { return &m_bits; }
>
>   private:
>    // Prevent making a copy that references our bitmap.
> @@ -837,7 +837,7 @@ class auto_bitmap
>    auto_bitmap &operator = (auto_bitmap &&);
>  #endif
>
> -  bitmap bits;
> +  bitmap_head m_bits;
>  };
>
>  #endif /* GCC_BITMAP_H */
> --
> 2.11.0
>

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 10/13] make a member an auto_sbitmap
  2017-05-09 20:53 ` [PATCH 10/13] make a member an auto_sbitmap tbsaunde+gcc
@ 2017-05-10  8:26   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:26 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * tree-ssa-dse.c (dse_dom_walker): Make m_live_byes a
>         auto_sbitmap.
> ---
>  gcc/tree-ssa-dse.c | 10 ++++------
>  1 file changed, 4 insertions(+), 6 deletions(-)
>
> diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
> index 90230abe822..3ebc19948e1 100644
> --- a/gcc/tree-ssa-dse.c
> +++ b/gcc/tree-ssa-dse.c
> @@ -601,16 +601,14 @@ class dse_dom_walker : public dom_walker
>  {
>  public:
>    dse_dom_walker (cdi_direction direction)
> -    : dom_walker (direction), m_byte_tracking_enabled (false)
> -
> -  { m_live_bytes = sbitmap_alloc (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)); }
> -
> -  ~dse_dom_walker () { sbitmap_free (m_live_bytes); }
> +    : dom_walker (direction),
> +    m_live_bytes (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)),
> +    m_byte_tracking_enabled (false) {}
>
>    virtual edge before_dom_children (basic_block);
>
>  private:
> -  sbitmap m_live_bytes;
> +  auto_sbitmap m_live_bytes;
>    bool m_byte_tracking_enabled;
>    void dse_optimize_stmt (gimple_stmt_iterator *);
>  };
> --
> 2.11.0
>

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap
  2017-05-09 20:53 ` [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap tbsaunde+gcc
@ 2017-05-10  8:26   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:26 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> These places were probably trying to use the default bitmap obstack,
> but passing 0 to bitmap_initialize actually uses gc allocation.  In any
> case they are all cleaned up before going out of scope so using
> auto_bitmap should be fine.

Ok.

Richard.

> gcc/ChangeLog:
>
> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * haifa-sched.c (estimate_shadow_tick): Replace manual bitmap
>         management with auto_bitmap.
>         (fix_inter_tick): Likewise.
>         (fix_recovery_deps): Likewise.
>         * ira.c (add_store_equivs): Likewise.
>         (find_moveable_pseudos): Likewise.
>         (split_live_ranges_for_shrink_wrap): Likewise.
>         * print-rtl.c (rtx_reuse_manager::rtx_reuse_manager): Likewise.
>         (rtx_reuse_manager::seen_def_p): Likewise.
>         (rtx_reuse_manager::set_seen_def): Likewise.
>         * print-rtl.h (class rtx_reuse_manager): Likewise.
> ---
>  gcc/haifa-sched.c | 23 +++++----------
>  gcc/ira.c         | 84 +++++++++++++++++++------------------------------------
>  gcc/print-rtl.c   |  5 ++--
>  gcc/print-rtl.h   |  2 +-
>  4 files changed, 38 insertions(+), 76 deletions(-)
>
> diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
> index 0ebf110471c..1fcc01d04ae 100644
> --- a/gcc/haifa-sched.c
> +++ b/gcc/haifa-sched.c
> @@ -4843,14 +4843,12 @@ estimate_insn_tick (bitmap processed, rtx_insn *insn, int budget)
>  static int
>  estimate_shadow_tick (struct delay_pair *p)
>  {
> -  bitmap_head processed;
> +  auto_bitmap processed;
>    int t;
>    bool cutoff;
> -  bitmap_initialize (&processed, 0);
>
> -  cutoff = !estimate_insn_tick (&processed, p->i2,
> +  cutoff = !estimate_insn_tick (processed, p->i2,
>                                 max_insn_queue_index + pair_delay (p));
> -  bitmap_clear (&processed);
>    if (cutoff)
>      return max_insn_queue_index;
>    t = INSN_TICK_ESTIMATE (p->i2) - (clock_var + pair_delay (p) + 1);
> @@ -7515,15 +7513,13 @@ static void
>  fix_inter_tick (rtx_insn *head, rtx_insn *tail)
>  {
>    /* Set of instructions with corrected INSN_TICK.  */
> -  bitmap_head processed;
> +  auto_bitmap processed;
>    /* ??? It is doubtful if we should assume that cycle advance happens on
>       basic block boundaries.  Basically insns that are unconditionally ready
>       on the start of the block are more preferable then those which have
>       a one cycle dependency over insn from the previous block.  */
>    int next_clock = clock_var + 1;
>
> -  bitmap_initialize (&processed, 0);
> -
>    /* Iterates over scheduled instructions and fix their INSN_TICKs and
>       INSN_TICKs of dependent instructions, so that INSN_TICKs are consistent
>       across different blocks.  */
> @@ -7539,7 +7535,7 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
>           gcc_assert (tick >= MIN_TICK);
>
>           /* Fix INSN_TICK of instruction from just scheduled block.  */
> -         if (bitmap_set_bit (&processed, INSN_LUID (head)))
> +         if (bitmap_set_bit (processed, INSN_LUID (head)))
>             {
>               tick -= next_clock;
>
> @@ -7563,7 +7559,7 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
>                   /* If NEXT has its INSN_TICK calculated, fix it.
>                      If not - it will be properly calculated from
>                      scratch later in fix_tick_ready.  */
> -                 && bitmap_set_bit (&processed, INSN_LUID (next)))
> +                 && bitmap_set_bit (processed, INSN_LUID (next)))
>                 {
>                   tick -= next_clock;
>
> @@ -7580,7 +7576,6 @@ fix_inter_tick (rtx_insn *head, rtx_insn *tail)
>             }
>         }
>      }
> -  bitmap_clear (&processed);
>  }
>
>  /* Check if NEXT is ready to be added to the ready or queue list.
> @@ -8617,9 +8612,7 @@ fix_recovery_deps (basic_block rec)
>  {
>    rtx_insn *note, *insn, *jump;
>    auto_vec<rtx_insn *, 10> ready_list;
> -  bitmap_head in_ready;
> -
> -  bitmap_initialize (&in_ready, 0);
> +  auto_bitmap in_ready;
>
>    /* NOTE - a basic block note.  */
>    note = NEXT_INSN (BB_HEAD (rec));
> @@ -8642,7 +8635,7 @@ fix_recovery_deps (basic_block rec)
>             {
>               sd_delete_dep (sd_it);
>
> -             if (bitmap_set_bit (&in_ready, INSN_LUID (consumer)))
> +             if (bitmap_set_bit (in_ready, INSN_LUID (consumer)))
>                 ready_list.safe_push (consumer);
>             }
>           else
> @@ -8657,8 +8650,6 @@ fix_recovery_deps (basic_block rec)
>      }
>    while (insn != note);
>
> -  bitmap_clear (&in_ready);
> -
>    /* Try to add instructions to the ready or queue list.  */
>    unsigned int i;
>    rtx_insn *temp;
> diff --git a/gcc/ira.c b/gcc/ira.c
> index c9751ce81ba..36a779bd37f 100644
> --- a/gcc/ira.c
> +++ b/gcc/ira.c
> @@ -3635,16 +3635,15 @@ update_equiv_regs (void)
>  static void
>  add_store_equivs (void)
>  {
> -  bitmap_head seen_insns;
> +  auto_bitmap seen_insns;
>
> -  bitmap_initialize (&seen_insns, NULL);
>    for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
>      {
>        rtx set, src, dest;
>        unsigned regno;
>        rtx_insn *init_insn;
>
> -      bitmap_set_bit (&seen_insns, INSN_UID (insn));
> +      bitmap_set_bit (seen_insns, INSN_UID (insn));
>
>        if (! INSN_P (insn))
>         continue;
> @@ -3665,7 +3664,7 @@ add_store_equivs (void)
>           && ! reg_equiv[regno].pdx_subregs
>           && reg_equiv[regno].init_insns != NULL
>           && (init_insn = reg_equiv[regno].init_insns->insn ()) != 0
> -         && bitmap_bit_p (&seen_insns, INSN_UID (init_insn))
> +         && bitmap_bit_p (seen_insns, INSN_UID (init_insn))
>           && ! find_reg_note (init_insn, REG_EQUIV, NULL_RTX)
>           && validate_equiv_mem (init_insn, src, dest) == valid_reload
>           && ! memref_used_between_p (dest, init_insn, insn)
> @@ -3685,7 +3684,6 @@ add_store_equivs (void)
>                      INSN_UID (insn));
>         }
>      }
> -  bitmap_clear (&seen_insns);
>  }
>
>  /* Scan all regs killed in an insn to see if any of them are registers
> @@ -4485,9 +4483,8 @@ find_moveable_pseudos (void)
>       moved freely downwards, but are otherwise transparent to a block.  */
>    bitmap_head *bb_moveable_reg_sets = XNEWVEC (bitmap_head,
>                                                last_basic_block_for_fn (cfun));
> -  bitmap_head live, used, set, interesting, unusable_as_input;
> +  auto_bitmap live, used, set, interesting, unusable_as_input;
>    bitmap_iterator bi;
> -  bitmap_initialize (&interesting, 0);
>
>    first_moveable_pseudo = max_regs;
>    pseudo_replaced_reg.release ();
> @@ -4497,10 +4494,6 @@ find_moveable_pseudos (void)
>    calculate_dominance_info (CDI_DOMINATORS);
>
>    i = 0;
> -  bitmap_initialize (&live, 0);
> -  bitmap_initialize (&used, 0);
> -  bitmap_initialize (&set, 0);
> -  bitmap_initialize (&unusable_as_input, 0);
>    FOR_EACH_BB_FN (bb, cfun)
>      {
>        rtx_insn *insn;
> @@ -4511,13 +4504,13 @@ find_moveable_pseudos (void)
>        bitmap_initialize (local, 0);
>        bitmap_initialize (transp, 0);
>        bitmap_initialize (moveable, 0);
> -      bitmap_copy (&live, df_get_live_out (bb));
> -      bitmap_and_into (&live, df_get_live_in (bb));
> -      bitmap_copy (transp, &live);
> +      bitmap_copy (live, df_get_live_out (bb));
> +      bitmap_and_into (live, df_get_live_in (bb));
> +      bitmap_copy (transp, live);
>        bitmap_clear (moveable);
> -      bitmap_clear (&live);
> -      bitmap_clear (&used);
> -      bitmap_clear (&set);
> +      bitmap_clear (live);
> +      bitmap_clear (used);
> +      bitmap_clear (set);
>        FOR_BB_INSNS (bb, insn)
>         if (NONDEBUG_INSN_P (insn))
>           {
> @@ -4531,20 +4524,20 @@ find_moveable_pseudos (void)
>             if (use
>                 && def
>                 && DF_REF_REGNO (use) == DF_REF_REGNO (def)
> -               && !bitmap_bit_p (&set, DF_REF_REGNO (use))
> +               && !bitmap_bit_p (set, DF_REF_REGNO (use))
>                 && rtx_moveable_p (&PATTERN (insn), OP_IN))
>               {
>                 unsigned regno = DF_REF_REGNO (use);
>                 bitmap_set_bit (moveable, regno);
> -               bitmap_set_bit (&set, regno);
> -               bitmap_set_bit (&used, regno);
> +               bitmap_set_bit (set, regno);
> +               bitmap_set_bit (used, regno);
>                 bitmap_clear_bit (transp, regno);
>                 continue;
>               }
>             FOR_EACH_INSN_INFO_USE (use, insn_info)
>               {
>                 unsigned regno = DF_REF_REGNO (use);
> -               bitmap_set_bit (&used, regno);
> +               bitmap_set_bit (used, regno);
>                 if (bitmap_clear_bit (moveable, regno))
>                   bitmap_clear_bit (transp, regno);
>               }
> @@ -4552,17 +4545,13 @@ find_moveable_pseudos (void)
>             FOR_EACH_INSN_INFO_DEF (def, insn_info)
>               {
>                 unsigned regno = DF_REF_REGNO (def);
> -               bitmap_set_bit (&set, regno);
> +               bitmap_set_bit (set, regno);
>                 bitmap_clear_bit (transp, regno);
>                 bitmap_clear_bit (moveable, regno);
>               }
>           }
>      }
>
> -  bitmap_clear (&live);
> -  bitmap_clear (&used);
> -  bitmap_clear (&set);
> -
>    FOR_EACH_BB_FN (bb, cfun)
>      {
>        bitmap local = bb_local + bb->index;
> @@ -4605,7 +4594,7 @@ find_moveable_pseudos (void)
>                 if (dump_file)
>                   fprintf (dump_file, "Ignoring reg %d, has equiv memory\n",
>                            regno);
> -               bitmap_set_bit (&unusable_as_input, regno);
> +               bitmap_set_bit (unusable_as_input, regno);
>                 continue;
>               }
>
> @@ -4665,7 +4654,7 @@ find_moveable_pseudos (void)
>                 continue;
>               }
>
> -           bitmap_set_bit (&interesting, regno);
> +           bitmap_set_bit (interesting, regno);
>             /* If we get here, we know closest_use is a non-NULL insn
>                (as opposed to const_0_rtx).  */
>             closest_uses[regno] = as_a <rtx_insn *> (closest_use);
> @@ -4684,7 +4673,7 @@ find_moveable_pseudos (void)
>           }
>      }
>
> -  EXECUTE_IF_SET_IN_BITMAP (&interesting, 0, i, bi)
> +  EXECUTE_IF_SET_IN_BITMAP (interesting, 0, i, bi)
>      {
>        df_ref def = DF_REG_DEF_CHAIN (i);
>        rtx_insn *def_insn = DF_REF_INSN (def);
> @@ -4728,7 +4717,7 @@ find_moveable_pseudos (void)
>        FOR_EACH_INSN_USE (use, def_insn)
>         {
>           unsigned regno = DF_REF_REGNO (use);
> -         if (bitmap_bit_p (&unusable_as_input, regno))
> +         if (bitmap_bit_p (unusable_as_input, regno))
>             {
>               all_ok = false;
>               if (dump_file)
> @@ -4794,8 +4783,6 @@ find_moveable_pseudos (void)
>        bitmap_clear (bb_transp_live + bb->index);
>        bitmap_clear (bb_moveable_reg_sets + bb->index);
>      }
> -  bitmap_clear (&interesting);
> -  bitmap_clear (&unusable_as_input);
>    free (uid_luid);
>    free (closest_uses);
>    free (bb_local);
> @@ -4875,14 +4862,12 @@ split_live_ranges_for_shrink_wrap (void)
>    basic_block bb, call_dom = NULL;
>    basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
>    rtx_insn *insn, *last_interesting_insn = NULL;
> -  bitmap_head need_new, reachable;
> +  auto_bitmap need_new, reachable;
>    vec<basic_block> queue;
>
>    if (!SHRINK_WRAPPING_ENABLED)
>      return false;
>
> -  bitmap_initialize (&need_new, 0);
> -  bitmap_initialize (&reachable, 0);
>    queue.create (n_basic_blocks_for_fn (cfun));
>
>    FOR_EACH_BB_FN (bb, cfun)
> @@ -4891,22 +4876,18 @@ split_live_ranges_for_shrink_wrap (void)
>         {
>           if (bb == first)
>             {
> -             bitmap_clear (&need_new);
> -             bitmap_clear (&reachable);
>               queue.release ();
>               return false;
>             }
>
> -         bitmap_set_bit (&need_new, bb->index);
> -         bitmap_set_bit (&reachable, bb->index);
> +         bitmap_set_bit (need_new, bb->index);
> +         bitmap_set_bit (reachable, bb->index);
>           queue.quick_push (bb);
>           break;
>         }
>
>    if (queue.is_empty ())
>      {
> -      bitmap_clear (&need_new);
> -      bitmap_clear (&reachable);
>        queue.release ();
>        return false;
>      }
> @@ -4919,7 +4900,7 @@ split_live_ranges_for_shrink_wrap (void)
>        bb = queue.pop ();
>        FOR_EACH_EDGE (e, ei, bb->succs)
>         if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
> -           && bitmap_set_bit (&reachable, e->dest->index))
> +           && bitmap_set_bit (reachable, e->dest->index))
>           queue.quick_push (e->dest);
>      }
>    queue.release ();
> @@ -4931,32 +4912,23 @@ split_live_ranges_for_shrink_wrap (void)
>         continue;
>
>        if (DF_REG_DEF_COUNT (REGNO (dest)) > 1)
> -       {
> -         bitmap_clear (&need_new);
> -         bitmap_clear (&reachable);
> -         return false;
> -       }
> +       return false;
>
>        for (df_ref use = DF_REG_USE_CHAIN (REGNO(dest));
>            use;
>            use = DF_REF_NEXT_REG (use))
>         {
>           int ubbi = DF_REF_BB (use)->index;
> -         if (bitmap_bit_p (&reachable, ubbi))
> -           bitmap_set_bit (&need_new, ubbi);
> +         if (bitmap_bit_p (reachable, ubbi))
> +           bitmap_set_bit (need_new, ubbi);
>         }
>        last_interesting_insn = insn;
>      }
>
> -  bitmap_clear (&reachable);
>    if (!last_interesting_insn)
> -    {
> -      bitmap_clear (&need_new);
> -      return false;
> -    }
> +    return false;
>
> -  call_dom = nearest_common_dominator_for_set (CDI_DOMINATORS, &need_new);
> -  bitmap_clear (&need_new);
> +  call_dom = nearest_common_dominator_for_set (CDI_DOMINATORS, need_new);
>    if (call_dom == first)
>      return false;
>
> diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c
> index 30fd7597450..20bdafdb580 100644
> --- a/gcc/print-rtl.c
> +++ b/gcc/print-rtl.c
> @@ -90,7 +90,6 @@ rtx_writer::rtx_writer (FILE *outf, int ind, bool simple, bool compact,
>  rtx_reuse_manager::rtx_reuse_manager ()
>  : m_next_id (0)
>  {
> -  bitmap_initialize (&m_defs_seen, NULL);
>  }
>
>  /* Determine if X is of a kind suitable for dumping via reuse_rtx.  */
> @@ -158,7 +157,7 @@ rtx_reuse_manager::has_reuse_id (const_rtx x, int *out)
>  bool
>  rtx_reuse_manager::seen_def_p (int reuse_id)
>  {
> -  return bitmap_bit_p (&m_defs_seen, reuse_id);
> +  return bitmap_bit_p (m_defs_seen, reuse_id);
>  }
>
>  /* Record that the definition of the given reuse ID has been seen.  */
> @@ -166,7 +165,7 @@ rtx_reuse_manager::seen_def_p (int reuse_id)
>  void
>  rtx_reuse_manager::set_seen_def (int reuse_id)
>  {
> -  bitmap_set_bit (&m_defs_seen, reuse_id);
> +  bitmap_set_bit (m_defs_seen, reuse_id);
>  }
>
>  #endif /* #ifndef GENERATOR_FILE */
> diff --git a/gcc/print-rtl.h b/gcc/print-rtl.h
> index 81dfcba62cf..eee949a1792 100644
> --- a/gcc/print-rtl.h
> +++ b/gcc/print-rtl.h
> @@ -153,7 +153,7 @@ class rtx_reuse_manager
>   private:
>    hash_map<const_rtx, int> m_rtx_occurrence_count;
>    hash_map<const_rtx, int> m_rtx_reuse_ids;
> -  bitmap_head m_defs_seen;
> +  auto_bitmap m_defs_seen;
>    int m_next_id;
>  };
>
> --
> 2.11.0
>

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 06/13] replace some manual stacks with auto_vec
  2017-05-09 20:53 ` [PATCH 06/13] replace some manual stacks with auto_vec tbsaunde+gcc
@ 2017-05-10  8:26   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:26 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

Richard.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * cfganal.c (mark_dfs_back_edges): Replace manual stack with
>         auto_vec.
>         (post_order_compute): Likewise.
>         (inverted_post_order_compute): Likewise.
>         (pre_and_rev_post_order_compute_fn): Likewise.
> ---
>  gcc/cfganal.c | 92 +++++++++++++++++++++++------------------------------------
>  1 file changed, 36 insertions(+), 56 deletions(-)
>
> diff --git a/gcc/cfganal.c b/gcc/cfganal.c
> index 7377a7a0434..1b01564e8c7 100644
> --- a/gcc/cfganal.c
> +++ b/gcc/cfganal.c
> @@ -61,10 +61,8 @@ static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
>  bool
>  mark_dfs_back_edges (void)
>  {
> -  edge_iterator *stack;
>    int *pre;
>    int *post;
> -  int sp;
>    int prenum = 1;
>    int postnum = 1;
>    bool found = false;
> @@ -74,8 +72,7 @@ mark_dfs_back_edges (void)
>    post = XCNEWVEC (int, last_basic_block_for_fn (cfun));
>
>    /* Allocate stack for back-tracking up CFG.  */
> -  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
> -  sp = 0;
> +  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
>
>    /* Allocate bitmap to track nodes that have been visited.  */
>    auto_sbitmap visited (last_basic_block_for_fn (cfun));
> @@ -84,16 +81,15 @@ mark_dfs_back_edges (void)
>    bitmap_clear (visited);
>
>    /* Push the first edge on to the stack.  */
> -  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
> +  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
>
> -  while (sp)
> +  while (!stack.is_empty ())
>      {
> -      edge_iterator ei;
>        basic_block src;
>        basic_block dest;
>
>        /* Look at the edge on the top of the stack.  */
> -      ei = stack[sp - 1];
> +      edge_iterator ei = stack.last ();
>        src = ei_edge (ei)->src;
>        dest = ei_edge (ei)->dest;
>        ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
> @@ -110,7 +106,7 @@ mark_dfs_back_edges (void)
>             {
>               /* Since the DEST node has been visited for the first
>                  time, check its successors.  */
> -             stack[sp++] = ei_start (dest->succs);
> +             stack.quick_push (ei_start (dest->succs));
>             }
>           else
>             post[dest->index] = postnum++;
> @@ -128,15 +124,14 @@ mark_dfs_back_edges (void)
>             post[src->index] = postnum++;
>
>           if (!ei_one_before_end_p (ei))
> -           ei_next (&stack[sp - 1]);
> +           ei_next (&stack.last ());
>           else
> -           sp--;
> +           stack.pop ();
>         }
>      }
>
>    free (pre);
>    free (post);
> -  free (stack);
>
>    return found;
>  }
> @@ -637,8 +632,6 @@ int
>  post_order_compute (int *post_order, bool include_entry_exit,
>                     bool delete_unreachable)
>  {
> -  edge_iterator *stack;
> -  int sp;
>    int post_order_num = 0;
>    int count;
>
> @@ -646,8 +639,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
>      post_order[post_order_num++] = EXIT_BLOCK;
>
>    /* Allocate stack for back-tracking up CFG.  */
> -  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
> -  sp = 0;
> +  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
>
>    /* Allocate bitmap to track nodes that have been visited.  */
>    auto_sbitmap visited (last_basic_block_for_fn (cfun));
> @@ -656,16 +648,15 @@ post_order_compute (int *post_order, bool include_entry_exit,
>    bitmap_clear (visited);
>
>    /* Push the first edge on to the stack.  */
> -  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
> +  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
>
> -  while (sp)
> +  while (!stack.is_empty ())
>      {
> -      edge_iterator ei;
>        basic_block src;
>        basic_block dest;
>
>        /* Look at the edge on the top of the stack.  */
> -      ei = stack[sp - 1];
> +      edge_iterator ei = stack.last ();
>        src = ei_edge (ei)->src;
>        dest = ei_edge (ei)->dest;
>
> @@ -679,7 +670,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
>           if (EDGE_COUNT (dest->succs) > 0)
>             /* Since the DEST node has been visited for the first
>                time, check its successors.  */
> -           stack[sp++] = ei_start (dest->succs);
> +           stack.quick_push (ei_start (dest->succs));
>           else
>             post_order[post_order_num++] = dest->index;
>         }
> @@ -690,9 +681,9 @@ post_order_compute (int *post_order, bool include_entry_exit,
>             post_order[post_order_num++] = src->index;
>
>           if (!ei_one_before_end_p (ei))
> -           ei_next (&stack[sp - 1]);
> +           ei_next (&stack.last ());
>           else
> -           sp--;
> +           stack.pop ();
>         }
>      }
>
> @@ -722,7 +713,6 @@ post_order_compute (int *post_order, bool include_entry_exit,
>        tidy_fallthru_edges ();
>      }
>
> -  free (stack);
>    return post_order_num;
>  }
>
> @@ -813,16 +803,13 @@ inverted_post_order_compute (int *post_order,
>                              sbitmap *start_points)
>  {
>    basic_block bb;
> -  edge_iterator *stack;
> -  int sp;
>    int post_order_num = 0;
>
>    if (flag_checking)
>      verify_no_unreachable_blocks ();
>
>    /* Allocate stack for back-tracking up CFG.  */
> -  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
> -  sp = 0;
> +  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
>
>    /* Allocate bitmap to track nodes that have been visited.  */
>    auto_sbitmap visited (last_basic_block_for_fn (cfun));
> @@ -836,12 +823,12 @@ inverted_post_order_compute (int *post_order,
>          if (bitmap_bit_p (*start_points, bb->index)
>             && EDGE_COUNT (bb->preds) > 0)
>           {
> -            stack[sp++] = ei_start (bb->preds);
> +           stack.quick_push (ei_start (bb->preds));
>              bitmap_set_bit (visited, bb->index);
>           }
>        if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
>         {
> -          stack[sp++] = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
> +         stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
>            bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
>         }
>      }
> @@ -853,7 +840,7 @@ inverted_post_order_compute (int *post_order,
>          /* Push the initial edge on to the stack.  */
>          if (EDGE_COUNT (bb->preds) > 0)
>            {
> -            stack[sp++] = ei_start (bb->preds);
> +           stack.quick_push (ei_start (bb->preds));
>              bitmap_set_bit (visited, bb->index);
>            }
>        }
> @@ -863,13 +850,13 @@ inverted_post_order_compute (int *post_order,
>        bool has_unvisited_bb = false;
>
>        /* The inverted traversal loop. */
> -      while (sp)
> +      while (!stack.is_empty ())
>          {
>            edge_iterator ei;
>            basic_block pred;
>
>            /* Look at the edge on the top of the stack.  */
> -          ei = stack[sp - 1];
> +         ei = stack.last ();
>            bb = ei_edge (ei)->dest;
>            pred = ei_edge (ei)->src;
>
> @@ -882,7 +869,7 @@ inverted_post_order_compute (int *post_order,
>                if (EDGE_COUNT (pred->preds) > 0)
>                  /* Since the predecessor node has been visited for the first
>                     time, check its predecessors.  */
> -                stack[sp++] = ei_start (pred->preds);
> +               stack.quick_push (ei_start (pred->preds));
>                else
>                  post_order[post_order_num++] = pred->index;
>              }
> @@ -893,15 +880,15 @@ inverted_post_order_compute (int *post_order,
>                  post_order[post_order_num++] = bb->index;
>
>                if (!ei_one_before_end_p (ei))
> -                ei_next (&stack[sp - 1]);
> +               ei_next (&stack.last ());
>                else
> -                sp--;
> +               stack.pop ();
>              }
>          }
>
>        /* Detect any infinite loop and activate the kludge.
>           Note that this doesn't check EXIT_BLOCK itself
> -         since EXIT_BLOCK is always added after the outer do-while loop.  */
> +        since EXIT_BLOCK is always added after the outer do-while loop.  */
>        FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
>                       EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
>          if (!bitmap_bit_p (visited, bb->index))
> @@ -926,31 +913,30 @@ inverted_post_order_compute (int *post_order,
>                      basic_block be = dfs_find_deadend (bb);
>                      gcc_assert (be != NULL);
>                      bitmap_set_bit (visited, be->index);
> -                    stack[sp++] = ei_start (be->preds);
> +                   stack.quick_push (ei_start (be->preds));
>                      break;
>                    }
>                }
>            }
>
> -      if (has_unvisited_bb && sp == 0)
> +      if (has_unvisited_bb && stack.is_empty ())
>          {
> -          /* No blocks are reachable from EXIT at all.
> +         /* No blocks are reachable from EXIT at all.
>               Find a dead-end from the ENTRY, and restart the iteration. */
>           basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
>            gcc_assert (be != NULL);
>            bitmap_set_bit (visited, be->index);
> -          stack[sp++] = ei_start (be->preds);
> +         stack.quick_push (ei_start (be->preds));
>          }
>
>        /* The only case the below while fires is
>           when there's an infinite loop.  */
>      }
> -  while (sp);
> +  while (!stack.is_empty ());
>
>    /* EXIT_BLOCK is always included.  */
>    post_order[post_order_num++] = EXIT_BLOCK;
>
> -  free (stack);
>    return post_order_num;
>  }
>
> @@ -971,14 +957,11 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
>                                    int *pre_order, int *rev_post_order,
>                                    bool include_entry_exit)
>  {
> -  edge_iterator *stack;
> -  int sp;
>    int pre_order_num = 0;
>    int rev_post_order_num = n_basic_blocks_for_fn (cfun) - 1;
>
>    /* Allocate stack for back-tracking up CFG.  */
> -  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
> -  sp = 0;
> +  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
>
>    if (include_entry_exit)
>      {
> @@ -998,16 +981,15 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
>    bitmap_clear (visited);
>
>    /* Push the first edge on to the stack.  */
> -  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);
> +  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));
>
> -  while (sp)
> +  while (!stack.is_empty ())
>      {
> -      edge_iterator ei;
>        basic_block src;
>        basic_block dest;
>
>        /* Look at the edge on the top of the stack.  */
> -      ei = stack[sp - 1];
> +      edge_iterator ei = stack.last ();
>        src = ei_edge (ei)->src;
>        dest = ei_edge (ei)->dest;
>
> @@ -1026,7 +1008,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
>           if (EDGE_COUNT (dest->succs) > 0)
>             /* Since the DEST node has been visited for the first
>                time, check its successors.  */
> -           stack[sp++] = ei_start (dest->succs);
> +           stack.quick_push (ei_start (dest->succs));
>           else if (rev_post_order)
>             /* There are no successors for the DEST node so assign
>                its reverse completion number.  */
> @@ -1042,14 +1024,12 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
>             rev_post_order[rev_post_order_num--] = src->index;
>
>           if (!ei_one_before_end_p (ei))
> -           ei_next (&stack[sp - 1]);
> +           ei_next (&stack.last ());
>           else
> -           sp--;
> +           stack.pop ();
>         }
>      }
>
> -  free (stack);
> -
>    if (include_entry_exit)
>      {
>        if (pre_order)
> --
> 2.11.0
>

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks
  2017-05-09 20:53 ` [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks tbsaunde+gcc
@ 2017-05-10  8:27   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:27 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

> 2017-05-07  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * bitmap.h (class auto_bitmap): New constructor taking
> bitmap_obstack * argument.
> ---
>  gcc/bitmap.h | 1 +
>  1 file changed, 1 insertion(+)
>
> diff --git a/gcc/bitmap.h b/gcc/bitmap.h
> index 49aec001cb0..2ddeee6bc10 100644
> --- a/gcc/bitmap.h
> +++ b/gcc/bitmap.h
> @@ -824,6 +824,7 @@ class auto_bitmap
>  {
>   public:
>    auto_bitmap () { bitmap_initialize (&m_bits, &bitmap_default_obstack); }
> +  explicit auto_bitmap (bitmap_obstack *o) { bitmap_initialize (&m_bits, o); }
>    ~auto_bitmap () { bitmap_clear (&m_bits); }
>    // Allow calling bitmap functions on our bitmap.
>    operator bitmap () { return &m_bits; }
> --
> 2.11.0
>
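
For illustration, a minimal sketch of what the new constructor enables; the
wrapper function and bitmap contents are invented, and only the
auto_bitmap (&df_bitmap_obstack) call and the bitmap_and_compl use mirror
hunks elsewhere in this series:

  /* Hedged sketch: a scratch bitmap allocated on the dataflow obstack
     rather than the default one.  The destructor clears the bits, so no
     explicit bitmap_clear or BITMAP_FREE is needed on any return path.  */
  static void
  example_df_scratch (bitmap analyzed, bitmap blocks)
  {
    auto_bitmap diff (&df_bitmap_obstack);  /* constructor added by this patch */
    bitmap_and_compl (diff, analyzed, blocks);
    if (bitmap_empty_p (diff))
      return;				    /* nothing leaks here */
    /* ... walk DIFF ...  */
  }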

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 11/13] make more vars auto_sbitmaps
  2017-05-09 20:55 ` [PATCH 11/13] make more vars auto_sbitmaps tbsaunde+gcc
@ 2017-05-10  8:27   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:27 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * ddg.c (find_nodes_on_paths): Use auto_sbitmap.
>         (longest_simple_path): Likewise.
>         * shrink-wrap.c (spread_components): Likewise.
>         (disqualify_problematic_components): Likewise.
>         (emit_common_heads_for_components): Likewise.
>         (emit_common_tails_for_components): Likewise.
>         (insert_prologue_epilogue_for_components): Likewise.
> ---
>  gcc/ddg.c         | 26 ++++++++------------------
>  gcc/shrink-wrap.c | 38 +++++++++++---------------------------
>  2 files changed, 19 insertions(+), 45 deletions(-)
>
> diff --git a/gcc/ddg.c b/gcc/ddg.c
> index 9ea98d6f40f..8aaed80dec4 100644
> --- a/gcc/ddg.c
> +++ b/gcc/ddg.c
> @@ -1081,16 +1081,15 @@ free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
>  int
>  find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
>  {
> -  int answer;
>    int change;
>    unsigned int u = 0;
>    int num_nodes = g->num_nodes;
>    sbitmap_iterator sbi;
>
> -  sbitmap workset = sbitmap_alloc (num_nodes);
> -  sbitmap reachable_from = sbitmap_alloc (num_nodes);
> -  sbitmap reach_to = sbitmap_alloc (num_nodes);
> -  sbitmap tmp = sbitmap_alloc (num_nodes);
> +  auto_sbitmap workset (num_nodes);
> +  auto_sbitmap reachable_from (num_nodes);
> +  auto_sbitmap reach_to (num_nodes);
> +  auto_sbitmap tmp (num_nodes);
>
>    bitmap_copy (reachable_from, from);
>    bitmap_copy (tmp, from);
> @@ -1150,12 +1149,7 @@ find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
>         }
>      }
>
> -  answer = bitmap_and (result, reachable_from, reach_to);
> -  sbitmap_free (workset);
> -  sbitmap_free (reachable_from);
> -  sbitmap_free (reach_to);
> -  sbitmap_free (tmp);
> -  return answer;
> +  return bitmap_and (result, reachable_from, reach_to);
>  }
>
>
> @@ -1195,10 +1189,9 @@ longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
>    int i;
>    unsigned int u = 0;
>    int change = 1;
> -  int result;
>    int num_nodes = g->num_nodes;
> -  sbitmap workset = sbitmap_alloc (num_nodes);
> -  sbitmap tmp = sbitmap_alloc (num_nodes);
> +  auto_sbitmap workset (num_nodes);
> +  auto_sbitmap tmp (num_nodes);
>
>
>    /* Data will hold the distance of the longest path found so far from
> @@ -1224,10 +1217,7 @@ longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
>           change |= update_dist_to_successors (u_node, nodes, tmp);
>         }
>      }
> -  result = g->nodes[dest].aux.count;
> -  sbitmap_free (workset);
> -  sbitmap_free (tmp);
> -  return result;
> +  return g->nodes[dest].aux.count;
>  }
>
>  #endif /* INSN_SCHEDULING */
> diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
> index 492376d949b..1ac4ea3b054 100644
> --- a/gcc/shrink-wrap.c
> +++ b/gcc/shrink-wrap.c
> @@ -1264,7 +1264,7 @@ spread_components (sbitmap components)
>    todo.create (n_basic_blocks_for_fn (cfun));
>    auto_bitmap seen;
>
> -  sbitmap old = sbitmap_alloc (SBITMAP_SIZE (components));
> +  auto_sbitmap old (SBITMAP_SIZE (components));
>
>    /* Find for every block the components that are *not* needed on some path
>       from the entry to that block.  Do this with a flood fill from the entry
> @@ -1390,8 +1390,6 @@ spread_components (sbitmap components)
>           fprintf (dump_file, "\n");
>         }
>      }
> -
> -  sbitmap_free (old);
>  }
>
>  /* If we cannot handle placing some component's prologues or epilogues where
> @@ -1400,8 +1398,8 @@ spread_components (sbitmap components)
>  static void
>  disqualify_problematic_components (sbitmap components)
>  {
> -  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
> +  auto_sbitmap pro (SBITMAP_SIZE (components));
> +  auto_sbitmap epi (SBITMAP_SIZE (components));
>
>    basic_block bb;
>    FOR_EACH_BB_FN (bb, cfun)
> @@ -1466,9 +1464,6 @@ disqualify_problematic_components (sbitmap components)
>             }
>         }
>      }
> -
> -  sbitmap_free (pro);
> -  sbitmap_free (epi);
>  }
>
>  /* Place code for prologues and epilogues for COMPONENTS where we can put
> @@ -1476,9 +1471,9 @@ disqualify_problematic_components (sbitmap components)
>  static void
>  emit_common_heads_for_components (sbitmap components)
>  {
> -  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (components));
> +  auto_sbitmap pro (SBITMAP_SIZE (components));
> +  auto_sbitmap epi (SBITMAP_SIZE (components));
> +  auto_sbitmap tmp (SBITMAP_SIZE (components));
>
>    basic_block bb;
>    FOR_ALL_BB_FN (bb, cfun)
> @@ -1554,10 +1549,6 @@ emit_common_heads_for_components (sbitmap components)
>           bitmap_ior (SW (bb)->head_components, SW (bb)->head_components, epi);
>         }
>      }
> -
> -  sbitmap_free (pro);
> -  sbitmap_free (epi);
> -  sbitmap_free (tmp);
>  }
>
>  /* Place code for prologues and epilogues for COMPONENTS where we can put
> @@ -1565,9 +1556,9 @@ emit_common_heads_for_components (sbitmap components)
>  static void
>  emit_common_tails_for_components (sbitmap components)
>  {
> -  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap tmp = sbitmap_alloc (SBITMAP_SIZE (components));
> +  auto_sbitmap pro (SBITMAP_SIZE (components));
> +  auto_sbitmap epi (SBITMAP_SIZE (components));
> +  auto_sbitmap tmp (SBITMAP_SIZE (components));
>
>    basic_block bb;
>    FOR_ALL_BB_FN (bb, cfun)
> @@ -1664,10 +1655,6 @@ emit_common_tails_for_components (sbitmap components)
>           bitmap_ior (SW (bb)->tail_components, SW (bb)->tail_components, pro);
>         }
>      }
> -
> -  sbitmap_free (pro);
> -  sbitmap_free (epi);
> -  sbitmap_free (tmp);
>  }
>
>  /* Place prologues and epilogues for COMPONENTS on edges, if we haven't already
> @@ -1675,8 +1662,8 @@ emit_common_tails_for_components (sbitmap components)
>  static void
>  insert_prologue_epilogue_for_components (sbitmap components)
>  {
> -  sbitmap pro = sbitmap_alloc (SBITMAP_SIZE (components));
> -  sbitmap epi = sbitmap_alloc (SBITMAP_SIZE (components));
> +  auto_sbitmap pro (SBITMAP_SIZE (components));
> +  auto_sbitmap epi (SBITMAP_SIZE (components));
>
>    basic_block bb;
>    FOR_EACH_BB_FN (bb, cfun)
> @@ -1754,9 +1741,6 @@ insert_prologue_epilogue_for_components (sbitmap components)
>         }
>      }
>
> -  sbitmap_free (pro);
> -  sbitmap_free (epi);
> -
>    commit_edge_insertions ();
>  }
>
> --
> 2.11.0
>
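
The conversions above all follow the same shape; a hedged before/after
sketch with invented names (do_work is a placeholder, not a function from
ddg.c or shrink-wrap.c):

  /* Before: every temporary needs a matching sbitmap_free, and the result
     has to be held in a local so the frees can run before returning.  */
  int
  old_style (int num_nodes)
  {
    sbitmap workset = sbitmap_alloc (num_nodes);
    sbitmap tmp = sbitmap_alloc (num_nodes);
    int result = do_work (workset, tmp);
    sbitmap_free (workset);
    sbitmap_free (tmp);
    return result;
  }

  /* After: auto_sbitmap releases its bits when it goes out of scope, so
     the temporaries and the extra result variable disappear.  */
  int
  new_style (int num_nodes)
  {
    auto_sbitmap workset (num_nodes);
    auto_sbitmap tmp (num_nodes);
    return do_work (workset, tmp);
  }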

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 07/13] use auto_bitmap more
  2017-05-09 20:53 ` [PATCH 07/13] use auto_bitmap more tbsaunde+gcc
@ 2017-05-10  8:28   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:28 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

Richard.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * bt-load.c (combine_btr_defs): Use auto_bitmap to manage bitmap
>         lifetime.
>         (migrate_btr_def): Likewise.
>         * cfgloop.c (get_loop_body_in_bfs_order): Likewise.
>         * df-core.c (loop_post_order_compute): Likewise.
>         (loop_inverted_post_order_compute): Likewise.
>         * hsa-common.h: Likewise.
>         * hsa-gen.c (hsa_bb::~hsa_bb): Likewise.
>         * init-regs.c (initialize_uninitialized_regs): Likewise.
>         * ipa-inline.c (resolve_noninline_speculation): Likewise.
>         (inline_small_functions): Likewise.
>         * ipa-reference.c (ipa_reference_write_optimization_summary):
> Likewise.
>         * ira.c (combine_and_move_insns): Likewise.
>         (build_insn_chain): Likewise.
>         * loop-invariant.c (find_invariants): Likewise.
>         * lower-subreg.c (propagate_pseudo_copies): Likewise.
>         * predict.c (tree_predict_by_opcode): Likewise.
>         (predict_paths_leading_to): Likewise.
>         (predict_paths_leading_to_edge): Likewise.
>         (estimate_loops_at_level): Likewise.
>         (estimate_loops): Likewise.
>         * shrink-wrap.c (try_shrink_wrapping): Likewise.
>         (spread_components): Likewise.
>         * tree-cfg.c (remove_edge_and_dominated_blocks): Likewise.
>         * tree-loop-distribution.c (rdg_build_partitions): Likewise.
>         * tree-predcom.c (tree_predictive_commoning_loop): Likewise.
>         * tree-ssa-coalesce.c (coalesce_ssa_name): Likewise.
>         * tree-ssa-phionlycprop.c (pass_phi_only_cprop::execute):
> Likewise.
>         * tree-ssa-pre.c (remove_dead_inserted_code): Likewise.
>         * tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
>         * tree-ssa-threadupdate.c (compute_path_counts): Likewise.
>         (mark_threaded_blocks): Likewise.
>         (thread_through_all_blocks): Likewise.
>         * tree-ssa.c (verify_ssa): Likewise.
>         (execute_update_addresses_taken): Likewise.
>         * tree-ssanames.c (verify_ssaname_freelists): Likewise.
> ---
>  gcc/bt-load.c                |  8 +++-----
>  gcc/cfgloop.c                |  4 +---
>  gcc/df-core.c                |  8 ++------
>  gcc/hsa-common.h             |  4 ++--
>  gcc/hsa-gen.c                | 14 ++------------
>  gcc/init-regs.c              |  4 +---
>  gcc/ipa-inline.c             |  6 ++----
>  gcc/ipa-reference.c          |  3 +--
>  gcc/ira.c                    | 13 ++++---------
>  gcc/loop-invariant.c         | 12 ++++--------
>  gcc/lower-subreg.c           |  8 +-------
>  gcc/predict.c                | 19 +++++--------------
>  gcc/shrink-wrap.c            | 10 +++-------
>  gcc/tree-cfg.c               |  7 +------
>  gcc/tree-loop-distribution.c |  4 +---
>  gcc/tree-predcom.c           |  4 +---
>  gcc/tree-ssa-coalesce.c      |  4 +---
>  gcc/tree-ssa-phionlycprop.c  | 15 ++++-----------
>  gcc/tree-ssa-pre.c           |  4 +---
>  gcc/tree-ssa-sink.c          |  9 +++------
>  gcc/tree-ssa-threadupdate.c  | 13 +++----------
>  gcc/tree-ssa.c               | 12 ++++--------
>  gcc/tree-ssanames.c          | 10 +++-------
>  23 files changed, 53 insertions(+), 142 deletions(-)
>
> diff --git a/gcc/bt-load.c b/gcc/bt-load.c
> index 27be6a382c4..32924e2ecc5 100644
> --- a/gcc/bt-load.c
> +++ b/gcc/bt-load.c
> @@ -1058,7 +1058,7 @@ combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
>              target registers live over the merged range.  */
>           int btr;
>           HARD_REG_SET combined_btrs_live;
> -         bitmap combined_live_range = BITMAP_ALLOC (NULL);
> +         auto_bitmap combined_live_range;
>           btr_user *user;
>
>           if (other_def->live_range == NULL)
> @@ -1116,7 +1116,6 @@ combine_btr_defs (btr_def *def, HARD_REG_SET *btrs_live_in_range)
>               delete_insn (other_def->insn);
>
>             }
> -         BITMAP_FREE (combined_live_range);
>         }
>      }
>  }
> @@ -1255,7 +1254,6 @@ can_move_up (const_basic_block bb, const rtx_insn *insn, int n_insns)
>  static int
>  migrate_btr_def (btr_def *def, int min_cost)
>  {
> -  bitmap live_range;
>    HARD_REG_SET btrs_live_in_range;
>    int btr_used_near_def = 0;
>    int def_basic_block_freq;
> @@ -1289,7 +1287,7 @@ migrate_btr_def (btr_def *def, int min_cost)
>      }
>
>    btr_def_live_range (def, &btrs_live_in_range);
> -  live_range = BITMAP_ALLOC (NULL);
> +  auto_bitmap live_range;
>    bitmap_copy (live_range, def->live_range);
>
>  #ifdef INSN_SCHEDULING
> @@ -1373,7 +1371,7 @@ migrate_btr_def (btr_def *def, int min_cost)
>        if (dump_file)
>         fprintf (dump_file, "failed to move\n");
>      }
> -  BITMAP_FREE (live_range);
> +
>    return !give_up;
>  }
>
> diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
> index afd56bb8cf7..654d188e8b5 100644
> --- a/gcc/cfgloop.c
> +++ b/gcc/cfgloop.c
> @@ -923,7 +923,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
>  {
>    basic_block *blocks;
>    basic_block bb;
> -  bitmap visited;
>    unsigned int i = 1;
>    unsigned int vc = 0;
>
> @@ -931,7 +930,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
>    gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
>
>    blocks = XNEWVEC (basic_block, loop->num_nodes);
> -  visited = BITMAP_ALLOC (NULL);
> +  auto_bitmap visited;
>    blocks[0] = loop->header;
>    bitmap_set_bit (visited, loop->header->index);
>    while (i < loop->num_nodes)
> @@ -952,7 +951,6 @@ get_loop_body_in_bfs_order (const struct loop *loop)
>         }
>      }
>
> -  BITMAP_FREE (visited);
>    return blocks;
>  }
>
> diff --git a/gcc/df-core.c b/gcc/df-core.c
> index 19f4d3dae8c..98787a768c6 100644
> --- a/gcc/df-core.c
> +++ b/gcc/df-core.c
> @@ -1303,14 +1303,13 @@ loop_post_order_compute (int *post_order, struct loop *loop)
>    edge_iterator *stack;
>    int sp;
>    int post_order_num = 0;
> -  bitmap visited;
>
>    /* Allocate stack for back-tracking up CFG.  */
>    stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
>    sp = 0;
>
>    /* Allocate bitmap to track nodes that have been visited.  */
> -  visited = BITMAP_ALLOC (NULL);
> +  auto_bitmap visited;
>
>    /* Push the first edge on to the stack.  */
>    stack[sp++] = ei_start (loop_preheader_edge (loop)->src->succs);
> @@ -1352,7 +1351,6 @@ loop_post_order_compute (int *post_order, struct loop *loop)
>      }
>
>    free (stack);
> -  BITMAP_FREE (visited);
>
>    return post_order_num;
>  }
> @@ -1367,14 +1365,13 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
>    edge_iterator *stack;
>    int sp;
>    int post_order_num = 0;
> -  bitmap visited;
>
>    /* Allocate stack for back-tracking up CFG.  */
>    stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
>    sp = 0;
>
>    /* Allocate bitmap to track nodes that have been visited.  */
> -  visited = BITMAP_ALLOC (NULL);
> +  auto_bitmap visited;
>
>    /* Put all latches into the initial work list.  In theory we'd want
>       to start from loop exits but then we'd have the special case of
> @@ -1420,7 +1417,6 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
>      }
>
>    free (stack);
> -  BITMAP_FREE (visited);
>    return post_order_num;
>  }
>
> diff --git a/gcc/hsa-common.h b/gcc/hsa-common.h
> index a24bf6e5ad1..810624e4e1c 100644
> --- a/gcc/hsa-common.h
> +++ b/gcc/hsa-common.h
> @@ -27,6 +27,7 @@ along with GCC; see the file COPYING3.  If not see
>  #include "vec.h"
>  #include "hash-table.h"
>  #include "basic-block.h"
> +#include "bitmap.h"
>
>
>  /* Return true if the compiler should produce HSAIL.  */
> @@ -1027,7 +1028,6 @@ class hsa_bb
>  public:
>    hsa_bb (basic_block cfg_bb);
>    hsa_bb (basic_block cfg_bb, int idx);
> -  ~hsa_bb ();
>
>    /* Append an instruction INSN into the basic block.  */
>    void append_insn (hsa_insn_basic *insn);
> @@ -1049,7 +1049,7 @@ public:
>    /* Just a number to construct names from.  */
>    int m_index;
>
> -  bitmap m_liveout, m_livein;
> +  auto_bitmap m_liveout, m_livein;
>  private:
>    /* Make the default constructor inaccessible.  */
>    hsa_bb ();
> diff --git a/gcc/hsa-gen.c b/gcc/hsa-gen.c
> index 4b85e0b2678..c5d8a6e1f44 100644
> --- a/gcc/hsa-gen.c
> +++ b/gcc/hsa-gen.c
> @@ -5716,8 +5716,7 @@ gen_hsa_phi_from_gimple_phi (gimple *phi_stmt, hsa_bb *hbb)
>
>  hsa_bb::hsa_bb (basic_block cfg_bb, int idx)
>    : m_bb (cfg_bb), m_first_insn (NULL), m_last_insn (NULL), m_first_phi (NULL),
> -    m_last_phi (NULL), m_index (idx), m_liveout (BITMAP_ALLOC (NULL)),
> -    m_livein (BITMAP_ALLOC (NULL))
> +    m_last_phi (NULL), m_index (idx)
>  {
>    gcc_assert (!cfg_bb->aux);
>    cfg_bb->aux = this;
> @@ -5728,21 +5727,12 @@ hsa_bb::hsa_bb (basic_block cfg_bb, int idx)
>
>  hsa_bb::hsa_bb (basic_block cfg_bb)
>    : m_bb (cfg_bb), m_first_insn (NULL), m_last_insn (NULL), m_first_phi (NULL),
> -    m_last_phi (NULL), m_index (hsa_cfun->m_hbb_count++),
> -    m_liveout (BITMAP_ALLOC (NULL)), m_livein (BITMAP_ALLOC (NULL))
> +    m_last_phi (NULL), m_index (hsa_cfun->m_hbb_count++)
>  {
>    gcc_assert (!cfg_bb->aux);
>    cfg_bb->aux = this;
>  }
>
> -/* Destructor of class representing HSA BB.  */
> -
> -hsa_bb::~hsa_bb ()
> -{
> -  BITMAP_FREE (m_livein);
> -  BITMAP_FREE (m_liveout);
> -}
> -
>  /* Create and initialize and return a new hsa_bb structure for a given CFG
>     basic block BB.  */
>
> diff --git a/gcc/init-regs.c b/gcc/init-regs.c
> index 2c69991c9e1..15d77467149 100644
> --- a/gcc/init-regs.c
> +++ b/gcc/init-regs.c
> @@ -48,7 +48,7 @@ static void
>  initialize_uninitialized_regs (void)
>  {
>    basic_block bb;
> -  bitmap already_genned = BITMAP_ALLOC (NULL);
> +  auto_bitmap already_genned;
>
>    if (optimize == 1)
>      {
> @@ -125,8 +125,6 @@ initialize_uninitialized_regs (void)
>         df_dump (dump_file);
>        df_remove_problem (df_live);
>      }
> -
> -  BITMAP_FREE (already_genned);
>  }
>
>  namespace {
> diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
> index ce79af5719e..7337e0a2fe5 100644
> --- a/gcc/ipa-inline.c
> +++ b/gcc/ipa-inline.c
> @@ -1674,7 +1674,7 @@ resolve_noninline_speculation (edge_heap_t *edge_heap, struct cgraph_edge *edge)
>        struct cgraph_node *node = edge->caller;
>        struct cgraph_node *where = node->global.inlined_to
>                                   ? node->global.inlined_to : node;
> -      bitmap updated_nodes = BITMAP_ALLOC (NULL);
> +      auto_bitmap updated_nodes;
>
>        spec_rem += edge->count;
>        edge->resolve_speculation ();
> @@ -1684,7 +1684,6 @@ resolve_noninline_speculation (edge_heap_t *edge_heap, struct cgraph_edge *edge)
>                           updated_nodes, NULL);
>        update_callee_keys (edge_heap, where,
>                           updated_nodes);
> -      BITMAP_FREE (updated_nodes);
>      }
>  }
>
> @@ -1726,7 +1725,7 @@ inline_small_functions (void)
>    struct cgraph_node *node;
>    struct cgraph_edge *edge;
>    edge_heap_t edge_heap (sreal::min ());
> -  bitmap updated_nodes = BITMAP_ALLOC (NULL);
> +  auto_bitmap updated_nodes;
>    int min_size, max_size;
>    auto_vec<cgraph_edge *> new_indirect_edges;
>    int initial_size = 0;
> @@ -2072,7 +2071,6 @@ inline_small_functions (void)
>              "Unit growth for small function inlining: %i->%i (%i%%)\n",
>              initial_size, overall_size,
>              initial_size ? overall_size * 100 / (initial_size) - 100: 0);
> -  BITMAP_FREE (updated_nodes);
>    symtab->remove_edge_removal_hook (edge_removal_hook_holder);
>  }
>
> diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
> index f47d0cc51e1..222253920fd 100644
> --- a/gcc/ipa-reference.c
> +++ b/gcc/ipa-reference.c
> @@ -992,7 +992,7 @@ ipa_reference_write_optimization_summary (void)
>    unsigned int count = 0;
>    int ltrans_statics_bitcount = 0;
>    lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
> -  bitmap ltrans_statics = BITMAP_ALLOC (NULL);
> +  auto_bitmap ltrans_statics;
>    int i;
>
>    reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
> @@ -1052,7 +1052,6 @@ ipa_reference_write_optimization_summary (void)
>                                ltrans_statics_bitcount);
>           }
>        }
> -  BITMAP_FREE (ltrans_statics);
>    lto_destroy_simple_output_block (ob);
>    splay_tree_delete (reference_vars_to_consider);
>  }
> diff --git a/gcc/ira.c b/gcc/ira.c
> index bfb05080de1..c9751ce81ba 100644
> --- a/gcc/ira.c
> +++ b/gcc/ira.c
> @@ -3698,7 +3698,7 @@ add_store_equivs (void)
>  static void
>  combine_and_move_insns (void)
>  {
> -  bitmap cleared_regs = BITMAP_ALLOC (NULL);
> +  auto_bitmap cleared_regs;
>    int max = max_reg_num ();
>
>    for (int regno = FIRST_PSEUDO_REGISTER; regno < max; regno++)
> @@ -3856,8 +3856,6 @@ combine_and_move_insns (void)
>                 df_insn_rescan (insn);
>             }
>      }
> -
> -  BITMAP_FREE (cleared_regs);
>  }
>
>  /* A pass over indirect jumps, converting simple cases to direct jumps.
> @@ -4083,8 +4081,8 @@ build_insn_chain (void)
>    basic_block bb;
>    struct insn_chain *c = NULL;
>    struct insn_chain *next = NULL;
> -  bitmap live_relevant_regs = BITMAP_ALLOC (NULL);
> -  bitmap elim_regset = BITMAP_ALLOC (NULL);
> +  auto_bitmap live_relevant_regs;
> +  auto_bitmap elim_regset;
>    /* live_subregs is a vector used to keep accurate information about
>       which hardregs are live in multiword pseudos.  live_subregs and
>       live_subregs_used are indexed by pseudo number.  The live_subreg
> @@ -4093,7 +4091,7 @@ build_insn_chain (void)
>       live_subreg[allocno] is number of bytes that the pseudo can
>       occupy.  */
>    sbitmap *live_subregs = XCNEWVEC (sbitmap, max_regno);
> -  bitmap live_subregs_used = BITMAP_ALLOC (NULL);
> +  auto_bitmap live_subregs_used;
>
>    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
>      if (TEST_HARD_REG_BIT (eliminable_regset, i))
> @@ -4336,9 +4334,6 @@ build_insn_chain (void)
>      if (live_subregs[i] != NULL)
>        sbitmap_free (live_subregs[i]);
>    free (live_subregs);
> -  BITMAP_FREE (live_subregs_used);
> -  BITMAP_FREE (live_relevant_regs);
> -  BITMAP_FREE (elim_regset);
>
>    if (dump_file)
>      print_insn_chains (dump_file);
> diff --git a/gcc/loop-invariant.c b/gcc/loop-invariant.c
> index 8292cea3e52..cda42efd13e 100644
> --- a/gcc/loop-invariant.c
> +++ b/gcc/loop-invariant.c
> @@ -1219,10 +1219,10 @@ find_invariants_body (struct loop *loop, basic_block *body,
>  static void
>  find_invariants (struct loop *loop)
>  {
> -  bitmap may_exit = BITMAP_ALLOC (NULL);
> -  bitmap always_reached = BITMAP_ALLOC (NULL);
> -  bitmap has_exit = BITMAP_ALLOC (NULL);
> -  bitmap always_executed = BITMAP_ALLOC (NULL);
> +  auto_bitmap may_exit;
> +  auto_bitmap always_reached;
> +  auto_bitmap has_exit;
> +  auto_bitmap always_executed;
>    basic_block *body = get_loop_body_in_dom_order (loop);
>
>    find_exits (loop, body, may_exit, has_exit);
> @@ -1233,10 +1233,6 @@ find_invariants (struct loop *loop)
>    find_invariants_body (loop, body, always_reached, always_executed);
>    merge_identical_invariants ();
>
> -  BITMAP_FREE (always_reached);
> -  BITMAP_FREE (always_executed);
> -  BITMAP_FREE (may_exit);
> -  BITMAP_FREE (has_exit);
>    free (body);
>  }
>
> diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
> index a4dcec51bb5..1ab1c71211f 100644
> --- a/gcc/lower-subreg.c
> +++ b/gcc/lower-subreg.c
> @@ -405,10 +405,7 @@ find_pseudo_copy (rtx set)
>  static void
>  propagate_pseudo_copies (void)
>  {
> -  bitmap queue, propagate;
> -
> -  queue = BITMAP_ALLOC (NULL);
> -  propagate = BITMAP_ALLOC (NULL);
> +  auto_bitmap queue, propagate;
>
>    bitmap_copy (queue, decomposable_context);
>    do
> @@ -429,9 +426,6 @@ propagate_pseudo_copies (void)
>        bitmap_ior_into (decomposable_context, propagate);
>      }
>    while (!bitmap_empty_p (queue));
> -
> -  BITMAP_FREE (queue);
> -  BITMAP_FREE (propagate);
>  }
>
>  /* A pointer to one of these values is passed to
> diff --git a/gcc/predict.c b/gcc/predict.c
> index fa4e626fab8..0fc9fc5c466 100644
> --- a/gcc/predict.c
> +++ b/gcc/predict.c
> @@ -2396,7 +2396,6 @@ tree_predict_by_opcode (basic_block bb)
>    tree type;
>    tree val;
>    enum tree_code cmp;
> -  bitmap visited;
>    edge_iterator ei;
>    enum br_predictor predictor;
>
> @@ -2409,10 +2408,8 @@ tree_predict_by_opcode (basic_block bb)
>    op1 = gimple_cond_rhs (stmt);
>    cmp = gimple_cond_code (stmt);
>    type = TREE_TYPE (op0);
> -  visited = BITMAP_ALLOC (NULL);
> -  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited,
> +  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
>                                &predictor);
> -  BITMAP_FREE (visited);
>    if (val && TREE_CODE (val) == INTEGER_CST)
>      {
>        if (predictor == PRED_BUILTIN_EXPECT)
> @@ -2917,9 +2914,7 @@ static void
>  predict_paths_leading_to (basic_block bb, enum br_predictor pred,
>                           enum prediction taken, struct loop *in_loop)
>  {
> -  bitmap visited = BITMAP_ALLOC (NULL);
> -  predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
> -  BITMAP_FREE (visited);
> +  predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
>  }
>
>  /* Like predict_paths_leading_to but take edge instead of basic block.  */
> @@ -2943,9 +2938,7 @@ predict_paths_leading_to_edge (edge e, enum br_predictor pred,
>        }
>    if (!has_nonloop_edge)
>      {
> -      bitmap visited = BITMAP_ALLOC (NULL);
> -      predict_paths_for_bb (bb, bb, pred, taken, visited, in_loop);
> -      BITMAP_FREE (visited);
> +      predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
>      }
>    else
>      predict_edge_def (e, pred, taken);
> @@ -3119,7 +3112,7 @@ estimate_loops_at_level (struct loop *first_loop)
>        edge e;
>        basic_block *bbs;
>        unsigned i;
> -      bitmap tovisit = BITMAP_ALLOC (NULL);
> +      auto_bitmap tovisit;
>
>        estimate_loops_at_level (loop->inner);
>
> @@ -3132,7 +3125,6 @@ estimate_loops_at_level (struct loop *first_loop)
>         bitmap_set_bit (tovisit, bbs[i]->index);
>        free (bbs);
>        propagate_freq (loop->header, tovisit);
> -      BITMAP_FREE (tovisit);
>      }
>  }
>
> @@ -3141,7 +3133,7 @@ estimate_loops_at_level (struct loop *first_loop)
>  static void
>  estimate_loops (void)
>  {
> -  bitmap tovisit = BITMAP_ALLOC (NULL);
> +  auto_bitmap tovisit;
>    basic_block bb;
>
>    /* Start by estimating the frequencies in the loops.  */
> @@ -3154,7 +3146,6 @@ estimate_loops (void)
>        bitmap_set_bit (tovisit, bb->index);
>      }
>    propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
> -  BITMAP_FREE (tovisit);
>  }
>
>  /* Drop the profile for NODE to guessed, and update its frequency based on
> diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
> index 35eb85ba63c..492376d949b 100644
> --- a/gcc/shrink-wrap.c
> +++ b/gcc/shrink-wrap.c
> @@ -758,7 +758,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
>       reachable from PRO that we already found, and in VEC a stack of
>       those we still need to consider (to find successors).  */
>
> -  bitmap bb_with = BITMAP_ALLOC (NULL);
> +  auto_bitmap bb_with;
>    bitmap_set_bit (bb_with, pro->index);
>
>    vec<basic_block> vec;
> @@ -822,7 +822,7 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
>      {
>        calculate_dominance_info (CDI_POST_DOMINATORS);
>
> -      bitmap bb_tmp = BITMAP_ALLOC (NULL);
> +      auto_bitmap bb_tmp;
>        bitmap_copy (bb_tmp, bb_with);
>        basic_block last_ok = pro;
>        vec.truncate (0);
> @@ -859,7 +859,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
>
>        pro = last_ok;
>
> -      BITMAP_FREE (bb_tmp);
>        free_dominance_info (CDI_POST_DOMINATORS);
>      }
>
> @@ -871,7 +870,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
>
>    if (pro == entry)
>      {
> -      BITMAP_FREE (bb_with);
>        free_dominance_info (CDI_DOMINATORS);
>        return;
>      }
> @@ -1006,7 +1004,6 @@ try_shrink_wrapping (edge *entry_edge, rtx_insn *prologue_seq)
>    *entry_edge = make_single_succ_edge (new_bb, pro, EDGE_FALLTHRU);
>    force_nonfallthru (*entry_edge);
>
> -  BITMAP_FREE (bb_with);
>    free_dominance_info (CDI_DOMINATORS);
>  }
>
> @@ -1265,7 +1262,7 @@ spread_components (sbitmap components)
>       on that stack.  */
>    vec<basic_block> todo;
>    todo.create (n_basic_blocks_for_fn (cfun));
> -  bitmap seen = BITMAP_ALLOC (NULL);
> +  auto_bitmap seen;
>
>    sbitmap old = sbitmap_alloc (SBITMAP_SIZE (components));
>
> @@ -1395,7 +1392,6 @@ spread_components (sbitmap components)
>      }
>
>    sbitmap_free (old);
> -  BITMAP_FREE (seen);
>  }
>
>  /* If we cannot handle placing some component's prologues or epilogues where
> diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
> index a540416cbb5..8fdfdd6f1bc 100644
> --- a/gcc/tree-cfg.c
> +++ b/gcc/tree-cfg.c
> @@ -8178,7 +8178,6 @@ remove_edge_and_dominated_blocks (edge e)
>  {
>    vec<basic_block> bbs_to_remove = vNULL;
>    vec<basic_block> bbs_to_fix_dom = vNULL;
> -  bitmap df, df_idom;
>    edge f;
>    edge_iterator ei;
>    bool none_removed = false;
> @@ -8227,9 +8226,7 @@ remove_edge_and_dominated_blocks (edge e)
>         }
>      }
>
> -  df = BITMAP_ALLOC (NULL);
> -  df_idom = BITMAP_ALLOC (NULL);
> -
> +  auto_bitmap df, df_idom;
>    if (none_removed)
>      bitmap_set_bit (df_idom,
>                     get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
> @@ -8296,8 +8293,6 @@ remove_edge_and_dominated_blocks (edge e)
>
>    iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
>
> -  BITMAP_FREE (df);
> -  BITMAP_FREE (df_idom);
>    bbs_to_remove.release ();
>    bbs_to_fix_dom.release ();
>  }
> diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
> index 1b9950eca8e..a60454b5218 100644
> --- a/gcc/tree-loop-distribution.c
> +++ b/gcc/tree-loop-distribution.c
> @@ -1251,7 +1251,7 @@ rdg_build_partitions (struct graph *rdg,
>                       vec<gimple *> starting_stmts,
>                       vec<partition *> *partitions)
>  {
> -  bitmap processed = BITMAP_ALLOC (NULL);
> +  auto_bitmap processed;
>    int i;
>    gimple *stmt;
>
> @@ -1282,8 +1282,6 @@ rdg_build_partitions (struct graph *rdg,
>
>    /* All vertices should have been assigned to at least one partition now,
>       other than vertices belonging to dead code.  */
> -
> -  BITMAP_FREE (processed);
>  }
>
>  /* Dump to FILE the PARTITIONS.  */
> diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
> index 57d8f7d207c..23e7870dd2d 100644
> --- a/gcc/tree-predcom.c
> +++ b/gcc/tree-predcom.c
> @@ -2498,7 +2498,6 @@ tree_predictive_commoning_loop (struct loop *loop)
>    struct tree_niter_desc desc;
>    bool unroll = false;
>    edge exit;
> -  bitmap tmp_vars;
>
>    if (dump_file && (dump_flags & TDF_DETAILS))
>      fprintf (dump_file, "Processing loop %d\n",  loop->num);
> @@ -2549,7 +2548,7 @@ tree_predictive_commoning_loop (struct loop *loop)
>    /* Find the suitable components and split them into chains.  */
>    components = filter_suitable_components (loop, components);
>
> -  tmp_vars = BITMAP_ALLOC (NULL);
> +  auto_bitmap tmp_vars;
>    looparound_phis = BITMAP_ALLOC (NULL);
>    determine_roots (loop, components, &chains);
>    release_components (components);
> @@ -2617,7 +2616,6 @@ tree_predictive_commoning_loop (struct loop *loop)
>  end: ;
>    release_chains (chains);
>    free_data_refs (datarefs);
> -  BITMAP_FREE (tmp_vars);
>    BITMAP_FREE (looparound_phis);
>
>    free_affine_expand_cache (&name_expansions);
> diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
> index 1b78d66456e..e166314ed4d 100644
> --- a/gcc/tree-ssa-coalesce.c
> +++ b/gcc/tree-ssa-coalesce.c
> @@ -1791,7 +1791,7 @@ coalesce_ssa_name (void)
>    tree_live_info_p liveinfo;
>    ssa_conflicts *graph;
>    coalesce_list *cl;
> -  bitmap used_in_copies = BITMAP_ALLOC (NULL);
> +  auto_bitmap used_in_copies;
>    var_map map;
>    unsigned int i;
>    tree a;
> @@ -1848,8 +1848,6 @@ coalesce_ssa_name (void)
>    else
>      compute_samebase_partition_bases (map);
>
> -  BITMAP_FREE (used_in_copies);
> -
>    if (num_var_partitions (map) < 1)
>      {
>        delete_coalesce_list (cl);
> diff --git a/gcc/tree-ssa-phionlycprop.c b/gcc/tree-ssa-phionlycprop.c
> index f61b269cd63..aa0f50256cd 100644
> --- a/gcc/tree-ssa-phionlycprop.c
> +++ b/gcc/tree-ssa-phionlycprop.c
> @@ -497,13 +497,11 @@ public:
>  unsigned int
>  pass_phi_only_cprop::execute (function *fun)
>  {
> -  bitmap interesting_names;
> -  bitmap interesting_names1;
>    bool cfg_altered = false;
>
>    /* Bitmap of blocks which need EH information updated.  We can not
>       update it on-the-fly as doing so invalidates the dominator tree.  */
> -  bitmap need_eh_cleanup = BITMAP_ALLOC (NULL);
> +  auto_bitmap need_eh_cleanup;
>
>    /* INTERESTING_NAMES is effectively our worklist, indexed by
>       SSA_NAME_VERSION.
> @@ -515,8 +513,8 @@ pass_phi_only_cprop::execute (function *fun)
>
>       Experiments have show we generally get better compilation
>       time behavior with bitmaps rather than sbitmaps.  */
> -  interesting_names = BITMAP_ALLOC (NULL);
> -  interesting_names1 = BITMAP_ALLOC (NULL);
> +  auto_bitmap interesting_names;
> +  auto_bitmap interesting_names1;
>
>    calculate_dominance_info (CDI_DOMINATORS);
>    cfg_altered = false;
> @@ -570,13 +568,8 @@ pass_phi_only_cprop::execute (function *fun)
>    /* Propagation of const and copies may make some EH edges dead.  Purge
>       such edges from the CFG as needed.  */
>    if (!bitmap_empty_p (need_eh_cleanup))
> -    {
> -      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
> -      BITMAP_FREE (need_eh_cleanup);
> -    }
> +    gimple_purge_all_dead_eh_edges (need_eh_cleanup);
>
> -  BITMAP_FREE (interesting_names);
> -  BITMAP_FREE (interesting_names1);
>    return 0;
>  }
>
> diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
> index b4095bfdeed..8175d2599ed 100644
> --- a/gcc/tree-ssa-pre.c
> +++ b/gcc/tree-ssa-pre.c
> @@ -4903,12 +4903,11 @@ mark_operand_necessary (tree op)
>  static void
>  remove_dead_inserted_code (void)
>  {
> -  bitmap worklist;
>    unsigned i;
>    bitmap_iterator bi;
>    gimple *t;
>
> -  worklist = BITMAP_ALLOC (NULL);
> +  auto_bitmap worklist;
>    EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
>      {
>        t = SSA_NAME_DEF_STMT (ssa_name (i));
> @@ -4984,7 +4983,6 @@ remove_dead_inserted_code (void)
>             }
>         }
>      }
> -  BITMAP_FREE (worklist);
>  }
>
>
> diff --git a/gcc/tree-ssa-sink.c b/gcc/tree-ssa-sink.c
> index eb8b36095d8..acf832d66f6 100644
> --- a/gcc/tree-ssa-sink.c
> +++ b/gcc/tree-ssa-sink.c
> @@ -128,7 +128,7 @@ static basic_block
>  nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
>  {
>    tree var = DEF_FROM_PTR (def_p);
> -  bitmap blocks = BITMAP_ALLOC (NULL);
> +  auto_bitmap blocks;
>    basic_block commondom;
>    unsigned int j;
>    bitmap_iterator bi;
> @@ -158,17 +158,14 @@ nearest_common_dominator_of_uses (def_operand_p def_p, bool *debug_stmts)
>
>        /* Short circuit. Nothing dominates the entry block.  */
>        if (useblock == ENTRY_BLOCK_PTR_FOR_FN (cfun))
> -       {
> -         BITMAP_FREE (blocks);
> -         return NULL;
> -       }
> +       return NULL;
> +
>        bitmap_set_bit (blocks, useblock->index);
>      }
>    commondom = BASIC_BLOCK_FOR_FN (cfun, bitmap_first_set_bit (blocks));
>    EXECUTE_IF_SET_IN_BITMAP (blocks, 0, j, bi)
>      commondom = nearest_common_dominator (CDI_DOMINATORS, commondom,
>                                           BASIC_BLOCK_FOR_FN (cfun, j));
> -  BITMAP_FREE (blocks);
>    return commondom;
>  }
>
> diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
> index e30ddd821ed..319826861dc 100644
> --- a/gcc/tree-ssa-threadupdate.c
> +++ b/gcc/tree-ssa-threadupdate.c
> @@ -723,7 +723,7 @@ compute_path_counts (struct redirection_data *rd,
>       below to add up the counts of the other edges not included in this jump
>       threading path.  */
>    struct el *next, *el;
> -  bitmap in_edge_srcs = BITMAP_ALLOC (NULL);
> +  auto_bitmap in_edge_srcs;
>    for (el = rd->incoming_edges; el; el = next)
>      {
>        next = el->next;
> @@ -759,8 +759,6 @@ compute_path_counts (struct redirection_data *rd,
>    if (path_in_freq > BB_FREQ_MAX)
>      path_in_freq = BB_FREQ_MAX;
>
> -  BITMAP_FREE (in_edge_srcs);
> -
>    /* Now compute the fraction of the total count coming into the first
>       path bb that is from the current threading path.  */
>    gcov_type total_count = e->dest->count;
> @@ -1958,7 +1956,7 @@ mark_threaded_blocks (bitmap threaded_blocks)
>  {
>    unsigned int i;
>    bitmap_iterator bi;
> -  bitmap tmp = BITMAP_ALLOC (NULL);
> +  auto_bitmap tmp;
>    basic_block bb;
>    edge e;
>    edge_iterator ei;
> @@ -2169,8 +2167,6 @@ mark_threaded_blocks (bitmap threaded_blocks)
>             }
>         }
>      }
> -
> -  BITMAP_FREE (tmp);
>  }
>
>
> @@ -2436,8 +2432,8 @@ thread_through_all_blocks (bool may_peel_loop_headers)
>    bool retval = false;
>    unsigned int i;
>    bitmap_iterator bi;
> -  bitmap threaded_blocks;
>    struct loop *loop;
> +  auto_bitmap threaded_blocks;
>
>    if (!paths.exists ())
>      {
> @@ -2445,7 +2441,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
>        goto out;
>      }
>
> -  threaded_blocks = BITMAP_ALLOC (NULL);
>    memset (&thread_stats, 0, sizeof (thread_stats));
>
>    /* Remove any paths that referenced removed edges.  */
> @@ -2578,8 +2573,6 @@ thread_through_all_blocks (bool may_peel_loop_headers)
>
>    free_original_copy_tables ();
>
> -  BITMAP_FREE (threaded_blocks);
> -  threaded_blocks = NULL;
>    paths.release ();
>
>    if (retval)
> diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
> index 42e708ed673..11f5a2bac92 100644
> --- a/gcc/tree-ssa.c
> +++ b/gcc/tree-ssa.c
> @@ -1022,7 +1022,7 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
>    ssa_op_iter iter;
>    tree op;
>    enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
> -  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
> +  auto_bitmap names_defined_in_bb;
>
>    gcc_assert (!need_ssa_update_p (cfun));
>
> @@ -1176,7 +1176,6 @@ verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
>    else
>      set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
>
> -  BITMAP_FREE (names_defined_in_bb);
>    timevar_pop (TV_TREE_SSA_VERIFY);
>    return;
>
> @@ -1622,9 +1621,9 @@ void
>  execute_update_addresses_taken (void)
>  {
>    basic_block bb;
> -  bitmap addresses_taken = BITMAP_ALLOC (NULL);
> -  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
> -  bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
> +  auto_bitmap addresses_taken;
> +  auto_bitmap not_reg_needs;
> +  auto_bitmap suitable_for_renaming;
>    tree var;
>    unsigned i;
>
> @@ -2005,9 +2004,6 @@ execute_update_addresses_taken (void)
>         update_ssa (TODO_update_ssa);
>      }
>
> -  BITMAP_FREE (not_reg_needs);
> -  BITMAP_FREE (addresses_taken);
> -  BITMAP_FREE (suitable_for_renaming);
>    timevar_pop (TV_ADDRESS_TAKEN);
>  }
>
> diff --git a/gcc/tree-ssanames.c b/gcc/tree-ssanames.c
> index 6487542eb8f..353c7b1906a 100644
> --- a/gcc/tree-ssanames.c
> +++ b/gcc/tree-ssanames.c
> @@ -127,7 +127,7 @@ verify_ssaname_freelists (struct function *fun)
>    if (!gimple_in_ssa_p (fun))
>      return;
>
> -  bitmap names_in_il = BITMAP_ALLOC (NULL);
> +  auto_bitmap names_in_il;
>
>    /* Walk the entire IL noting every SSA_NAME we see.  */
>    basic_block bb;
> @@ -165,7 +165,7 @@ verify_ssaname_freelists (struct function *fun)
>
>    /* Now walk the free list noting what we find there and verifying
>       there are no duplicates.  */
> -  bitmap names_in_freelists = BITMAP_ALLOC (NULL);
> +  auto_bitmap names_in_freelists;
>    if (FREE_SSANAMES (fun))
>      {
>        for (unsigned int i = 0; i < FREE_SSANAMES (fun)->length (); i++)
> @@ -221,7 +221,7 @@ verify_ssaname_freelists (struct function *fun)
>
>    unsigned int i;
>    bitmap_iterator bi;
> -  bitmap all_names = BITMAP_ALLOC (NULL);
> +  auto_bitmap all_names;
>    bitmap_set_range (all_names, UNUSED_NAME_VERSION + 1, num_ssa_names - 1);
>    bitmap_ior_into (names_in_il, names_in_freelists);
>
> @@ -230,10 +230,6 @@ verify_ssaname_freelists (struct function *fun)
>    EXECUTE_IF_AND_COMPL_IN_BITMAP(all_names, names_in_il,
>                                  UNUSED_NAME_VERSION + 1, i, bi)
>      gcc_assert (!ssa_name (i));
> -
> -  BITMAP_FREE (all_names);
> -  BITMAP_FREE (names_in_freelists);
> -  BITMAP_FREE (names_in_il);
>  }
>
>  /* Move all SSA_NAMEs from FREE_SSA_NAMES_QUEUE to FREE_SSA_NAMES.
> --
> 2.11.0
>
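
The same RAII pattern, sketched for the default-obstack case these hunks
convert (names invented; the constructor and destructor behaviour comes
from the auto_bitmap definition quoted in patch 04):

  /* Before: allocate on the default obstack and remember to free.  */
  void
  old_style (void)
  {
    bitmap visited = BITMAP_ALLOC (NULL);
    bitmap_set_bit (visited, 0);
    /* ... */
    BITMAP_FREE (visited);
  }

  /* After: auto_bitmap initializes itself on bitmap_default_obstack and
     clears its bits in its destructor; early returns need no cleanup.  */
  void
  new_style (void)
  {
    auto_bitmap visited;
    bitmap_set_bit (visited, 0);
    /* ... */
  }

For one-shot temporaries, the predict.c hunks above go a step further and
pass an unnamed auto_bitmap () directly to the callee.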

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 12/13] make depth_first_search_ds a class
  2017-05-09 20:53 ` [PATCH 12/13] make depth_first_search_ds a class tbsaunde+gcc
@ 2017-05-10  8:29   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:29 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * cfganal.c (connect_infinite_loops_to_exit): Adjust.
>         (depth_first_search::depth_first_search): Change structure init
> function to this constructor.
>         (depth_first_search::add_bb): Rename function to this member.
>         (depth_first_search::execute): Likewise.
>         (flow_dfs_compute_reverse_finish): Adjust.
> ---
>  gcc/cfganal.c | 96 +++++++++++++++++++++--------------------------------------
>  1 file changed, 34 insertions(+), 62 deletions(-)
>
> diff --git a/gcc/cfganal.c b/gcc/cfganal.c
> index 1b01564e8c7..27b453ca3f7 100644
> --- a/gcc/cfganal.c
> +++ b/gcc/cfganal.c
> @@ -28,25 +28,24 @@ along with GCC; see the file COPYING3.  If not see
>  #include "cfganal.h"
>  #include "cfgloop.h"
>
> +namespace {
>  /* Store the data structures necessary for depth-first search.  */
> -struct depth_first_search_ds {
> -  /* stack for backtracking during the algorithm */
> -  basic_block *stack;
> +class depth_first_search
> +  {
> +public:
> +    depth_first_search ();
> +
> +    basic_block execute (basic_block);
> +    void add_bb (basic_block);
>
> -  /* number of edges in the stack.  That is, positions 0, ..., sp-1
> -     have edges.  */
> -  unsigned int sp;
> +private:
> +  /* stack for backtracking during the algorithm */
> +  auto_vec<basic_block, 20> m_stack;
>
>    /* record of basic blocks already seen by depth-first search */
> -  sbitmap visited_blocks;
> +  auto_sbitmap m_visited_blocks;
>  };
> -
> -static void flow_dfs_compute_reverse_init (depth_first_search_ds *);
> -static void flow_dfs_compute_reverse_add_bb (depth_first_search_ds *,
> -                                            basic_block);
> -static basic_block flow_dfs_compute_reverse_execute (depth_first_search_ds *,
> -                                                    basic_block);
> -static void flow_dfs_compute_reverse_finish (depth_first_search_ds *);
> +}
>
>  /* Mark the back edges in DFS traversal.
>     Return nonzero if a loop (natural or otherwise) is present.
> @@ -597,30 +596,23 @@ add_noreturn_fake_exit_edges (void)
>  void
>  connect_infinite_loops_to_exit (void)
>  {
> -  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
> -  basic_block deadend_block;
> -  depth_first_search_ds dfs_ds;
> -
>    /* Perform depth-first search in the reverse graph to find nodes
>       reachable from the exit block.  */
> -  flow_dfs_compute_reverse_init (&dfs_ds);
> -  flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
> +  depth_first_search dfs;
> +  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
>
>    /* Repeatedly add fake edges, updating the unreachable nodes.  */
> +  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
>    while (1)
>      {
> -      unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds,
> -                                                         unvisited_block);
> +      unvisited_block = dfs.execute (unvisited_block);
>        if (!unvisited_block)
>         break;
>
> -      deadend_block = dfs_find_deadend (unvisited_block);
> +      basic_block deadend_block = dfs_find_deadend (unvisited_block);
>        make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
> -      flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
> +      dfs.add_bb (deadend_block);
>      }
> -
> -  flow_dfs_compute_reverse_finish (&dfs_ds);
> -  return;
>  }
>
>  /* Compute reverse top sort order.  This is computing a post order
> @@ -1094,31 +1086,22 @@ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
>     search context.  If INITIALIZE_STACK is nonzero, there is an
>     element on the stack.  */
>
> -static void
> -flow_dfs_compute_reverse_init (depth_first_search_ds *data)
> +depth_first_search::depth_first_search () :
> +  m_stack (n_basic_blocks_for_fn (cfun)),
> +  m_visited_blocks (last_basic_block_for_fn (cfun))
>  {
> -  /* Allocate stack for back-tracking up CFG.  */
> -  data->stack = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
> -  data->sp = 0;
> -
> -  /* Allocate bitmap to track nodes that have been visited.  */
> -  data->visited_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
> -
> -  /* None of the nodes in the CFG have been visited yet.  */
> -  bitmap_clear (data->visited_blocks);
> -
> -  return;
> +  bitmap_clear (m_visited_blocks);
>  }
>
>  /* Add the specified basic block to the top of the dfs data
>     structures.  When the search continues, it will start at the
>     block.  */
>
> -static void
> -flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
> +void
> +depth_first_search::add_bb (basic_block bb)
>  {
> -  data->stack[data->sp++] = bb;
> -  bitmap_set_bit (data->visited_blocks, bb->index);
> +  m_stack.quick_push (bb);
> +  bitmap_set_bit (m_visited_blocks, bb->index);
>  }
>
>  /* Continue the depth-first search through the reverse graph starting with the
> @@ -1126,42 +1109,31 @@ flow_dfs_compute_reverse_add_bb (depth_first_search_ds *data, basic_block bb)
>     are marked.  Returns an unvisited basic block, or NULL if there is none
>     available.  */
>
> -static basic_block
> -flow_dfs_compute_reverse_execute (depth_first_search_ds *data,
> -                                 basic_block last_unvisited)
> +basic_block
> +depth_first_search::execute (basic_block last_unvisited)
>  {
>    basic_block bb;
>    edge e;
>    edge_iterator ei;
>
> -  while (data->sp > 0)
> +  while (!m_stack.is_empty ())
>      {
> -      bb = data->stack[--data->sp];
> +      bb = m_stack.pop ();
>
>        /* Perform depth-first search on adjacent vertices.  */
>        FOR_EACH_EDGE (e, ei, bb->preds)
> -       if (!bitmap_bit_p (data->visited_blocks, e->src->index))
> -         flow_dfs_compute_reverse_add_bb (data, e->src);
> +       if (!bitmap_bit_p (m_visited_blocks, e->src->index))
> +         add_bb (e->src);
>      }
>
>    /* Determine if there are unvisited basic blocks.  */
>    FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
> -    if (!bitmap_bit_p (data->visited_blocks, bb->index))
> +    if (!bitmap_bit_p (m_visited_blocks, bb->index))
>        return bb;
>
>    return NULL;
>  }
>
> -/* Destroy the data structures needed for depth-first search on the
> -   reverse graph.  */
> -
> -static void
> -flow_dfs_compute_reverse_finish (depth_first_search_ds *data)
> -{
> -  free (data->stack);
> -  sbitmap_free (data->visited_blocks);
> -}
> -
>  /* Performs dfs search from BB over vertices satisfying PREDICATE;
>     if REVERSE, go against direction of edges.  Returns number of blocks
>     found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
> --
> 2.11.0
>
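
Condensed from the connect_infinite_loops_to_exit hunk above, the call
pattern after this change looks roughly like this (the fake-edge handling
is reproduced from the hunk; everything else is elided):

  /* The constructor sizes the stack and visited bitmap from cfun; the
     auto_vec and auto_sbitmap members make flow_dfs_compute_reverse_finish
     unnecessary.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  basic_block unvisited = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while ((unvisited = dfs.execute (unvisited)) != NULL)
    {
      basic_block deadend = dfs_find_deadend (unvisited);
      make_edge (deadend, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
      dfs.add_bb (deadend);
    }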

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 09/13] use auto_bitmap more with alternate obstacks
  2017-05-09 20:53 ` [PATCH 09/13] use auto_bitmap more with alternate obstacks tbsaunde+gcc
@ 2017-05-10  8:31   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:31 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * df-core.c (df_set_blocks): Start using auto_bitmap.
>         (df_compact_blocks): Likewise.
>         * df-problems.c (df_rd_confluence_n): Likewise.
>         * df-scan.c (df_insn_rescan_all): Likewise.
>         (df_process_deferred_rescans): Likewise.
>         (df_update_entry_block_defs): Likewise.
>         (df_update_exit_block_uses): Likewise.
>         (df_entry_block_bitmap_verify): Likewise.
>         (df_exit_block_bitmap_verify): Likewise.
>         (df_scan_verify): Likewise.
>         * lra-constraints.c (lra_constraints): Likewise.
>         (undo_optional_reloads): Likewise.
>         (lra_undo_inheritance): Likewise.
>         * lra-remat.c (calculate_gen_cands): Likewise.
>         (do_remat): Likewise.
>         * lra-spills.c (assign_spill_hard_regs): Likewise.
>         (spill_pseudos): Likewise.
>         * tree-ssa-pre.c (bitmap_set_and): Likewise.
>         (bitmap_set_subtract_values): Likewise.
> ---
>  gcc/df-core.c         | 30 +++++++----------
>  gcc/df-problems.c     | 10 +++---
>  gcc/df-scan.c         | 93 ++++++++++++++++++++-------------------------------
>  gcc/lra-constraints.c | 42 ++++++++++-------------
>  gcc/lra-remat.c       | 43 ++++++++++--------------
>  gcc/lra-spills.c      | 25 ++++++--------
>  gcc/tree-ssa-pre.c    | 17 ++++------
>  7 files changed, 104 insertions(+), 156 deletions(-)
>
> diff --git a/gcc/df-core.c b/gcc/df-core.c
> index 98787a768c6..1b270d417aa 100644
> --- a/gcc/df-core.c
> +++ b/gcc/df-core.c
> @@ -497,9 +497,8 @@ df_set_blocks (bitmap blocks)
>           /* This block is called to change the focus from one subset
>              to another.  */
>           int p;
> -         bitmap_head diff;
> -         bitmap_initialize (&diff, &df_bitmap_obstack);
> -         bitmap_and_compl (&diff, df->blocks_to_analyze, blocks);
> +         auto_bitmap diff (&df_bitmap_obstack);
> +         bitmap_and_compl (diff, df->blocks_to_analyze, blocks);
>           for (p = 0; p < df->num_problems_defined; p++)
>             {
>               struct dataflow *dflow = df->problems_in_order[p];
> @@ -510,7 +509,7 @@ df_set_blocks (bitmap blocks)
>                   bitmap_iterator bi;
>                   unsigned int bb_index;
>
> -                 EXECUTE_IF_SET_IN_BITMAP (&diff, 0, bb_index, bi)
> +                 EXECUTE_IF_SET_IN_BITMAP (diff, 0, bb_index, bi)
>                     {
>                       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
>                       if (bb)
> @@ -522,8 +521,6 @@ df_set_blocks (bitmap blocks)
>                     }
>                 }
>             }
> -
> -          bitmap_clear (&diff);
>         }
>        else
>         {
> @@ -1652,9 +1649,8 @@ df_compact_blocks (void)
>    int i, p;
>    basic_block bb;
>    void *problem_temps;
> -  bitmap_head tmp;
>
> -  bitmap_initialize (&tmp, &df_bitmap_obstack);
> +  auto_bitmap tmp (&df_bitmap_obstack);
>    for (p = 0; p < df->num_problems_defined; p++)
>      {
>        struct dataflow *dflow = df->problems_in_order[p];
> @@ -1663,17 +1659,17 @@ df_compact_blocks (void)
>          dflow problem.  */
>        if (dflow->out_of_date_transfer_functions)
>         {
> -         bitmap_copy (&tmp, dflow->out_of_date_transfer_functions);
> +         bitmap_copy (tmp, dflow->out_of_date_transfer_functions);
>           bitmap_clear (dflow->out_of_date_transfer_functions);
> -         if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
> +         if (bitmap_bit_p (tmp, ENTRY_BLOCK))
>             bitmap_set_bit (dflow->out_of_date_transfer_functions, ENTRY_BLOCK);
> -         if (bitmap_bit_p (&tmp, EXIT_BLOCK))
> +         if (bitmap_bit_p (tmp, EXIT_BLOCK))
>             bitmap_set_bit (dflow->out_of_date_transfer_functions, EXIT_BLOCK);
>
>           i = NUM_FIXED_BLOCKS;
>           FOR_EACH_BB_FN (bb, cfun)
>             {
> -             if (bitmap_bit_p (&tmp, bb->index))
> +             if (bitmap_bit_p (tmp, bb->index))
>                 bitmap_set_bit (dflow->out_of_date_transfer_functions, i);
>               i++;
>             }
> @@ -1711,23 +1707,21 @@ df_compact_blocks (void)
>
>    if (df->blocks_to_analyze)
>      {
> -      if (bitmap_bit_p (&tmp, ENTRY_BLOCK))
> +      if (bitmap_bit_p (tmp, ENTRY_BLOCK))
>         bitmap_set_bit (df->blocks_to_analyze, ENTRY_BLOCK);
> -      if (bitmap_bit_p (&tmp, EXIT_BLOCK))
> +      if (bitmap_bit_p (tmp, EXIT_BLOCK))
>         bitmap_set_bit (df->blocks_to_analyze, EXIT_BLOCK);
> -      bitmap_copy (&tmp, df->blocks_to_analyze);
> +      bitmap_copy (tmp, df->blocks_to_analyze);
>        bitmap_clear (df->blocks_to_analyze);
>        i = NUM_FIXED_BLOCKS;
>        FOR_EACH_BB_FN (bb, cfun)
>         {
> -         if (bitmap_bit_p (&tmp, bb->index))
> +         if (bitmap_bit_p (tmp, bb->index))
>             bitmap_set_bit (df->blocks_to_analyze, i);
>           i++;
>         }
>      }
>
> -  bitmap_clear (&tmp);
> -
>    i = NUM_FIXED_BLOCKS;
>    FOR_EACH_BB_FN (bb, cfun)
>      {
> diff --git a/gcc/df-problems.c b/gcc/df-problems.c
> index 92323a39d8a..755aecf46df 100644
> --- a/gcc/df-problems.c
> +++ b/gcc/df-problems.c
> @@ -461,19 +461,17 @@ df_rd_confluence_n (edge e)
>        bitmap dense_invalidated = &problem_data->dense_invalidated_by_call;
>        bitmap_iterator bi;
>        unsigned int regno;
> -      bitmap_head tmp;
>
> -      bitmap_initialize (&tmp, &df_bitmap_obstack);
> -      bitmap_and_compl (&tmp, op2, dense_invalidated);
> +      auto_bitmap tmp (&df_bitmap_obstack);
> +      bitmap_and_compl (tmp, op2, dense_invalidated);
>
>        EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
>         {
> -         bitmap_clear_range (&tmp,
> +         bitmap_clear_range (tmp,
>                               DF_DEFS_BEGIN (regno),
>                               DF_DEFS_COUNT (regno));
>         }
> -      changed |= bitmap_ior_into (op1, &tmp);
> -      bitmap_clear (&tmp);
> +      changed |= bitmap_ior_into (op1, tmp);
>        return changed;
>      }
>    else
> diff --git a/gcc/df-scan.c b/gcc/df-scan.c
> index f75098c2bec..4884608b167 100644
> --- a/gcc/df-scan.c
> +++ b/gcc/df-scan.c
> @@ -1161,9 +1161,6 @@ df_insn_rescan_all (void)
>    basic_block bb;
>    bitmap_iterator bi;
>    unsigned int uid;
> -  bitmap_head tmp;
> -
> -  bitmap_initialize (&tmp, &df_bitmap_obstack);
>
>    if (df->changeable_flags & DF_NO_INSN_RESCAN)
>      {
> @@ -1177,15 +1174,15 @@ df_insn_rescan_all (void)
>        defer_insn_rescan = true;
>      }
>
> -  bitmap_copy (&tmp, &df->insns_to_delete);
> -  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
> +  auto_bitmap tmp (&df_bitmap_obstack);
> +  bitmap_copy (tmp, &df->insns_to_delete);
> +  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
>      {
>        struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
>        if (insn_info)
>         df_insn_info_delete (uid);
>      }
>
> -  bitmap_clear (&tmp);
>    bitmap_clear (&df->insns_to_delete);
>    bitmap_clear (&df->insns_to_rescan);
>    bitmap_clear (&df->insns_to_notes_rescan);
> @@ -1215,9 +1212,6 @@ df_process_deferred_rescans (void)
>    bool defer_insn_rescan = false;
>    bitmap_iterator bi;
>    unsigned int uid;
> -  bitmap_head tmp;
> -
> -  bitmap_initialize (&tmp, &df_bitmap_obstack);
>
>    if (df->changeable_flags & DF_NO_INSN_RESCAN)
>      {
> @@ -1234,24 +1228,25 @@ df_process_deferred_rescans (void)
>    if (dump_file)
>      fprintf (dump_file, "starting the processing of deferred insns\n");
>
> -  bitmap_copy (&tmp, &df->insns_to_delete);
> -  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
> +  auto_bitmap tmp (&df_bitmap_obstack);
> +  bitmap_copy (tmp, &df->insns_to_delete);
> +  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
>      {
>        struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
>        if (insn_info)
>         df_insn_info_delete (uid);
>      }
>
> -  bitmap_copy (&tmp, &df->insns_to_rescan);
> -  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
> +  bitmap_copy (tmp, &df->insns_to_rescan);
> +  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
>      {
>        struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
>        if (insn_info)
>         df_insn_rescan (insn_info->insn);
>      }
>
> -  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
> -  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
> +  bitmap_copy (tmp, &df->insns_to_notes_rescan);
> +  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
>      {
>        struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
>        if (insn_info)
> @@ -1261,7 +1256,6 @@ df_process_deferred_rescans (void)
>    if (dump_file)
>      fprintf (dump_file, "ending the processing of deferred insns\n");
>
> -  bitmap_clear (&tmp);
>    bitmap_clear (&df->insns_to_delete);
>    bitmap_clear (&df->insns_to_rescan);
>    bitmap_clear (&df->insns_to_notes_rescan);
> @@ -3628,14 +3622,13 @@ df_record_entry_block_defs (bitmap entry_block_defs)
>  void
>  df_update_entry_block_defs (void)
>  {
> -  bitmap_head refs;
>    bool changed = false;
>
> -  bitmap_initialize (&refs, &df_bitmap_obstack);
> -  df_get_entry_block_def_set (&refs);
> +  auto_bitmap refs (&df_bitmap_obstack);
> +  df_get_entry_block_def_set (refs);
>    if (df->entry_block_defs)
>      {
> -      if (!bitmap_equal_p (df->entry_block_defs, &refs))
> +      if (!bitmap_equal_p (df->entry_block_defs, refs))
>         {
>           struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
>           df_ref_chain_delete_du_chain (bb_info->artificial_defs);
> @@ -3655,11 +3648,10 @@ df_update_entry_block_defs (void)
>
>    if (changed)
>      {
> -      df_record_entry_block_defs (&refs);
> -      bitmap_copy (df->entry_block_defs, &refs);
> +      df_record_entry_block_defs (refs);
> +      bitmap_copy (df->entry_block_defs, refs);
>        df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
>      }
> -  bitmap_clear (&refs);
>  }
>
>
> @@ -3804,14 +3796,13 @@ df_record_exit_block_uses (bitmap exit_block_uses)
>  void
>  df_update_exit_block_uses (void)
>  {
> -  bitmap_head refs;
>    bool changed = false;
>
> -  bitmap_initialize (&refs, &df_bitmap_obstack);
> -  df_get_exit_block_use_set (&refs);
> +  auto_bitmap refs (&df_bitmap_obstack);
> +  df_get_exit_block_use_set (refs);
>    if (df->exit_block_uses)
>      {
> -      if (!bitmap_equal_p (df->exit_block_uses, &refs))
> +      if (!bitmap_equal_p (df->exit_block_uses, refs))
>         {
>           struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
>           df_ref_chain_delete_du_chain (bb_info->artificial_uses);
> @@ -3831,11 +3822,10 @@ df_update_exit_block_uses (void)
>
>    if (changed)
>      {
> -      df_record_exit_block_uses (&refs);
> -      bitmap_copy (df->exit_block_uses,& refs);
> +      df_record_exit_block_uses (refs);
> +      bitmap_copy (df->exit_block_uses, refs);
>        df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
>      }
> -  bitmap_clear (&refs);
>  }
>
>  static bool initialized = false;
> @@ -4171,25 +4161,22 @@ df_bb_verify (basic_block bb)
>  static bool
>  df_entry_block_bitmap_verify (bool abort_if_fail)
>  {
> -  bitmap_head entry_block_defs;
>    bool is_eq;
>
> -  bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
> -  df_get_entry_block_def_set (&entry_block_defs);
> +  auto_bitmap entry_block_defs (&df_bitmap_obstack);
> +  df_get_entry_block_def_set (entry_block_defs);
>
> -  is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
> +  is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
>
>    if (!is_eq && abort_if_fail)
>      {
>        fprintf (stderr, "entry_block_defs = ");
> -      df_print_regset (stderr, &entry_block_defs);
> +      df_print_regset (stderr, entry_block_defs);
>        fprintf (stderr, "df->entry_block_defs = ");
>        df_print_regset (stderr, df->entry_block_defs);
>        gcc_assert (0);
>      }
>
> -  bitmap_clear (&entry_block_defs);
> -
>    return is_eq;
>  }
>
> @@ -4200,25 +4187,22 @@ df_entry_block_bitmap_verify (bool abort_if_fail)
>  static bool
>  df_exit_block_bitmap_verify (bool abort_if_fail)
>  {
> -  bitmap_head exit_block_uses;
>    bool is_eq;
>
> -  bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
> -  df_get_exit_block_use_set (&exit_block_uses);
> +  auto_bitmap exit_block_uses (&df_bitmap_obstack);
> +  df_get_exit_block_use_set (exit_block_uses);
>
> -  is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
> +  is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
>
>    if (!is_eq && abort_if_fail)
>      {
>        fprintf (stderr, "exit_block_uses = ");
> -      df_print_regset (stderr, &exit_block_uses);
> +      df_print_regset (stderr, exit_block_uses);
>        fprintf (stderr, "df->exit_block_uses = ");
>        df_print_regset (stderr, df->exit_block_uses);
>        gcc_assert (0);
>      }
>
> -  bitmap_clear (&exit_block_uses);
> -
>    return is_eq;
>  }
>
> @@ -4231,8 +4215,6 @@ df_scan_verify (void)
>  {
>    unsigned int i;
>    basic_block bb;
> -  bitmap_head regular_block_artificial_uses;
> -  bitmap_head eh_block_artificial_uses;
>
>    if (!df)
>      return;
> @@ -4253,24 +4235,21 @@ df_scan_verify (void)
>    /* (2) There are various bitmaps whose value may change over the
>       course of the compilation.  This step recomputes them to make
>       sure that they have not slipped out of date.  */
> -  bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
> -  bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
> +  auto_bitmap regular_block_artificial_uses (&df_bitmap_obstack);
> +  auto_bitmap eh_block_artificial_uses (&df_bitmap_obstack);
>
> -  df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
> -  df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
> +  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
> +  df_get_eh_block_artificial_uses (eh_block_artificial_uses);
>
> -  bitmap_ior_into (&eh_block_artificial_uses,
> -                  &regular_block_artificial_uses);
> +  bitmap_ior_into (eh_block_artificial_uses,
> +                  regular_block_artificial_uses);
>
>    /* Check artificial_uses bitmaps didn't change. */
> -  gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
> +  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
>                               &df->regular_block_artificial_uses));
> -  gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
> +  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
>                               &df->eh_block_artificial_uses));
>
> -  bitmap_clear (&regular_block_artificial_uses);
> -  bitmap_clear (&eh_block_artificial_uses);
> -
>    /* Verify entry block and exit block. These only verify the bitmaps,
>       the refs are verified in df_bb_verify.  */
>    df_entry_block_bitmap_verify (true);
> diff --git a/gcc/lra-constraints.c b/gcc/lra-constraints.c
> index c8bc9b9a66f..ed4fdc49d7c 100644
> --- a/gcc/lra-constraints.c
> +++ b/gcc/lra-constraints.c
> @@ -4644,7 +4644,6 @@ lra_constraints (bool first_p)
>    unsigned int min_len, new_min_len, uid;
>    rtx set, x, reg, dest_reg;
>    basic_block last_bb;
> -  bitmap_head equiv_insn_bitmap;
>    bitmap_iterator bi;
>
>    lra_constraint_iter++;
> @@ -4676,7 +4675,7 @@ lra_constraints (bool first_p)
>    /* Do elimination before the equivalence processing as we can spill
>       some pseudos during elimination.  */
>    lra_eliminate (false, first_p);
> -  bitmap_initialize (&equiv_insn_bitmap, &reg_obstack);
> +  auto_bitmap equiv_insn_bitmap (&reg_obstack);
>    for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
>      if (lra_reg_info[i].nrefs != 0)
>        {
> @@ -4737,14 +4736,14 @@ lra_constraints (bool first_p)
>             if (contains_reg_p (x, false, true))
>               ira_reg_equiv[i].profitable_p = false;
>             if (get_equiv (reg) != reg)
> -             bitmap_ior_into (&equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
> +             bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
>           }
>        }
>    for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
>      update_equiv (i);
>    /* We should add all insns containing pseudos which should be
>       substituted by their equivalences.  */
> -  EXECUTE_IF_SET_IN_BITMAP (&equiv_insn_bitmap, 0, uid, bi)
> +  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
>      lra_push_insn_by_uid (uid);
>    min_len = lra_insn_stack_length ();
>    new_insns_num = 0;
> @@ -4775,7 +4774,7 @@ lra_constraints (bool first_p)
>           /* We need to check equivalence in debug insn and change
>              pseudo to the equivalent value if necessary.  */
>           curr_id = lra_get_insn_recog_data (curr_insn);
> -         if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn)))
> +         if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
>             {
>               rtx old = *curr_id->operand_loc[0];
>               *curr_id->operand_loc[0]
> @@ -4849,7 +4848,7 @@ lra_constraints (bool first_p)
>           /* Check non-transformed insns too for equiv change as USE
>              or CLOBBER don't need reloads but can contain pseudos
>              being changed on their equivalences.  */
> -         else if (bitmap_bit_p (&equiv_insn_bitmap, INSN_UID (curr_insn))
> +         else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
>                    && loc_equivalence_change_p (&PATTERN (curr_insn)))
>             {
>               lra_update_insn_regno_info (curr_insn);
> @@ -4857,7 +4856,7 @@ lra_constraints (bool first_p)
>             }
>         }
>      }
> -  bitmap_clear (&equiv_insn_bitmap);
> +
>    /* If we used a new hard regno, changed_p should be true because the
>       hard reg is assigned to a new pseudo.  */
>    if (flag_checking && !changed_p)
> @@ -6761,10 +6760,9 @@ undo_optional_reloads (void)
>    bitmap_iterator bi, bi2;
>    rtx_insn *insn;
>    rtx set, src, dest;
> -  bitmap_head removed_optional_reload_pseudos, insn_bitmap;
> +  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);
>
> -  bitmap_initialize (&removed_optional_reload_pseudos, &reg_obstack);
> -  bitmap_copy (&removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
> +  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
>    EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
>      {
>        keep_p = false;
> @@ -6799,19 +6797,19 @@ undo_optional_reloads (void)
>           }
>        if (keep_p)
>         {
> -         bitmap_clear_bit (&removed_optional_reload_pseudos, regno);
> +         bitmap_clear_bit (removed_optional_reload_pseudos, regno);
>           if (lra_dump_file != NULL)
>             fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
>         }
>      }
> -  change_p = ! bitmap_empty_p (&removed_optional_reload_pseudos);
> -  bitmap_initialize (&insn_bitmap, &reg_obstack);
> -  EXECUTE_IF_SET_IN_BITMAP (&removed_optional_reload_pseudos, 0, regno, bi)
> +  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
> +  auto_bitmap insn_bitmap (&reg_obstack);
> +  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
>      {
>        if (lra_dump_file != NULL)
>         fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
> -      bitmap_copy (&insn_bitmap, &lra_reg_info[regno].insn_bitmap);
> -      EXECUTE_IF_SET_IN_BITMAP (&insn_bitmap, 0, uid, bi2)
> +      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
> +      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
>         {
>           insn = lra_insn_recog_data[uid]->insn;
>           if ((set = single_set (insn)) != NULL_RTX)
> @@ -6855,8 +6853,6 @@ undo_optional_reloads (void)
>    /* Clear restore_regnos.  */
>    EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
>      lra_reg_info[regno].restore_rtx = NULL_RTX;
> -  bitmap_clear (&insn_bitmap);
> -  bitmap_clear (&removed_optional_reload_pseudos);
>    return change_p;
>  }
>
> @@ -6869,7 +6865,6 @@ lra_undo_inheritance (void)
>    int hard_regno;
>    int n_all_inherit, n_inherit, n_all_split, n_split;
>    rtx restore_rtx;
> -  bitmap_head remove_pseudos;
>    bitmap_iterator bi;
>    bool change_p;
>
> @@ -6880,7 +6875,7 @@ lra_undo_inheritance (void)
>      fprintf (lra_dump_file,
>              "\n********** Undoing inheritance #%d: **********\n\n",
>              lra_undo_inheritance_iter);
> -  bitmap_initialize (&remove_pseudos, &reg_obstack);
> +  auto_bitmap remove_pseudos (&reg_obstack);
>    n_inherit = n_all_inherit = 0;
>    EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
>      if (lra_reg_info[regno].restore_rtx != NULL_RTX)
> @@ -6892,7 +6887,7 @@ lra_undo_inheritance (void)
>                allocation we used shorter live-ranges.  */
>             && (! REG_P (lra_reg_info[regno].restore_rtx)
>                 || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
> -         bitmap_set_bit (&remove_pseudos, regno);
> +         bitmap_set_bit (remove_pseudos, regno);
>         else
>           n_inherit++;
>        }
> @@ -6910,7 +6905,7 @@ lra_undo_inheritance (void)
>         hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
>                       ? reg_renumber[restore_regno] : restore_regno);
>         if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
> -         bitmap_set_bit (&remove_pseudos, regno);
> +         bitmap_set_bit (remove_pseudos, regno);
>         else
>           {
>             n_split++;
> @@ -6923,8 +6918,7 @@ lra_undo_inheritance (void)
>      fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
>              n_split, n_all_split,
>              (double) n_split / n_all_split * 100);
> -  change_p = remove_inheritance_pseudos (&remove_pseudos);
> -  bitmap_clear (&remove_pseudos);
> +  change_p = remove_inheritance_pseudos (remove_pseudos);
>    /* Clear restore_regnos.  */
>    EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
>      lra_reg_info[regno].restore_rtx = NULL_RTX;
> diff --git a/gcc/lra-remat.c b/gcc/lra-remat.c
> index 2c51481374a..fb294edf368 100644
> --- a/gcc/lra-remat.c
> +++ b/gcc/lra-remat.c
> @@ -746,14 +746,12 @@ calculate_gen_cands (void)
>  {
>    basic_block bb;
>    bitmap gen_cands;
> -  bitmap_head gen_insns;
>    rtx_insn *insn;
>
> -  bitmap_initialize (&gen_insns, &reg_obstack);
>    FOR_EACH_BB_FN (bb, cfun)
>      {
>        gen_cands = &get_remat_bb_data (bb)->gen_cands;
> -      bitmap_clear (&gen_insns);
> +      auto_bitmap gen_insns (&reg_obstack);
>        FOR_BB_INSNS (bb, insn)
>         if (INSN_P (insn))
>           {
> @@ -782,7 +780,7 @@ calculate_gen_cands (void)
>                    reg = reg->next)
>                 if (reg->type != OP_IN
>                     || find_regno_note (insn, REG_DEAD, reg->regno) != NULL)
> -                 EXECUTE_IF_SET_IN_BITMAP (&gen_insns, 0, uid, bi)
> +                 EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
>                     {
>                       rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
>
> @@ -801,7 +799,7 @@ calculate_gen_cands (void)
>                     }
>
>             if (CALL_P (insn))
> -             EXECUTE_IF_SET_IN_BITMAP (&gen_insns, 0, uid, bi)
> +             EXECUTE_IF_SET_IN_BITMAP (gen_insns, 0, uid, bi)
>                 {
>                   rtx_insn *insn2 = lra_insn_recog_data[uid]->insn;
>
> @@ -813,17 +811,16 @@ calculate_gen_cands (void)
>                       bitmap_set_bit (&temp_bitmap, uid);
>                     }
>                 }
> -           bitmap_and_compl_into (&gen_insns, &temp_bitmap);
> +           bitmap_and_compl_into (gen_insns, &temp_bitmap);
>
>             cand = insn_to_cand[INSN_UID (insn)];
>             if (cand != NULL)
>               {
>                 bitmap_set_bit (gen_cands, cand->index);
> -               bitmap_set_bit (&gen_insns, INSN_UID (insn));
> +               bitmap_set_bit (gen_insns, INSN_UID (insn));
>               }
>           }
>      }
> -  bitmap_clear (&gen_insns);
>  }
>
>
> @@ -1059,15 +1056,13 @@ do_remat (void)
>    unsigned regno;
>    rtx_insn *insn;
>    basic_block bb;
> -  bitmap_head avail_cands;
> -  bitmap_head active_cands;
>    bool changed_p = false;
>    /* Living hard regs and hard registers of living pseudos.  */
>    HARD_REG_SET live_hard_regs;
>    bitmap_iterator bi;
>
> -  bitmap_initialize (&avail_cands, &reg_obstack);
> -  bitmap_initialize (&active_cands, &reg_obstack);
> +  auto_bitmap avail_cands (&reg_obstack);
> +  auto_bitmap active_cands (&reg_obstack);
>    FOR_EACH_BB_FN (bb, cfun)
>      {
>        CLEAR_HARD_REG_SET (live_hard_regs);
> @@ -1079,11 +1074,11 @@ do_remat (void)
>           if (hard_regno >= 0)
>             SET_HARD_REG_BIT (live_hard_regs, hard_regno);
>         }
> -      bitmap_and (&avail_cands, &get_remat_bb_data (bb)->avin_cands,
> +      bitmap_and (avail_cands, &get_remat_bb_data (bb)->avin_cands,
>                   &get_remat_bb_data (bb)->livein_cands);
>        /* Activating insns are always in the same block as their corresponding
>          remat insn, so at the start of a block the two bitsets are equal.  */
> -      bitmap_copy (&active_cands, &avail_cands);
> +      bitmap_copy (active_cands, avail_cands);
>        FOR_BB_INSNS (bb, insn)
>         {
>           if (!NONDEBUG_INSN_P (insn))
> @@ -1117,8 +1112,8 @@ do_remat (void)
>               for (cand = regno_cands[src_regno];
>                    cand != NULL;
>                    cand = cand->next_regno_cand)
> -               if (bitmap_bit_p (&avail_cands, cand->index)
> -                   && bitmap_bit_p (&active_cands, cand->index))
> +               if (bitmap_bit_p (avail_cands, cand->index)
> +                   && bitmap_bit_p (active_cands, cand->index))
>                   break;
>             }
>           int i, hard_regno, nregs;
> @@ -1189,7 +1184,7 @@ do_remat (void)
>                  reg = reg->next)
>               if (reg->type != OP_IN
>                   || find_regno_note (insn, REG_DEAD, reg->regno) != NULL)
> -               EXECUTE_IF_SET_IN_BITMAP (&avail_cands, 0, cid, bi)
> +               EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
>                   {
>                     cand = all_cands[cid];
>
> @@ -1203,7 +1198,7 @@ do_remat (void)
>                   }
>
>           if (CALL_P (insn))
> -           EXECUTE_IF_SET_IN_BITMAP (&avail_cands, 0, cid, bi)
> +           EXECUTE_IF_SET_IN_BITMAP (avail_cands, 0, cid, bi)
>               {
>                 cand = all_cands[cid];
>
> @@ -1211,22 +1206,22 @@ do_remat (void)
>                   bitmap_set_bit (&temp_bitmap, cand->index);
>               }
>
> -         bitmap_and_compl_into (&avail_cands, &temp_bitmap);
> +         bitmap_and_compl_into (avail_cands, &temp_bitmap);
>
>           /* Now see whether a candidate is made active or available
>              by this insn.  */
>           cand = insn_to_cand_activation[INSN_UID (insn)];
>           if (cand)
> -           bitmap_set_bit (&active_cands, cand->index);
> +           bitmap_set_bit (active_cands, cand->index);
>
>           cand = insn_to_cand[INSN_UID (insn)];
>           if (cand != NULL)
>             {
> -             bitmap_set_bit (&avail_cands, cand->index);
> +             bitmap_set_bit (avail_cands, cand->index);
>               if (cand->reload_regno == -1)
> -               bitmap_set_bit (&active_cands, cand->index);
> +               bitmap_set_bit (active_cands, cand->index);
>               else
> -               bitmap_clear_bit (&active_cands, cand->index);
> +               bitmap_clear_bit (active_cands, cand->index);
>             }
>
>           if (remat_insn != NULL)
> @@ -1274,8 +1269,6 @@ do_remat (void)
>               SET_HARD_REG_BIT (live_hard_regs, reg->regno);
>         }
>      }
> -  bitmap_clear (&avail_cands);
> -  bitmap_clear (&active_cands);
>    return changed_p;
>  }
>
> diff --git a/gcc/lra-spills.c b/gcc/lra-spills.c
> index 492fc182cf0..3df6f6786a3 100644
> --- a/gcc/lra-spills.c
> +++ b/gcc/lra-spills.c
> @@ -223,7 +223,6 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
>    rtx set;
>    basic_block bb;
>    HARD_REG_SET conflict_hard_regs;
> -  bitmap_head ok_insn_bitmap;
>    bitmap setjump_crosses = regstat_get_setjmp_crosses ();
>    /* Hard registers which can not be used for any purpose at given
>       program point because they are unallocatable or already allocated
> @@ -243,13 +242,13 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
>         for (p = r->start; p <= r->finish; p++)
>           add_to_hard_reg_set (&reserved_hard_regs[p],
>                                lra_reg_info[i].biggest_mode, hard_regno);
> -  bitmap_initialize (&ok_insn_bitmap, &reg_obstack);
> +  auto_bitmap ok_insn_bitmap (&reg_obstack);
>    FOR_EACH_BB_FN (bb, cfun)
>      FOR_BB_INSNS (bb, insn)
>        if (DEBUG_INSN_P (insn)
>           || ((set = single_set (insn)) != NULL_RTX
>               && REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))))
> -       bitmap_set_bit (&ok_insn_bitmap, INSN_UID (insn));
> +       bitmap_set_bit (ok_insn_bitmap, INSN_UID (insn));
>    for (res = i = 0; i < n; i++)
>      {
>        regno = pseudo_regnos[i];
> @@ -260,7 +259,7 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
>                  targetm.spill_class ((reg_class_t) rclass,
>                                       PSEUDO_REGNO_MODE (regno)))) == NO_REGS
>           || bitmap_intersect_compl_p (&lra_reg_info[regno].insn_bitmap,
> -                                      &ok_insn_bitmap))
> +                                      ok_insn_bitmap))
>         {
>           pseudo_regnos[res++] = regno;
>           continue;
> @@ -300,7 +299,6 @@ assign_spill_hard_regs (int *pseudo_regnos, int n)
>         /* Just loop.  */
>         df_set_regs_ever_live (hard_regno + nr, true);
>      }
> -  bitmap_clear (&ok_insn_bitmap);
>    free (reserved_hard_regs);
>    return res;
>  }
> @@ -449,17 +447,16 @@ spill_pseudos (void)
>    basic_block bb;
>    rtx_insn *insn, *curr;
>    int i;
> -  bitmap_head spilled_pseudos, changed_insns;
>
> -  bitmap_initialize (&spilled_pseudos, &reg_obstack);
> -  bitmap_initialize (&changed_insns, &reg_obstack);
> +  auto_bitmap spilled_pseudos (&reg_obstack);
> +  auto_bitmap changed_insns (&reg_obstack);
>    for (i = FIRST_PSEUDO_REGISTER; i < regs_num; i++)
>      {
>        if (lra_reg_info[i].nrefs != 0 && lra_get_regno_hard_regno (i) < 0
>           && ! lra_former_scratch_p (i))
>         {
> -         bitmap_set_bit (&spilled_pseudos, i);
> -         bitmap_ior_into (&changed_insns, &lra_reg_info[i].insn_bitmap);
> +         bitmap_set_bit (spilled_pseudos, i);
> +         bitmap_ior_into (changed_insns, &lra_reg_info[i].insn_bitmap);
>         }
>      }
>    FOR_EACH_BB_FN (bb, cfun)
> @@ -468,7 +465,7 @@ spill_pseudos (void)
>         {
>           bool removed_pseudo_p = false;
>
> -         if (bitmap_bit_p (&changed_insns, INSN_UID (insn)))
> +         if (bitmap_bit_p (changed_insns, INSN_UID (insn)))
>             {
>               rtx *link_loc, link;
>
> @@ -526,12 +523,10 @@ spill_pseudos (void)
>                          "Debug insn #%u is reset because it referenced "
>                          "removed pseudo\n", INSN_UID (insn));
>             }
> -         bitmap_and_compl_into (df_get_live_in (bb), &spilled_pseudos);
> -         bitmap_and_compl_into (df_get_live_out (bb), &spilled_pseudos);
> +         bitmap_and_compl_into (df_get_live_in (bb), spilled_pseudos);
> +         bitmap_and_compl_into (df_get_live_out (bb), spilled_pseudos);
>         }
>      }
> -  bitmap_clear (&spilled_pseudos);
> -  bitmap_clear (&changed_insns);
>  }
>
>  /* Return true if we need to change some pseudos into memory.  */
> diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
> index 8175d2599ed..ca212daee62 100644
> --- a/gcc/tree-ssa-pre.c
> +++ b/gcc/tree-ssa-pre.c
> @@ -817,19 +817,17 @@ bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
>
>    if (dest != orig)
>      {
> -      bitmap_head temp;
> -      bitmap_initialize (&temp, &grand_bitmap_obstack);
> +      auto_bitmap temp (&grand_bitmap_obstack);
>
>        bitmap_and_into (&dest->values, &orig->values);
> -      bitmap_copy (&temp, &dest->expressions);
> -      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
> +      bitmap_copy (temp, &dest->expressions);
> +      EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
>         {
>           pre_expr expr = expression_for_id (i);
>           unsigned int value_id = get_expr_value_id (expr);
>           if (!bitmap_bit_p (&dest->values, value_id))
>             bitmap_clear_bit (&dest->expressions, i);
>         }
> -      bitmap_clear (&temp);
>      }
>  }
>
> @@ -862,18 +860,15 @@ bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
>  {
>    unsigned int i;
>    bitmap_iterator bi;
> -  bitmap_head temp;
> +  auto_bitmap temp (&grand_bitmap_obstack);
>
> -  bitmap_initialize (&temp, &grand_bitmap_obstack);
> -
> -  bitmap_copy (&temp, &a->expressions);
> -  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
> +  bitmap_copy (temp, &a->expressions);
> +  EXECUTE_IF_SET_IN_BITMAP (temp, 0, i, bi)
>      {
>        pre_expr expr = expression_for_id (i);
>        if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
>         bitmap_remove_from_set (a, expr);
>      }
> -  bitmap_clear (&temp);
>  }
>
>
> --
> 2.11.0
>
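
For reference, the pattern applied throughout the hunks above is roughly
the following before/after sketch.  These are fragments only, assuming the
usual df context with bitmap.h included; "src" stands for whatever source
bitmap the real code uses:

  /* Before: explicit lifetime management on df_bitmap_obstack.  */
  bitmap_head tmp;
  bitmap_initialize (&tmp, &df_bitmap_obstack);
  bitmap_copy (&tmp, src);
  /* ... use &tmp ... */
  bitmap_clear (&tmp);

  /* After: auto_bitmap allocates on the same obstack and releases its
     bits automatically when it goes out of scope.  */
  auto_bitmap tmp (&df_bitmap_obstack);
  bitmap_copy (tmp, src);
  /* ... use tmp; no trailing bitmap_clear needed.  */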

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 13/13] make inverted_post_order_compute() operate on a vec
  2017-05-09 20:53 ` [PATCH 13/13] make inverted_post_order_compute() operate on a vec tbsaunde+gcc
@ 2017-05-10  8:44   ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10  8:44 UTC (permalink / raw)
  To: tbsaunde+gcc; +Cc: GCC Patches

On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>
> gcc/ChangeLog:

Ok.

Richard.

> 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>
>         * cfganal.c (inverted_post_order_compute): Change argument type
>         to vec *.
>         * cfganal.h (inverted_post_order_compute): Adjust prototype.
>         * df-core.c (rest_of_handle_df_initialize): Adjust.
>         (rest_of_handle_df_finish): Likewise.
>         (df_analyze_1): Likewise.
>         (df_analyze): Likewise.
>         (loop_inverted_post_order_compute): Change argument to be a vec *.
>         (df_analyze_loop): Adjust.
>         (df_get_n_blocks): Likewise.
>         (df_get_postorder): Likewise.
>         * df.h (struct df_d): Change field to be a vec.
>         * lcm.c (compute_laterin): Adjust.
>         (compute_available): Likewise.
>         * lra-lives.c (lra_create_live_ranges_1): Likewise.
>         * tree-ssa-dce.c (remove_dead_stmt): Likewise.
>         * tree-ssa-pre.c (compute_antic): Likewise.
> ---
>  gcc/cfganal.c      | 14 ++++++--------
>  gcc/cfganal.h      |  2 +-
>  gcc/df-core.c      | 56 +++++++++++++++++++++++++-----------------------------
>  gcc/df.h           |  4 +---
>  gcc/lcm.c          | 14 ++++++--------
>  gcc/lra-lives.c    |  9 ++++-----
>  gcc/tree-ssa-dce.c | 10 ++++------
>  gcc/tree-ssa-pre.c |  9 ++++-----
>  8 files changed, 52 insertions(+), 66 deletions(-)
>
> diff --git a/gcc/cfganal.c b/gcc/cfganal.c
> index 27b453ca3f7..a3a6ea86994 100644
> --- a/gcc/cfganal.c
> +++ b/gcc/cfganal.c
> @@ -790,12 +790,12 @@ dfs_find_deadend (basic_block bb)
>     and start looking for a "dead end" from that block
>     and do another inverted traversal from that block.  */
>
> -int
> -inverted_post_order_compute (int *post_order,
> +void
> +inverted_post_order_compute (vec<int> *post_order,
>                              sbitmap *start_points)
>  {
>    basic_block bb;
> -  int post_order_num = 0;
> +  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));
>
>    if (flag_checking)
>      verify_no_unreachable_blocks ();
> @@ -863,13 +863,13 @@ inverted_post_order_compute (int *post_order,
>                     time, check its predecessors.  */
>                 stack.quick_push (ei_start (pred->preds));
>                else
> -                post_order[post_order_num++] = pred->index;
> +               post_order->quick_push (pred->index);
>              }
>            else
>              {
>               if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
>                   && ei_one_before_end_p (ei))
> -                post_order[post_order_num++] = bb->index;
> +               post_order->quick_push (bb->index);
>
>                if (!ei_one_before_end_p (ei))
>                 ei_next (&stack.last ());
> @@ -927,9 +927,7 @@ inverted_post_order_compute (int *post_order,
>    while (!stack.is_empty ());
>
>    /* EXIT_BLOCK is always included.  */
> -  post_order[post_order_num++] = EXIT_BLOCK;
> -
> -  return post_order_num;
> +  post_order->quick_push (EXIT_BLOCK);
>  }
>
>  /* Compute the depth first search order of FN and store in the array
> diff --git a/gcc/cfganal.h b/gcc/cfganal.h
> index 7df484b8441..39bb5e547a5 100644
> --- a/gcc/cfganal.h
> +++ b/gcc/cfganal.h
> @@ -63,7 +63,7 @@ extern void add_noreturn_fake_exit_edges (void);
>  extern void connect_infinite_loops_to_exit (void);
>  extern int post_order_compute (int *, bool, bool);
>  extern basic_block dfs_find_deadend (basic_block);
> -extern int inverted_post_order_compute (int *, sbitmap *start_points = 0);
> +extern void inverted_post_order_compute (vec<int> *postorder, sbitmap *start_points = 0);
>  extern int pre_and_rev_post_order_compute_fn (struct function *,
>                                               int *, int *, bool);
>  extern int pre_and_rev_post_order_compute (int *, int *, bool);
> diff --git a/gcc/df-core.c b/gcc/df-core.c
> index 1b270d417aa..1e84d4d948f 100644
> --- a/gcc/df-core.c
> +++ b/gcc/df-core.c
> @@ -702,10 +702,9 @@ rest_of_handle_df_initialize (void)
>      df_live_add_problem ();
>
>    df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
> -  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
>    df->n_blocks = post_order_compute (df->postorder, true, true);
> -  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
> -  gcc_assert (df->n_blocks == df->n_blocks_inverted);
> +  inverted_post_order_compute (&df->postorder_inverted);
> +  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
>
>    df->hard_regs_live_count = XCNEWVEC (unsigned int, FIRST_PSEUDO_REGISTER);
>
> @@ -816,7 +815,7 @@ rest_of_handle_df_finish (void)
>      }
>
>    free (df->postorder);
> -  free (df->postorder_inverted);
> +  df->postorder_inverted.release ();
>    free (df->hard_regs_live_count);
>    free (df);
>    df = NULL;
> @@ -1198,7 +1197,7 @@ df_analyze_1 (void)
>    int i;
>
>    /* These should be the same.  */
> -  gcc_assert (df->n_blocks == df->n_blocks_inverted);
> +  gcc_assert ((unsigned) df->n_blocks == df->postorder_inverted.length ());
>
>    /* We need to do this before the df_verify_all because this is
>       not kept incrementally up to date.  */
> @@ -1222,8 +1221,8 @@ df_analyze_1 (void)
>            if (dflow->problem->dir == DF_FORWARD)
>              df_analyze_problem (dflow,
>                                  df->blocks_to_analyze,
> -                                df->postorder_inverted,
> -                                df->n_blocks_inverted);
> +                               df->postorder_inverted.address (),
> +                               df->postorder_inverted.length ());
>            else
>              df_analyze_problem (dflow,
>                                  df->blocks_to_analyze,
> @@ -1249,23 +1248,21 @@ void
>  df_analyze (void)
>  {
>    bitmap current_all_blocks = BITMAP_ALLOC (&df_bitmap_obstack);
> -  int i;
>
>    free (df->postorder);
> -  free (df->postorder_inverted);
>    df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
> -  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
>    df->n_blocks = post_order_compute (df->postorder, true, true);
> -  df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
> +  df->postorder_inverted.truncate (0);
> +  inverted_post_order_compute (&df->postorder_inverted);
>
> -  for (i = 0; i < df->n_blocks; i++)
> +  for (int i = 0; i < df->n_blocks; i++)
>      bitmap_set_bit (current_all_blocks, df->postorder[i]);
>
>    if (flag_checking)
>      {
>        /* Verify that POSTORDER_INVERTED only contains blocks reachable from
>          the ENTRY block.  */
> -      for (i = 0; i < df->n_blocks_inverted; i++)
> +      for (unsigned int i = 0; i < df->postorder_inverted.length (); i++)
>         gcc_assert (bitmap_bit_p (current_all_blocks,
>                                   df->postorder_inverted[i]));
>      }
> @@ -1277,9 +1274,10 @@ df_analyze (void)
>        bitmap_and_into (df->blocks_to_analyze, current_all_blocks);
>        df->n_blocks = df_prune_to_subcfg (df->postorder,
>                                          df->n_blocks, df->blocks_to_analyze);
> -      df->n_blocks_inverted = df_prune_to_subcfg (df->postorder_inverted,
> -                                                 df->n_blocks_inverted,
> +      unsigned int newlen = df_prune_to_subcfg (df->postorder_inverted.address (),
> +                                               df->postorder_inverted.length (),
>                                                   df->blocks_to_analyze);
> +      df->postorder_inverted.truncate (newlen);
>        BITMAP_FREE (current_all_blocks);
>      }
>    else
> @@ -1355,13 +1353,14 @@ loop_post_order_compute (int *post_order, struct loop *loop)
>  /* Compute the reverse top sort order of the inverted sub-CFG specified
>     by LOOP.  Returns the number of blocks which is always loop->num_nodes.  */
>
> -static int
> -loop_inverted_post_order_compute (int *post_order, struct loop *loop)
> +static void
> +loop_inverted_post_order_compute (vec<int> *post_order, struct loop *loop)
>  {
>    basic_block bb;
>    edge_iterator *stack;
>    int sp;
> -  int post_order_num = 0;
> +
> +  post_order->reserve_exact (loop->num_nodes);
>
>    /* Allocate stack for back-tracking up CFG.  */
>    stack = XNEWVEC (edge_iterator, loop->num_nodes + 1);
> @@ -1398,13 +1397,13 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
>                time, check its predecessors.  */
>             stack[sp++] = ei_start (pred->preds);
>           else
> -           post_order[post_order_num++] = pred->index;
> +           post_order->quick_push (pred->index);
>         }
>        else
>         {
>           if (flow_bb_inside_loop_p (loop, bb)
>               && ei_one_before_end_p (ei))
> -           post_order[post_order_num++] = bb->index;
> +           post_order->quick_push (bb->index);
>
>           if (!ei_one_before_end_p (ei))
>             ei_next (&stack[sp - 1]);
> @@ -1414,7 +1413,6 @@ loop_inverted_post_order_compute (int *post_order, struct loop *loop)
>      }
>
>    free (stack);
> -  return post_order_num;
>  }
>
>
> @@ -1424,15 +1422,13 @@ void
>  df_analyze_loop (struct loop *loop)
>  {
>    free (df->postorder);
> -  free (df->postorder_inverted);
>
>    df->postorder = XNEWVEC (int, loop->num_nodes);
> -  df->postorder_inverted = XNEWVEC (int, loop->num_nodes);
> +  df->postorder_inverted.truncate (0);
>    df->n_blocks = loop_post_order_compute (df->postorder, loop);
> -  df->n_blocks_inverted
> -    = loop_inverted_post_order_compute (df->postorder_inverted, loop);
> +    loop_inverted_post_order_compute (&df->postorder_inverted, loop);
>    gcc_assert ((unsigned) df->n_blocks == loop->num_nodes);
> -  gcc_assert ((unsigned) df->n_blocks_inverted == loop->num_nodes);
> +  gcc_assert (df->postorder_inverted.length () == loop->num_nodes);
>
>    bitmap blocks = BITMAP_ALLOC (&df_bitmap_obstack);
>    for (int i = 0; i < df->n_blocks; ++i)
> @@ -1453,8 +1449,8 @@ df_get_n_blocks (enum df_flow_dir dir)
>
>    if (dir == DF_FORWARD)
>      {
> -      gcc_assert (df->postorder_inverted);
> -      return df->n_blocks_inverted;
> +      gcc_assert (df->postorder_inverted.length ());
> +      return df->postorder_inverted.length ();
>      }
>
>    gcc_assert (df->postorder);
> @@ -1473,8 +1469,8 @@ df_get_postorder (enum df_flow_dir dir)
>
>    if (dir == DF_FORWARD)
>      {
> -      gcc_assert (df->postorder_inverted);
> -      return df->postorder_inverted;
> +      gcc_assert (df->postorder_inverted.length ());
> +      return df->postorder_inverted.address ();
>      }
>    gcc_assert (df->postorder);
>    return df->postorder;
> diff --git a/gcc/df.h b/gcc/df.h
> index 681ff32098e..07fd3345d9d 100644
> --- a/gcc/df.h
> +++ b/gcc/df.h
> @@ -582,11 +582,9 @@ struct df_d
>    bitmap_head insns_to_notes_rescan;
>    int *postorder;                /* The current set of basic blocks
>                                      in reverse postorder.  */
> -  int *postorder_inverted;       /* The current set of basic blocks
> +  vec<int> postorder_inverted;       /* The current set of basic blocks
>                                      in reverse postorder of inverted CFG.  */
>    int n_blocks;                  /* The number of blocks in reverse postorder.  */
> -  int n_blocks_inverted;         /* The number of blocks
> -                                    in reverse postorder of inverted CFG.  */
>
>    /* An array [FIRST_PSEUDO_REGISTER], indexed by regno, of the number
>       of refs that qualify as being real hard regs uses.  Artificial
> diff --git a/gcc/lcm.c b/gcc/lcm.c
> index edc86b57009..e8666274211 100644
> --- a/gcc/lcm.c
> +++ b/gcc/lcm.c
> @@ -270,9 +270,9 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
>
>    /* Add all the blocks to the worklist.  This prevents an early exit from
>       the loop given our optimistic initialization of LATER above.  */
> -  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
> -  int postorder_num = inverted_post_order_compute (postorder);
> -  for (int i = 0; i < postorder_num; ++i)
> +  auto_vec<int, 20> postorder;
> +  inverted_post_order_compute (&postorder);
> +  for (unsigned int i = 0; i < postorder.length (); ++i)
>      {
>        bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
>        if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
> @@ -281,7 +281,6 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
>        *qin++ = bb;
>        bb->aux = bb;
>      }
> -  free (postorder);
>
>    /* Note that we do not use the last allocated element for our queue,
>       as EXIT_BLOCK is never inserted into it. */
> @@ -512,9 +511,9 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
>    /* Put every block on the worklist; this is necessary because of the
>       optimistic initialization of AVOUT above.  Use inverted postorder
>       to make the dataflow problem require less iterations.  */
> -  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
> -  int postorder_num = inverted_post_order_compute (postorder);
> -  for (int i = 0; i < postorder_num; ++i)
> +  auto_vec<int, 20> postorder;
> +  inverted_post_order_compute (&postorder);
> +  for (unsigned int i = 0; i < postorder.length (); ++i)
>      {
>        bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
>        if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
> @@ -523,7 +522,6 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
>        *qin++ = bb;
>        bb->aux = bb;
>      }
> -  free (postorder);
>
>    qin = worklist;
>    qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
> diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
> index 5d4015b5ab9..e728e348215 100644
> --- a/gcc/lra-lives.c
> +++ b/gcc/lra-lives.c
> @@ -1287,11 +1287,11 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
>    point_freq_vec.truncate (0);
>    point_freq_vec.reserve_exact (new_length);
>    lra_point_freq = point_freq_vec.address ();
> -  int *post_order_rev_cfg = XNEWVEC (int, last_basic_block_for_fn (cfun));
> -  int n_blocks_inverted = inverted_post_order_compute (post_order_rev_cfg);
> -  lra_assert (n_blocks_inverted == n_basic_blocks_for_fn (cfun));
> +  auto_vec<int, 20> post_order_rev_cfg;
> +  inverted_post_order_compute (&post_order_rev_cfg);
> +  lra_assert (post_order_rev_cfg.length () == (unsigned) n_basic_blocks_for_fn (cfun));
>    bb_live_change_p = false;
> -  for (i = n_blocks_inverted - 1; i >= 0; --i)
> +  for (i = post_order_rev_cfg.length () - 1; i >= 0; --i)
>      {
>        bb = BASIC_BLOCK_FOR_FN (cfun, post_order_rev_cfg[i]);
>        if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
> @@ -1338,7 +1338,6 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
>             }
>         }
>      }
> -  free (post_order_rev_cfg);
>    lra_live_max_point = curr_point;
>    if (lra_dump_file != NULL)
>      print_live_ranges (lra_dump_file);
> diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
> index e17659df91f..150e4f73185 100644
> --- a/gcc/tree-ssa-dce.c
> +++ b/gcc/tree-ssa-dce.c
> @@ -1042,14 +1042,12 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
>         {
>           if (!bb_postorder)
>             {
> -             int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
> -             int postorder_num
> -                = inverted_post_order_compute (postorder,
> -                                               &bb_contains_live_stmts);
> +             auto_vec<int, 20> postorder;
> +                inverted_post_order_compute (&postorder,
> +                                             &bb_contains_live_stmts);
>               bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
> -             for (int i = 0; i < postorder_num; ++i)
> +             for (unsigned int i = 0; i < postorder.length (); ++i)
>                  bb_postorder[postorder[i]] = i;
> -             free (postorder);
>             }
>            FOR_EACH_EDGE (e2, ei, bb->succs)
>             if (!e || e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
> diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
> index ca212daee62..6ffcd7b8eb4 100644
> --- a/gcc/tree-ssa-pre.c
> +++ b/gcc/tree-ssa-pre.c
> @@ -2388,8 +2388,8 @@ compute_antic (void)
>    /* For ANTIC computation we need a postorder that also guarantees that
>       a block with a single successor is visited after its successor.
>       RPO on the inverted CFG has this property.  */
> -  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
> -  int postorder_num = inverted_post_order_compute (postorder);
> +  auto_vec<int, 20> postorder;
> +  inverted_post_order_compute (&postorder);
>
>    auto_sbitmap worklist (last_basic_block_for_fn (cfun) + 1);
>    bitmap_ones (worklist);
> @@ -2403,7 +2403,7 @@ compute_antic (void)
>          for PA ANTIC computation.  */
>        num_iterations++;
>        changed = false;
> -      for (i = postorder_num - 1; i >= 0; i--)
> +      for (i = postorder.length () - 1; i >= 0; i--)
>         {
>           if (bitmap_bit_p (worklist, postorder[i]))
>             {
> @@ -2430,7 +2430,7 @@ compute_antic (void)
>      {
>        /* For partial antic we ignore backedges and thus we do not need
>           to perform any iteration when we process blocks in postorder.  */
> -      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
> +      int postorder_num = pre_and_rev_post_order_compute (NULL, postorder.address (), false);
>        for (i = postorder_num - 1 ; i >= 0; i--)
>         {
>           basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
> @@ -2441,7 +2441,6 @@ compute_antic (void)
>      }
>
>    sbitmap_free (has_abnormal_preds);
> -  free (postorder);
>  }
>
>
> --
> 2.11.0
>
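
For reference, the caller-side effect of the interface change above is
roughly the following sketch.  These are fragments only, assuming
cfganal.h and vec.h as patched:

  /* Old interface: the caller allocates and frees a raw array and keeps
     the element count in a separate variable.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    /* ... postorder[i] ... */;
  free (postorder);

  /* New interface: the function fills a vec, which carries its own length
     and, when an auto_vec is used, releases its storage automatically.  */
  auto_vec<int, 20> postorder;
  inverted_post_order_compute (&postorder);
  for (unsigned int i = 0; i < postorder.length (); ++i)
    /* ... postorder[i] ... */;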

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 01/13] improve safety of freeing bitmaps
  2017-05-10  8:15   ` Richard Biener
@ 2017-05-10 10:55     ` Trevor Saunders
  2017-05-10 11:11       ` Richard Biener
  0 siblings, 1 reply; 34+ messages in thread
From: Trevor Saunders @ 2017-05-10 10:55 UTC (permalink / raw)
  To: Richard Biener; +Cc: tbsaunde+gcc, GCC Patches

On Wed, May 10, 2017 at 10:14:17AM +0200, Richard Biener wrote:
> On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
> >
> > There's two groups of changes here, first taking a sbitmap &, so that we
> > can assign null to the pointer after freeing the sbitmap to prevent use
> > after free through that pointer.  Second we define overloads of
> > sbitmap_free and bitmap_free taking auto_sbitmap and auto_bitmap
> > respectively, so that you can't double free the bitmap owned by an
> > auto_{s,}bitmap.
> 
> Looks good - but what do you need the void *& overload for?!  That at least
> needs a comment.

Yeah, it's gross; I put it in to be compatible with the previous macro.
The first problem with removing it is that cfgexpand.c:663 and
presumably other places do BITMAP_FREE(bb->aux), which of course
depends on being able to pass in a void *.  I'll add a comment and try
to look into removing it.
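
For illustration, here is a minimal self-contained sketch of the
overload-resolution issue, using toy types rather than the real GCC
definitions:

  #include <cstdlib>

  struct bitmap_head {};                /* toy stand-in for GCC's type */
  typedef bitmap_head *bitmap;

  static void bitmap_obstack_free (bitmap b) { std::free (b); }

  inline void BITMAP_FREE (bitmap &b)
  { bitmap_obstack_free (b); b = NULL; }
  inline void BITMAP_FREE (void *&p)
  { bitmap_obstack_free ((bitmap) p); p = NULL; }

  struct basic_block_def { void *aux; };

  int main ()
  {
    basic_block_def bb;
    bb.aux = std::calloc (1, sizeof (bitmap_head));
    /* bb.aux is a void * lvalue, so it cannot bind to "bitmap &"; without
       the void *& overload this call would not compile.  */
    BITMAP_FREE (bb.aux);
    return 0;
  }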

  Trev

> 
> Richard.
> 
> > gcc/ChangeLog:
> >
> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
> >
> >         * bitmap.h (BITMAP_FREE): Convert from macro to inline function
> >         and add overloaded decl for auto_bitmap.
> >         * sbitmap.h (inline void sbitmap_free): Add overload for
> >         auto_sbitmap, and change sbitmap to  point to null.
> > ---
> >  gcc/bitmap.h  | 21 +++++++++++++++++++--
> >  gcc/sbitmap.h |  7 ++++++-
> >  2 files changed, 25 insertions(+), 3 deletions(-)
> >
> > diff --git a/gcc/bitmap.h b/gcc/bitmap.h
> > index f158b447357..7508239cff9 100644
> > --- a/gcc/bitmap.h
> > +++ b/gcc/bitmap.h
> > @@ -129,6 +129,8 @@ along with GCC; see the file COPYING3.  If not see
> >
> >  #include "obstack.h"
> >
> > +   class auto_bitmap;
> > +
> >  /* Bitmap memory usage.  */
> >  struct bitmap_usage: public mem_usage
> >  {
> > @@ -372,8 +374,23 @@ extern hashval_t bitmap_hash (const_bitmap);
> >  #define BITMAP_GGC_ALLOC() bitmap_gc_alloc ()
> >
> >  /* Do any cleanup needed on a bitmap when it is no longer used.  */
> > -#define BITMAP_FREE(BITMAP) \
> > -       ((void) (bitmap_obstack_free ((bitmap) BITMAP), (BITMAP) = (bitmap) NULL))
> > +inline void
> > +BITMAP_FREE (bitmap &b)
> > +{
> > +  bitmap_obstack_free ((bitmap) b);
> > +  b = NULL;
> > +}
> > +
> > +inline void
> > +BITMAP_FREE (void *&b)
> > +{
> > +  bitmap_obstack_free ((bitmap) b);
> > +  b = NULL;
> > +}
> > +
> > +/* Intentionally unimplemented to ensure it is never called with an
> > +   auto_bitmap argument.  */
> > +void BITMAP_FREE (auto_bitmap);
> >
> >  /* Iterator for bitmaps.  */
> >
> > diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
> > index ce4d27d927c..cba0452cdb9 100644
> > --- a/gcc/sbitmap.h
> > +++ b/gcc/sbitmap.h
> > @@ -82,6 +82,8 @@ along with GCC; see the file COPYING3.  If not see
> >  #define SBITMAP_ELT_BITS (HOST_BITS_PER_WIDEST_FAST_INT * 1u)
> >  #define SBITMAP_ELT_TYPE unsigned HOST_WIDEST_FAST_INT
> >
> > +class auto_sbitmap;
> > +
> >  struct simple_bitmap_def
> >  {
> >    unsigned int n_bits;         /* Number of bits.  */
> > @@ -208,11 +210,14 @@ bmp_iter_next (sbitmap_iterator *i, unsigned *bit_no ATTRIBUTE_UNUSED)
> >         bmp_iter_next (&(ITER), &(BITNUM)))
> >  #endif
> >
> > -inline void sbitmap_free (sbitmap map)
> > +inline void sbitmap_free (sbitmap &map)
> >  {
> >    free (map);
> > +  map = NULL;
> >  }
> >
> > +void sbitmap_free (auto_sbitmap);
> > +
> >  inline void sbitmap_vector_free (sbitmap * vec)
> >  {
> >    free (vec);
> > --
> > 2.11.0
> >

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 01/13] improve safety of freeing bitmaps
  2017-05-10 10:55     ` Trevor Saunders
@ 2017-05-10 11:11       ` Richard Biener
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Biener @ 2017-05-10 11:11 UTC (permalink / raw)
  To: Trevor Saunders; +Cc: tbsaunde+gcc, GCC Patches

On Wed, May 10, 2017 at 12:52 PM, Trevor Saunders <tbsaunde@tbsaunde.org> wrote:
> On Wed, May 10, 2017 at 10:14:17AM +0200, Richard Biener wrote:
>> On Tue, May 9, 2017 at 10:52 PM,  <tbsaunde+gcc@tbsaunde.org> wrote:
>> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>> >
>> > There's two groups of changes here, first taking a sbitmap &, so that we
>> > can assign null to the pointer after freeing the sbitmap to prevent use
>> > after free through that pointer.  Second we define overloads of
>> > sbitmap_free and bitmap_free taking auto_sbitmap and auto_bitmap
>> > respectively, so that you can't double free the bitmap owned by an
>> > auto_{s,}bitmap.
>>
>> Looks good - but what do you need the void *& overload for?!  That at least
>> needs a comment.
>
> Yeah, it's gross; I put it in to be compatible with the previous macro.
> The first problem with removing it is that cfgexpand.c:663 and
> presumably other places do BITMAP_FREE(bb->aux), which of course
> depends on being able to pass in a void *.  I'll add a comment and try
> to look into removing it.

Yeah, please remove it by fixing callers instead.

Richard.

>   Trev
>
>>
>> Richard.
>>
>> > gcc/ChangeLog:
>> >
>> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>> >
>> >         * bitmap.h (BITMAP_FREE): Convert from macro to inline function
>> >         and add overloaded decl for auto_bitmap.
>> >         * sbitmap.h (inline void sbitmap_free): Add overload for
>> >         auto_sbitmap, and change sbitmap to  point to null.
>> > ---
>> >  gcc/bitmap.h  | 21 +++++++++++++++++++--
>> >  gcc/sbitmap.h |  7 ++++++-
>> >  2 files changed, 25 insertions(+), 3 deletions(-)
>> >
>> > diff --git a/gcc/bitmap.h b/gcc/bitmap.h
>> > index f158b447357..7508239cff9 100644
>> > --- a/gcc/bitmap.h
>> > +++ b/gcc/bitmap.h
>> > @@ -129,6 +129,8 @@ along with GCC; see the file COPYING3.  If not see
>> >
>> >  #include "obstack.h"
>> >
>> > +   class auto_bitmap;
>> > +
>> >  /* Bitmap memory usage.  */
>> >  struct bitmap_usage: public mem_usage
>> >  {
>> > @@ -372,8 +374,23 @@ extern hashval_t bitmap_hash (const_bitmap);
>> >  #define BITMAP_GGC_ALLOC() bitmap_gc_alloc ()
>> >
>> >  /* Do any cleanup needed on a bitmap when it is no longer used.  */
>> > -#define BITMAP_FREE(BITMAP) \
>> > -       ((void) (bitmap_obstack_free ((bitmap) BITMAP), (BITMAP) = (bitmap) NULL))
>> > +inline void
>> > +BITMAP_FREE (bitmap &b)
>> > +{
>> > +  bitmap_obstack_free ((bitmap) b);
>> > +  b = NULL;
>> > +}
>> > +
>> > +inline void
>> > +BITMAP_FREE (void *&b)
>> > +{
>> > +  bitmap_obstack_free ((bitmap) b);
>> > +  b = NULL;
>> > +}
>> > +
>> > +/* Intentionally unimplemented to ensure it is never called with an
>> > +   auto_bitmap argument.  */
>> > +void BITMAP_FREE (auto_bitmap);
>> >
>> >  /* Iterator for bitmaps.  */
>> >
>> > diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
>> > index ce4d27d927c..cba0452cdb9 100644
>> > --- a/gcc/sbitmap.h
>> > +++ b/gcc/sbitmap.h
>> > @@ -82,6 +82,8 @@ along with GCC; see the file COPYING3.  If not see
>> >  #define SBITMAP_ELT_BITS (HOST_BITS_PER_WIDEST_FAST_INT * 1u)
>> >  #define SBITMAP_ELT_TYPE unsigned HOST_WIDEST_FAST_INT
>> >
>> > +class auto_sbitmap;
>> > +
>> >  struct simple_bitmap_def
>> >  {
>> >    unsigned int n_bits;         /* Number of bits.  */
>> > @@ -208,11 +210,14 @@ bmp_iter_next (sbitmap_iterator *i, unsigned *bit_no ATTRIBUTE_UNUSED)
>> >         bmp_iter_next (&(ITER), &(BITNUM)))
>> >  #endif
>> >
>> > -inline void sbitmap_free (sbitmap map)
>> > +inline void sbitmap_free (sbitmap &map)
>> >  {
>> >    free (map);
>> > +  map = NULL;
>> >  }
>> >
>> > +void sbitmap_free (auto_sbitmap);
>> > +
>> >  inline void sbitmap_vector_free (sbitmap * vec)
>> >  {
>> >    free (vec);
>> > --
>> > 2.11.0
>> >

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-10  6:58   ` Richard Sandiford
@ 2017-05-11  7:50     ` Trevor Saunders
  2017-05-11  8:18       ` Richard Biener
  0 siblings, 1 reply; 34+ messages in thread
From: Trevor Saunders @ 2017-05-11  7:50 UTC (permalink / raw)
  To: tbsaunde+gcc, gcc-patches, richard.sandiford

On Wed, May 10, 2017 at 07:54:13AM +0100, Richard Sandiford wrote:
> tbsaunde+gcc@tbsaunde.org writes:
> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
> >
> > This allows us to set the capacity of the vector when we construct it,
> > and still use a stack buffer when the size is small enough.
> >
> > gcc/ChangeLog:
> >
> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
> >
> > 	* genrecog.c (int_set::int_set): Explicitly construct our
> > auto_vec base class.
> > 	* vec.h (auto_vec::auto_vec): New constructor.
> > ---
> >  gcc/genrecog.c |  8 +++++---
> >  gcc/vec.h      | 12 ++++++++++++
> >  2 files changed, 17 insertions(+), 3 deletions(-)
> >
> > diff --git a/gcc/genrecog.c b/gcc/genrecog.c
> > index 6a9e610e7a0..b69043f0d02 100644
> > --- a/gcc/genrecog.c
> > +++ b/gcc/genrecog.c
> > @@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
> >    iterator end ();
> >  };
> >  
> > -int_set::int_set () {}
> > +int_set::int_set () : auto_vec<uint64_t, 1> () {}
> >  
> > -int_set::int_set (uint64_t label)
> > +int_set::int_set (uint64_t label) :
> > +  auto_vec<uint64_t, 1> ()
> >  {
> >    safe_push (label);
> >  }
> >  
> > -int_set::int_set (const int_set &other)
> > +int_set::int_set (const int_set &other) :
> > +  auto_vec<uint64_t, 1> ()
> >  {
> >    safe_splice (other);
> >  }
> 
> Is this part of the patch necessary?  Won't the default constructor
> be used anyway?

Well, without the change to the copy constructor we get this bootstrap
warning.

/src/gcc/gcc/genrecog.c: In copy constructor ‘int_set::int_set(const int_set&)’:
/src/gcc/gcc/genrecog.c:1417:1: error: base class ‘class auto_vec<long
unsigned int, 1>’ should be explicitly initialized in the copy
constructor [-Werror=extra]
 int_set::int_set (const int_set &other)
  ^~~~~~~

>
So we need to do something about that.  I'm not sure the other cases are
necessary, but I was there, and being explicit seemed better than
leaving it implicit.
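
(A minimal reduction of what trips that warning, with made-up names, is
something like:

  struct base
  {
    base () {}
    base (int) {}               /* user-provided non-default constructor */
  };

  struct derived : base
  {
    derived (const derived &) {}   /* warning: base class 'base' should be
                                      explicitly initialized in the copy
                                      constructor [-Wextra]              */
  };

so once auto_vec grows a non-default constructor, every hand-written copy
constructor deriving from it has to name the base explicitly, or live with
the -Wextra warning.)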

Thanks

Trev

> Thanks,
> Richard
> 
> > diff --git a/gcc/vec.h b/gcc/vec.h
> > index fee46164b01..914f89c350c 100644
> > --- a/gcc/vec.h
> > +++ b/gcc/vec.h
> > @@ -1272,6 +1272,18 @@ public:
> >      this->m_vec = &m_auto;
> >    }
> >  
> > +  auto_vec (size_t s)
> > +  {
> > +    if (s > N)
> > +      {
> > +	this->create (s);
> > +	return;
> > +      }
> > +
> > +    m_auto.embedded_init (MAX (N, 2), 0, 1);
> > +    this->m_vec = &m_auto;
> > +  }
> > +
> >    ~auto_vec ()
> >    {
> >      this->release ();
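
For context, the point of the new constructor is that call sites which
know their size up front can still get the stack buffer for small cases;
usage would presumably look like this (hypothetical example,
num_candidates is a made-up variable):

  /* Up to 8 elements live in the on-stack buffer; anything larger is
     allocated on the heap with the requested capacity.  */
  auto_vec<int, 8> worklist (num_candidates);
  for (int i = 0; i < num_candidates; i++)
    worklist.safe_push (i);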

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 02/13] improve bitmap / sbitmap compatability of bitmap_set_bit
  2017-05-10  6:54   ` Richard Sandiford
@ 2017-05-11  8:01     ` Trevor Saunders
  0 siblings, 0 replies; 34+ messages in thread
From: Trevor Saunders @ 2017-05-11  8:01 UTC (permalink / raw)
  To: tbsaunde+gcc, gcc-patches, richard.sandiford

On Wed, May 10, 2017 at 07:44:17AM +0100, Richard Sandiford wrote:
> tbsaunde+gcc@tbsaunde.org writes:
> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
> >
> > This makes the sbitmap version return true if the bit was previously
> > unset to make it similar to the bitmap version.
> >
> > gcc/ChangeLog:
> >
> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
> >
> > 	* sbitmap.h (bitmap_set_bit): Return bool similar to bitmap
> > version of this function.
> > ---
> >  gcc/sbitmap.h | 9 ++++++---
> >  1 file changed, 6 insertions(+), 3 deletions(-)
> >
> > diff --git a/gcc/sbitmap.h b/gcc/sbitmap.h
> > index cba0452cdb9..d4e3177d495 100644
> > --- a/gcc/sbitmap.h
> > +++ b/gcc/sbitmap.h
> > @@ -108,11 +108,14 @@ bitmap_bit_p (const_sbitmap map, int bitno)
> >  
> >  /* Set bit number BITNO in the sbitmap MAP.  */
> >  
> > -static inline void
> > +static inline bool
> >  bitmap_set_bit (sbitmap map, int bitno)
> >  {
> > -  map->elms[bitno / SBITMAP_ELT_BITS]
> > -    |= (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
> > +  SBITMAP_ELT_TYPE &word = map->elms[bitno / SBITMAP_ELT_BITS];
> > +    SBITMAP_ELT_TYPE mask = (SBITMAP_ELT_TYPE) 1 << (bitno) % SBITMAP_ELT_BITS;
> > +    bool ret = (word & mask) == 0;
> > +    word |= mask;
> > +    return ret;
> >  }
> 
> Indentation looks off (maybe it's a mailer thing?).  Think the function
> comment should be updated too -- personally I can never remember whether
> true means "I just set it" or "it was already set" :-)

Sure, I can handle that.

> What's the current position on the use of references?  IMO a pointer
> is clearer here.

Well, I generally think non-const references aren't a great idea, so I'm
not really sure why I used one here.  Anyway, not a big deal, so I'm happy
to change that.
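
Folding both comments in, the revised helper would presumably end up
looking something like this (sketch only, not necessarily what gets
committed):

  /* Set bit number BITNO in the sbitmap MAP.
     Return true if the bit was previously unset, i.e. this call changed
     the bitmap.  */

  static inline bool
  bitmap_set_bit (sbitmap map, int bitno)
  {
    SBITMAP_ELT_TYPE *word = &map->elms[bitno / SBITMAP_ELT_BITS];
    SBITMAP_ELT_TYPE mask = (SBITMAP_ELT_TYPE) 1 << bitno % SBITMAP_ELT_BITS;
    bool ret = (*word & mask) == 0;
    *word |= mask;
    return ret;
  }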

thanks

Trev

> 
> Thanks,
> Richard

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-11  7:50     ` Trevor Saunders
@ 2017-05-11  8:18       ` Richard Biener
  2017-05-11  8:23         ` Trevor Saunders
  0 siblings, 1 reply; 34+ messages in thread
From: Richard Biener @ 2017-05-11  8:18 UTC (permalink / raw)
  To: Trevor Saunders; +Cc: tbsaunde+gcc, GCC Patches, Richard Sandiford

On Thu, May 11, 2017 at 9:45 AM, Trevor Saunders <tbsaunde@tbsaunde.org> wrote:
> On Wed, May 10, 2017 at 07:54:13AM +0100, Richard Sandiford wrote:
>> tbsaunde+gcc@tbsaunde.org writes:
>> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>> >
>> > This allows us to set the capacity of the vector when we construct it,
>> > and still use a stack buffer when the size is small enough.
>> >
>> > gcc/ChangeLog:
>> >
>> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>> >
>> >     * genrecog.c (int_set::int_set): Explicitly construct our
>> > auto_vec base class.
>> >     * vec.h (auto_vec::auto_vec): New constructor.
>> > ---
>> >  gcc/genrecog.c |  8 +++++---
>> >  gcc/vec.h      | 12 ++++++++++++
>> >  2 files changed, 17 insertions(+), 3 deletions(-)
>> >
>> > diff --git a/gcc/genrecog.c b/gcc/genrecog.c
>> > index 6a9e610e7a0..b69043f0d02 100644
>> > --- a/gcc/genrecog.c
>> > +++ b/gcc/genrecog.c
>> > @@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
>> >    iterator end ();
>> >  };
>> >
>> > -int_set::int_set () {}
>> > +int_set::int_set () : auto_vec<uint64_t, 1> () {}
>> >
>> > -int_set::int_set (uint64_t label)
>> > +int_set::int_set (uint64_t label) :
>> > +  auto_vec<uint64_t, 1> ()
>> >  {
>> >    safe_push (label);
>> >  }
>> >
>> > -int_set::int_set (const int_set &other)
>> > +int_set::int_set (const int_set &other) :
>> > +  auto_vec<uint64_t, 1> ()
>> >  {
>> >    safe_splice (other);
>> >  }
>>
>> Is this part of the patch necessary?  Won't the default constructor
>> be used anyway?
>
> Well, without the change to the copy constructor we get this bootstrap
> warning.
>
> /src/gcc/gcc/genrecog.c: In copy constructor ‘int_set::int_set(const int_set&)’:
> /src/gcc/gcc/genrecog.c:1417:1: error: base class ‘class auto_vec<long
> unsigned int, 1>’ should be explicitly initialized in the copy
> constructor [-Werror=extra]
>  int_set::int_set (const int_set &other)
>   ^~~~~~~
>
>>
> So we need to do something about that.  I'm not sure the other cases are
> necessary, but I was there, and being explicit seemed better than
> leaving it implicit.

Ah,

          /* If these initializations are taking place in a copy constructor,
             the base class should probably be explicitly initialized if there
             is a user-defined constructor in the base class (other than the
             default constructor, which will be called anyway).  */
          if (extra_warnings
              && DECL_COPY_CONSTRUCTOR_P (current_function_decl)
              && type_has_user_nondefault_constructor (BINFO_TYPE (subobject)))
            warning_at (DECL_SOURCE_LOCATION (current_function_decl),
                        OPT_Wextra, "base class %q#T should be explicitly "
                        "initialized in the copy constructor",
                        BINFO_TYPE (subobject));

ok - fine then.  Probably could be avoided with

 auto_vec() = defaulted;

(or how you'd write that)
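
(For reference, the C++11 spelling would be

  auto_vec () = default;

though it is not obvious that would help here: the existing default
constructor has a non-empty body setting up the embedded storage, and the
check above keys off user-provided *non-default* constructors, which the
new auto_vec (size_t) constructor adds anyway.)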

Thanks,
Richard.

> Thanks
>
> Trev
>
>> Thanks,
>> Richard
>>
>> > diff --git a/gcc/vec.h b/gcc/vec.h
>> > index fee46164b01..914f89c350c 100644
>> > --- a/gcc/vec.h
>> > +++ b/gcc/vec.h
>> > @@ -1272,6 +1272,18 @@ public:
>> >      this->m_vec = &m_auto;
>> >    }
>> >
>> > +  auto_vec (size_t s)
>> > +  {
>> > +    if (s > N)
>> > +      {
>> > +   this->create (s);
>> > +   return;
>> > +      }
>> > +
>> > +    m_auto.embedded_init (MAX (N, 2), 0, 1);
>> > +    this->m_vec = &m_auto;
>> > +  }
>> > +
>> >    ~auto_vec ()
>> >    {
>> >      this->release ();

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-11  8:18       ` Richard Biener
@ 2017-05-11  8:23         ` Trevor Saunders
  2017-05-11  9:04           ` Richard Sandiford
  0 siblings, 1 reply; 34+ messages in thread
From: Trevor Saunders @ 2017-05-11  8:23 UTC (permalink / raw)
  To: Richard Biener; +Cc: tbsaunde+gcc, GCC Patches, Richard Sandiford

On Thu, May 11, 2017 at 10:01:51AM +0200, Richard Biener wrote:
> On Thu, May 11, 2017 at 9:45 AM, Trevor Saunders <tbsaunde@tbsaunde.org> wrote:
> > On Wed, May 10, 2017 at 07:54:13AM +0100, Richard Sandiford wrote:
> >> tbsaunde+gcc@tbsaunde.org writes:
> >> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
> >> >
> >> > This allows us to set the capacity of the vector when we construct it,
> >> > and still use a stack buffer when the size is small enough.
> >> >
> >> > gcc/ChangeLog:
> >> >
> >> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
> >> >
> >> >     * genrecog.c (int_set::int_set): Explicitly construct our
> >> > auto_vec base class.
> >> >     * vec.h (auto_vec::auto_vec): New constructor.
> >> > ---
> >> >  gcc/genrecog.c |  8 +++++---
> >> >  gcc/vec.h      | 12 ++++++++++++
> >> >  2 files changed, 17 insertions(+), 3 deletions(-)
> >> >
> >> > diff --git a/gcc/genrecog.c b/gcc/genrecog.c
> >> > index 6a9e610e7a0..b69043f0d02 100644
> >> > --- a/gcc/genrecog.c
> >> > +++ b/gcc/genrecog.c
> >> > @@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
> >> >    iterator end ();
> >> >  };
> >> >
> >> > -int_set::int_set () {}
> >> > +int_set::int_set () : auto_vec<uint64_t, 1> () {}
> >> >
> >> > -int_set::int_set (uint64_t label)
> >> > +int_set::int_set (uint64_t label) :
> >> > +  auto_vec<uint64_t, 1> ()
> >> >  {
> >> >    safe_push (label);
> >> >  }
> >> >
> >> > -int_set::int_set (const int_set &other)
> >> > +int_set::int_set (const int_set &other) :
> >> > +  auto_vec<uint64_t, 1> ()
> >> >  {
> >> >    safe_splice (other);
> >> >  }
> >>
> >> Is this part of the patch necessary?  Won't the default constructor
> >> be used anyway?
> >
> > Well, without the change to the copy constructor we get this bootstrap
> > warning.
> >
> > /src/gcc/gcc/genrecog.c: In copy constructor ‘int_set::int_set(const int_set&)’:
> > /src/gcc/gcc/genrecog.c:1417:1: error: base class ‘class auto_vec<long
> > unsigned int, 1>’ should be explicitly initialized in the copy
> > constructor [-Werror=extra]
> >  int_set::int_set (const int_set &other)
> >   ^~~~~~~
> >
> >>
> > So we need to do something about that.  I'm not sure the other cases are
> > necessary, but I was there, and being explicit seemed better than
> > leaving it implicit.
> 
> Ah,
> 
>           /* If these initializations are taking place in a copy constructor,
>              the base class should probably be explicitly initialized if there
>              is a user-defined constructor in the base class (other than the
>              default constructor, which will be called anyway).  */
>           if (extra_warnings
>               && DECL_COPY_CONSTRUCTOR_P (current_function_decl)
>               && type_has_user_nondefault_constructor (BINFO_TYPE (subobject)))
>             warning_at (DECL_SOURCE_LOCATION (current_function_decl),
>                         OPT_Wextra, "base class %q#T should be explicitly "
>                         "initialized in the copy constructor",
>                         BINFO_TYPE (subobject));
> 
> ok - fine then.  Probably could be avoided with
> 
>  auto_vec() = defaulted;
> 
> (or how you'd write that)

Well, we don't get to use = default in C++98, so we'd have to ifdef it.  I
guess that could work, since it would fix the warning outside of stage 1,
but it seems pretty gross.

Trev

> 
> Thanks,
> Richard.
> 
> > Thanks
> >
> > Trev
> >
> >> Thanks,
> >> Richard
> >>
> >> > diff --git a/gcc/vec.h b/gcc/vec.h
> >> > index fee46164b01..914f89c350c 100644
> >> > --- a/gcc/vec.h
> >> > +++ b/gcc/vec.h
> >> > @@ -1272,6 +1272,18 @@ public:
> >> >      this->m_vec = &m_auto;
> >> >    }
> >> >
> >> > +  auto_vec (size_t s)
> >> > +  {
> >> > +    if (s > N)
> >> > +      {
> >> > +   this->create (s);
> >> > +   return;
> >> > +      }
> >> > +
> >> > +    m_auto.embedded_init (MAX (N, 2), 0, 1);
> >> > +    this->m_vec = &m_auto;
> >> > +  }
> >> > +
> >> >    ~auto_vec ()
> >> >    {
> >> >      this->release ();

^ permalink raw reply	[flat|nested] 34+ messages in thread

* Re: [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size
  2017-05-11  8:23         ` Trevor Saunders
@ 2017-05-11  9:04           ` Richard Sandiford
  0 siblings, 0 replies; 34+ messages in thread
From: Richard Sandiford @ 2017-05-11  9:04 UTC (permalink / raw)
  To: Trevor Saunders; +Cc: Richard Biener, tbsaunde+gcc, GCC Patches

Trevor Saunders <tbsaunde@tbsaunde.org> writes:
> On Thu, May 11, 2017 at 10:01:51AM +0200, Richard Biener wrote:
>> On Thu, May 11, 2017 at 9:45 AM, Trevor Saunders
>> <tbsaunde@tbsaunde.org> wrote:
>> > On Wed, May 10, 2017 at 07:54:13AM +0100, Richard Sandiford wrote:
>> >> tbsaunde+gcc@tbsaunde.org writes:
>> >> > From: Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
>> >> >
>> >> > This allows us to set the capacity of the vector when we construct it,
>> >> > and still use a stack buffer when the size is small enough.
>> >> >
>> >> > gcc/ChangeLog:
>> >> >
>> >> > 2017-05-09  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>
>> >> >
>> >> >     * genrecog.c (int_set::int_set): Explicitly construct our
>> >> > auto_vec base class.
>> >> >     * vec.h (auto_vec::auto_vec): New constructor.
>> >> > ---
>> >> >  gcc/genrecog.c |  8 +++++---
>> >> >  gcc/vec.h      | 12 ++++++++++++
>> >> >  2 files changed, 17 insertions(+), 3 deletions(-)
>> >> >
>> >> > diff --git a/gcc/genrecog.c b/gcc/genrecog.c
>> >> > index 6a9e610e7a0..b69043f0d02 100644
>> >> > --- a/gcc/genrecog.c
>> >> > +++ b/gcc/genrecog.c
>> >> > @@ -1407,14 +1407,16 @@ struct int_set : public auto_vec <uint64_t, 1>
>> >> >    iterator end ();
>> >> >  };
>> >> >
>> >> > -int_set::int_set () {}
>> >> > +int_set::int_set () : auto_vec<uint64_t, 1> () {}
>> >> >
>> >> > -int_set::int_set (uint64_t label)
>> >> > +int_set::int_set (uint64_t label) :
>> >> > +  auto_vec<uint64_t, 1> ()
>> >> >  {
>> >> >    safe_push (label);
>> >> >  }
>> >> >
>> >> > -int_set::int_set (const int_set &other)
>> >> > +int_set::int_set (const int_set &other) :
>> >> > +  auto_vec<uint64_t, 1> ()
>> >> >  {
>> >> >    safe_splice (other);
>> >> >  }
>> >>
>> >> Is this part of the patch necessary?  Won't the default constructor
>> >> be used anyway?
>> >
>> > Well, without the change to the copy constructor we get this bootstrap
>> > warning.
>> >
>> > /src/gcc/gcc/genrecog.c: In copy constructor ‘int_set::int_set(const int_set&)’:
>> > /src/gcc/gcc/genrecog.c:1417:1: error: base class ‘class auto_vec<long
>> > unsigned int, 1>’ should be explicitly initialized in the copy
>> > constructor [-Werror=extra]
>> >  int_set::int_set (const int_set &other)
>> >   ^~~~~~~
>> >
>> >>
>> > So we need to do something about that.  I'm not sure the other cases are
>> > necessary, but I was there, and being explicit seemed better than
>> > leaving it implicit.
>> 
>> Ah,
>> 
>>           /* If these initializations are taking place in a copy constructor,
>>              the base class should probably be explicitly initialized if there
>>              is a user-defined constructor in the base class (other than the
>>              default constructor, which will be called anyway).  */
>>           if (extra_warnings
>>               && DECL_COPY_CONSTRUCTOR_P (current_function_decl)
>>               && type_has_user_nondefault_constructor (BINFO_TYPE (subobject)))
>>             warning_at (DECL_SOURCE_LOCATION (current_function_decl),
>>                         OPT_Wextra, "base class %q#T should be explicitly "
>>                         "initialized in the copy constructor",
>>                         BINFO_TYPE (subobject));
>> 
>> ok - fine then.  Probably could be avoided with
>> 
>>  auto_vec() = defaulted;
>> 
>> (or how you'd write that)
>
> Well, we don't get to use = default in C++98, so we'd have to ifdef, I
> guess it could work since it would fix the warning outside of stage 1,
> but seems pretty gross.

Yeah.  OK for the genrecog.c bit.

Thanks,
Richard

^ permalink raw reply	[flat|nested] 34+ messages in thread

end of thread, other threads:[~2017-05-11  8:47 UTC | newest]

Thread overview: 34+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2017-05-09 20:53 [PATCH 00/13] misc data structure stuff tbsaunde+gcc
2017-05-09 20:53 ` [PATCH 10/13] make a member an auto_sbitmap tbsaunde+gcc
2017-05-10  8:26   ` Richard Biener
2017-05-09 20:53 ` [PATCH 08/13] move several bitmaps from gc memory to the default obstack and use auto_bitmap tbsaunde+gcc
2017-05-10  8:26   ` Richard Biener
2017-05-09 20:53 ` [PATCH 07/13] use auto_bitmap more tbsaunde+gcc
2017-05-10  8:28   ` Richard Biener
2017-05-09 20:53 ` [PATCH 09/13] use auto_bitmap more with alternate obstacks tbsaunde+gcc
2017-05-10  8:31   ` Richard Biener
2017-05-09 20:53 ` [PATCH 13/13] make inverted_post_order_compute() operate on a vec tbsaunde+gcc
2017-05-10  8:44   ` Richard Biener
2017-05-09 20:53 ` [PATCH 04/13] allow auto_bitmap to use other bitmap obstacks tbsaunde+gcc
2017-05-10  8:27   ` Richard Biener
2017-05-09 20:53 ` [PATCH 01/13] improve safety of freeing bitmaps tbsaunde+gcc
2017-05-10  8:15   ` Richard Biener
2017-05-10 10:55     ` Trevor Saunders
2017-05-10 11:11       ` Richard Biener
2017-05-09 20:53 ` [PATCH 12/13] make depth_first_search_ds a class tbsaunde+gcc
2017-05-10  8:29   ` Richard Biener
2017-05-09 20:53 ` [PATCH 05/13] allow constructing a auto_vec with a preallocation, and a possibly larger actual allocation size tbsaunde+gcc
2017-05-10  6:58   ` Richard Sandiford
2017-05-11  7:50     ` Trevor Saunders
2017-05-11  8:18       ` Richard Biener
2017-05-11  8:23         ` Trevor Saunders
2017-05-11  9:04           ` Richard Sandiford
2017-05-09 20:53 ` [PATCH 06/13] replace some manual stacks with auto_vec tbsaunde+gcc
2017-05-10  8:26   ` Richard Biener
2017-05-09 20:53 ` [PATCH 03/13] store the bitmap_head within the auto_bitmap tbsaunde+gcc
2017-05-10  8:25   ` Richard Biener
2017-05-09 20:53 ` [PATCH 02/13] improve bitmap / sbitmap compatability of bitmap_set_bit tbsaunde+gcc
2017-05-10  6:54   ` Richard Sandiford
2017-05-11  8:01     ` Trevor Saunders
2017-05-09 20:55 ` [PATCH 11/13] make more vars auto_sbitmaps tbsaunde+gcc
2017-05-10  8:27   ` Richard Biener
