From: Lehua Ding <lehua.ding@rivai.ai>
To: gcc-patches@gcc.gnu.org
Cc: juzhe.zhong@rivai.ai, kito.cheng@gmail.com, rdapp.gcc@gmail.com,
palmer@rivosinc.com, jeffreyalaw@gmail.com, lehua.ding@rivai.ai
Subject: [PATCH V3 06/11] RISC-V: P6: Add computing reaching definition data flow
Date: Thu, 19 Oct 2023 16:33:28 +0800 [thread overview]
Message-ID: <20231019083333.2052340-7-lehua.ding@rivai.ai> (raw)
In-Reply-To: <20231019083333.2052340-1-lehua.ding@rivai.ai>
gcc/ChangeLog:
* config/riscv/riscv-vsetvl.cc (pre_vsetvl::compute_avl_def_data): New.
(pre_vsetvl::compute_vsetvl_def_data): New.
(pre_vsetvl::compute_lcm_local_properties): New.
---
gcc/config/riscv/riscv-vsetvl.cc | 395 +++++++++++++++++++++++++++++++
1 file changed, 395 insertions(+)
diff --git a/gcc/config/riscv/riscv-vsetvl.cc b/gcc/config/riscv/riscv-vsetvl.cc
index dad3d7c941e..27d47d7c039 100644
--- a/gcc/config/riscv/riscv-vsetvl.cc
+++ b/gcc/config/riscv/riscv-vsetvl.cc
@@ -2722,6 +2722,401 @@ public:
};
+void
+pre_vsetvl::compute_avl_def_data ()
+{
+ if (bitmap_empty_p (m_avl_regs))
+ return;
+
+ unsigned num_regs = GP_REG_LAST + 1;
+ unsigned num_bbs = last_basic_block_for_fn (cfun);
+
+ sbitmap *avl_def_loc_temp = sbitmap_vector_alloc (num_bbs, num_regs);
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ bitmap_and (avl_def_loc_temp[bb->index ()], m_avl_regs,
+ m_reg_def_loc[bb->index ()]);
+
+ vsetvl_block_info &block_info = get_block_info (bb);
+ if (block_info.has_info ())
+ {
+ vsetvl_info &footer_info = block_info.get_exit_info ();
+ gcc_assert (footer_info.valid_p ());
+ if (footer_info.has_vl ())
+ bitmap_set_bit (avl_def_loc_temp[bb->index ()],
+ REGNO (footer_info.get_vl ()));
+ }
+ }
+
+ if (m_avl_def_in)
+ sbitmap_vector_free (m_avl_def_in);
+ if (m_avl_def_out)
+ sbitmap_vector_free (m_avl_def_out);
+
+ unsigned num_exprs = num_bbs * num_regs;
+ sbitmap *avl_def_loc = sbitmap_vector_alloc (num_bbs, num_exprs);
+ sbitmap *m_kill = sbitmap_vector_alloc (num_bbs, num_exprs);
+ m_avl_def_in = sbitmap_vector_alloc (num_bbs, num_exprs);
+ m_avl_def_out = sbitmap_vector_alloc (num_bbs, num_exprs);
+
+ bitmap_vector_clear (avl_def_loc, num_bbs);
+ bitmap_vector_clear (m_kill, num_bbs);
+ bitmap_vector_clear (m_avl_def_out, num_bbs);
+
+ unsigned regno;
+ sbitmap_iterator sbi;
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ EXECUTE_IF_SET_IN_BITMAP (avl_def_loc_temp[bb->index ()], 0, regno, sbi)
+ {
+ bitmap_set_bit (avl_def_loc[bb->index ()],
+ get_expr_id (bb->index (), regno, num_bbs));
+ bitmap_set_range (m_kill[bb->index ()], regno * num_bbs, num_bbs);
+ }
+
+ basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+ EXECUTE_IF_SET_IN_BITMAP (m_avl_regs, 0, regno, sbi)
+ bitmap_set_bit (m_avl_def_out[entry->index],
+ get_expr_id (entry->index, regno, num_bbs));
+
+ compute_reaching_defintion (avl_def_loc, m_kill, m_avl_def_in, m_avl_def_out);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file,
+ " Compute avl reaching defition data (num_bbs %d, num_regs "
+ "%d):\n\n",
+ num_bbs, num_regs);
+ fprintf (dump_file, " avl_regs: ");
+ dump_bitmap_file (dump_file, m_avl_regs);
+ fprintf (dump_file, "\n bitmap data:\n");
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ unsigned int i = bb->index ();
+ fprintf (dump_file, " BB %u:\n", i);
+ fprintf (dump_file, " avl_def_loc:");
+ unsigned expr_id;
+ sbitmap_iterator sbi;
+ EXECUTE_IF_SET_IN_BITMAP (avl_def_loc[i], 0, expr_id, sbi)
+ {
+ fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+ get_bb_index (expr_id, num_bbs));
+ }
+ fprintf (dump_file, "\n kill:");
+ EXECUTE_IF_SET_IN_BITMAP (m_kill[i], 0, expr_id, sbi)
+ {
+ fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+ get_bb_index (expr_id, num_bbs));
+ }
+ fprintf (dump_file, "\n avl_def_in:");
+ EXECUTE_IF_SET_IN_BITMAP (m_avl_def_in[i], 0, expr_id, sbi)
+ {
+ fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+ get_bb_index (expr_id, num_bbs));
+ }
+ fprintf (dump_file, "\n avl_def_out:");
+ EXECUTE_IF_SET_IN_BITMAP (m_avl_def_out[i], 0, expr_id, sbi)
+ {
+ fprintf (dump_file, " (r%u,bb%u)", get_regno (expr_id, num_bbs),
+ get_bb_index (expr_id, num_bbs));
+ }
+ fprintf (dump_file, "\n");
+ }
+ }
+
+ sbitmap_vector_free (avl_def_loc);
+ sbitmap_vector_free (m_kill);
+ sbitmap_vector_free (avl_def_loc_temp);
+
+ m_dem.set_avl_in_out_data (m_avl_def_in, m_avl_def_out);
+}
+
+void
+pre_vsetvl::compute_vsetvl_def_data ()
+{
+ m_vsetvl_def_exprs.truncate (0);
+ add_expr (m_vsetvl_def_exprs, m_unknow_info);
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ vsetvl_block_info &block_info = get_block_info (bb);
+ if (block_info.empty_p ())
+ continue;
+ vsetvl_info &footer_info = block_info.get_exit_info ();
+ gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+ add_expr (m_vsetvl_def_exprs, footer_info);
+ }
+
+ if (m_vsetvl_def_in)
+ sbitmap_vector_free (m_vsetvl_def_in);
+ if (m_vsetvl_def_out)
+ sbitmap_vector_free (m_vsetvl_def_out);
+
+ sbitmap *def_loc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ m_vsetvl_def_exprs.length ());
+ sbitmap *m_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ m_vsetvl_def_exprs.length ());
+
+ m_vsetvl_def_in = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ m_vsetvl_def_exprs.length ());
+ m_vsetvl_def_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ m_vsetvl_def_exprs.length ());
+
+ bitmap_vector_clear (def_loc, last_basic_block_for_fn (cfun));
+ bitmap_vector_clear (m_kill, last_basic_block_for_fn (cfun));
+ bitmap_vector_clear (m_vsetvl_def_out, last_basic_block_for_fn (cfun));
+
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ vsetvl_block_info &block_info = get_block_info (bb);
+ if (block_info.empty_p ())
+ {
+ for (unsigned i = 0; i < m_vsetvl_def_exprs.length (); i += 1)
+ {
+ const vsetvl_info &info = *m_vsetvl_def_exprs[i];
+ if (!info.has_nonvlmax_reg_avl ())
+ continue;
+ unsigned int regno;
+ sbitmap_iterator sbi;
+ EXECUTE_IF_SET_IN_BITMAP (m_reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ if (regno == REGNO (info.get_avl ()))
+ {
+ bitmap_set_bit (m_kill[bb->index ()], i);
+ bitmap_set_bit (def_loc[bb->index ()],
+ get_expr_index (m_vsetvl_def_exprs,
+ m_unknow_info));
+ }
+ }
+ continue;
+ }
+
+ vsetvl_info &footer_info = block_info.get_exit_info ();
+ bitmap_ones (m_kill[bb->index ()]);
+ bitmap_set_bit (def_loc[bb->index ()],
+ get_expr_index (m_vsetvl_def_exprs, footer_info));
+ }
+
+ /* Set the def_out of the ENTRY basic block to m_unknow_info expr. */
+ basic_block entry = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+ bitmap_set_bit (m_vsetvl_def_out[entry->index],
+ get_expr_index (m_vsetvl_def_exprs, m_unknow_info));
+
+ compute_reaching_defintion (def_loc, m_kill, m_vsetvl_def_in,
+ m_vsetvl_def_out);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ fprintf (dump_file,
+ "\n Compute vsetvl info reaching defition data:\n\n");
+ fprintf (dump_file, " Expression List (%d):\n",
+ m_vsetvl_def_exprs.length ());
+ for (unsigned i = 0; i < m_vsetvl_def_exprs.length (); i++)
+ {
+ const auto &info = *m_vsetvl_def_exprs[i];
+ fprintf (dump_file, " Expr[%u]: ", i);
+ info.dump (dump_file, " ");
+ }
+ fprintf (dump_file, "\n bitmap data:\n");
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ unsigned int i = bb->index ();
+ fprintf (dump_file, " BB %u:\n", i);
+ fprintf (dump_file, " def_loc: ");
+ dump_bitmap_file (dump_file, def_loc[i]);
+ fprintf (dump_file, " kill: ");
+ dump_bitmap_file (dump_file, m_kill[i]);
+ fprintf (dump_file, " vsetvl_def_in: ");
+ dump_bitmap_file (dump_file, m_vsetvl_def_in[i]);
+ fprintf (dump_file, " vsetvl_def_out: ");
+ dump_bitmap_file (dump_file, m_vsetvl_def_out[i]);
+ }
+ }
+
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ vsetvl_block_info &block_info = get_block_info (bb);
+ if (block_info.empty_p ())
+ continue;
+ vsetvl_info &curr_info = block_info.get_entry_info ();
+ if (!curr_info.valid_p ())
+ continue;
+
+ unsigned int expr_index;
+ sbitmap_iterator sbi;
+ gcc_assert (
+ !bitmap_empty_p (m_vsetvl_def_in[curr_info.get_bb ()->index ()]));
+ bool full_available = true;
+ EXECUTE_IF_SET_IN_BITMAP (m_vsetvl_def_in[bb->index ()], 0, expr_index,
+ sbi)
+ {
+ vsetvl_info &prev_info = *m_vsetvl_def_exprs[expr_index];
+ if (!prev_info.valid_p ()
+ || !m_dem.available_p (prev_info, curr_info))
+ {
+ full_available = false;
+ break;
+ }
+ }
+ block_info.full_available = full_available;
+ }
+
+ sbitmap_vector_free (def_loc);
+ sbitmap_vector_free (m_kill);
+}
+
+/* Compute the local properties of each recorded expression.
+
+ Local properties are those that are defined by the block, irrespective of
+ other blocks.
+
+ An expression is transparent in a block if its operands are not modified
+ in the block.
+
+   An expression is computed (locally available) in a block if it is computed
+   at least once and the expression would contain the same value if the
+   computation were moved to the end of the block.
+
+   An expression is locally anticipatable in a block if it is computed at
+   least once and the expression would contain the same value if the
+   computation were moved to the beginning of the block. */
+void
+pre_vsetvl::compute_lcm_local_properties ()
+{
+ m_exprs.truncate (0);
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ vsetvl_block_info &block_info = get_block_info (bb);
+ if (block_info.empty_p ())
+ continue;
+ vsetvl_info &header_info = block_info.get_entry_info ();
+ vsetvl_info &footer_info = block_info.get_exit_info ();
+ gcc_assert (footer_info.valid_p () || footer_info.unknown_p ());
+ add_expr (m_exprs, header_info);
+ add_expr (m_exprs, footer_info);
+ }
+
+ int num_exprs = m_exprs.length ();
+ if (m_avloc)
+ sbitmap_vector_free (m_avloc);
+ if (m_kill)
+ sbitmap_vector_free (m_kill);
+ if (m_antloc)
+ sbitmap_vector_free (m_antloc);
+ if (m_transp)
+ sbitmap_vector_free (m_transp);
+ if (m_avin)
+ sbitmap_vector_free (m_avin);
+ if (m_avout)
+ sbitmap_vector_free (m_avout);
+
+ m_avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+ m_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+ m_antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+ m_transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+ m_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+ m_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_exprs);
+
+ bitmap_vector_clear (m_avloc, last_basic_block_for_fn (cfun));
+ bitmap_vector_clear (m_antloc, last_basic_block_for_fn (cfun));
+ bitmap_vector_clear (m_transp, last_basic_block_for_fn (cfun));
+
+  /* - If T is locally available at the end of a block, then T' must be
+     available at the end of the same block. Since some optimization has
+     occurred earlier, T' might not be locally available; however, it must
+     have been previously computed on all paths. As a formula, T at AVLOC(B)
+     implies that T' at AVOUT(B).
+ An "available occurrence" is one that is the last occurrence in the
+ basic block and the operands are not modified by following statements in
+ the basic block [including this insn].
+
+     - If T is locally anticipated at the beginning of a block, then either
+     T' is locally anticipated or it is already available from previous
+     blocks. As a formula, this means that T at ANTLOC(B) implies that T' is
+     at ANTLOC(B) or at AVIN(B).
+ An "anticipatable occurrence" is one that is the first occurrence in the
+ basic block, the operands are not modified in the basic block prior
+ to the occurrence and the output is not used between the start of
+ the block and the occurrence. */
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ unsigned bb_index = bb->index ();
+ vsetvl_block_info &block_info = get_block_info (bb);
+
+ /* Compute m_transp */
+ if (block_info.empty_p ())
+ {
+ bitmap_ones (m_transp[bb_index]);
+ for (int i = 0; i < num_exprs; i += 1)
+ {
+ const vsetvl_info &info = *m_exprs[i];
+ if (!info.has_nonvlmax_reg_avl () && !info.has_vl ())
+ continue;
+
+ unsigned int regno;
+ sbitmap_iterator sbi;
+ EXECUTE_IF_SET_IN_BITMAP (m_reg_def_loc[bb->index ()], 0, regno,
+ sbi)
+ {
+ if (regno == REGNO (info.get_avl ()))
+ bitmap_clear_bit (m_transp[bb->index ()], i);
+ }
+
+ for (const insn_info *insn : bb->real_nondebug_insns ())
+ {
+ if ((info.has_nonvlmax_reg_avl ()
+ && find_access (insn->defs (), REGNO (info.get_avl ())))
+ || (info.has_vl ()
+ && find_access (insn->uses (),
+ REGNO (info.get_vl ()))))
+ {
+ bitmap_clear_bit (m_transp[bb_index], i);
+ break;
+ }
+ }
+ }
+
+ continue;
+ }
+
+ vsetvl_info &header_info = block_info.get_entry_info ();
+ vsetvl_info &footer_info = block_info.get_exit_info ();
+
+ if (header_info.valid_p ()
+ && (anticpatable_exp_p (header_info) || block_info.full_available))
+ bitmap_set_bit (m_antloc[bb_index],
+ get_expr_index (m_exprs, header_info));
+
+ if (footer_info.valid_p ())
+ for (int i = 0; i < num_exprs; i += 1)
+ {
+ const vsetvl_info &info = *m_exprs[i];
+ if (!info.valid_p ())
+ continue;
+ if (available_exp_p (footer_info, info))
+ bitmap_set_bit (m_avloc[bb_index], i);
+ }
+ }
+
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ unsigned bb_index = bb->index ();
+ bitmap_ior (m_kill[bb_index], m_transp[bb_index], m_avloc[bb_index]);
+ bitmap_not (m_kill[bb_index], m_kill[bb_index]);
+ }
+
+ for (const bb_info *bb : crtl->ssa->bbs ())
+ {
+ unsigned bb_index = bb->index ();
+ edge e;
+ edge_iterator ei;
+ FOR_EACH_EDGE (e, ei, bb->cfg_bb ()->preds)
+ if (e->flags & EDGE_COMPLEX)
+ {
+ bitmap_clear (m_antloc[bb_index]);
+ bitmap_clear (m_transp[bb_index]);
+ }
+ }
+}
+
void
pre_vsetvl::fuse_local_vsetvl_info ()
{
--
2.36.3
next prev parent reply other threads:[~2023-10-19 8:34 UTC|newest]
Thread overview: 26+ messages / expand[flat|nested] mbox.gz Atom feed top
2023-10-19 8:33 [PATCH V3 00/11] Refactor and cleanup vsetvl pass Lehua Ding
2023-10-19 8:33 ` [PATCH V3 01/11] RISC-V: P1: Refactor avl_info/vl_vtype_info/vector_insn_info/vector_block_info Lehua Ding
2023-10-19 8:33 ` [PATCH V3 02/11] RISC-V: P2: Refactor and cleanup demand system Lehua Ding
2023-10-19 8:33 ` [PATCH V3 03/11] RISC-V: P3: Refactor vector_infos_manager Lehua Ding
2023-10-19 8:33 ` [PATCH V3 04/11] RISC-V: P4: move method from pass_vsetvl to pre_vsetvl Lehua Ding
2023-10-19 8:33 ` [PATCH V3 05/11] RISC-V: P5: Combine phase 1 and 2 Lehua Ding
2023-10-19 8:33 ` Lehua Ding [this message]
2023-10-19 8:33 ` [PATCH V3 07/11] RISC-V: P7: Move earliest fuse and lcm code to pre_vsetvl class Lehua Ding
2023-10-19 8:33 ` [PATCH V3 08/11] RISC-V: P8: Refactor emit-vsetvl phase and delete post optimization Lehua Ding
2023-10-19 8:33 ` [PATCH V3 09/11] RISC-V: P9: Cleanup and reorganize helper functions Lehua Ding
2023-10-19 8:33 ` [PATCH V3 10/11] RISC-V: P10: Delete riscv-vsetvl.h and adjust riscv-vsetvl.def Lehua Ding
2023-10-19 8:33 ` [PATCH V3 11/11] RISC-V: P11: Adjust and add testcases Lehua Ding
2023-10-19 8:38 ` [PATCH V3 00/11] Refactor and cleanup vsetvl pass Robin Dapp
2023-10-19 8:43 ` Lehua Ding
2023-10-19 8:50 ` 钟居哲
2023-10-19 18:04 ` Patrick O'Neill
2023-10-20 2:20 ` Lehua Ding
2023-10-20 3:58 ` Lehua Ding
2023-10-23 18:30 ` Patrick O'Neill
2023-10-23 21:41 ` 钟居哲
2023-10-23 22:46 ` Patrick O'Neill
2023-10-23 22:50 ` 钟居哲
2023-10-23 23:42 ` Patrick O'Neill
2023-10-24 0:51 ` juzhe.zhong
2023-10-24 1:01 ` Patrick O'Neill
2023-10-24 2:27 ` juzhe.zhong
Reply instructions:
You may reply publicly to this message via plain-text email
using any one of the following methods:
* Save the following mbox file, import it into your mail client,
and reply-to-all from there: mbox
Avoid top-posting and favor interleaved quoting:
https://en.wikipedia.org/wiki/Posting_style#Interleaved_style
* Reply using the --to, --cc, and --in-reply-to
switches of git-send-email(1):
git send-email \
--in-reply-to=20231019083333.2052340-7-lehua.ding@rivai.ai \
--to=lehua.ding@rivai.ai \
--cc=gcc-patches@gcc.gnu.org \
--cc=jeffreyalaw@gmail.com \
--cc=juzhe.zhong@rivai.ai \
--cc=kito.cheng@gmail.com \
--cc=palmer@rivosinc.com \
--cc=rdapp.gcc@gmail.com \
/path/to/YOUR_REPLY
https://kernel.org/pub/software/scm/git/docs/git-send-email.html
* If your mail client supports setting the In-Reply-To header
via mailto: links, try the mailto: link
Be sure your reply has a Subject: header at the top and a blank line
before the message body.
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for read-only IMAP folder(s) and NNTP newsgroup(s).