author		Ilya Leoshkevich <iii@linux.ibm.com>	2021-09-14 20:41:18 +0200
committer	Ilya Leoshkevich <iii@linux.ibm.com>	2021-09-28 14:10:13 +0200
commit		99c106e695bd8f1de580c4ff0b1d3fe59c9a4f1e (patch)
tree		0d23c96694feb753a909aed32fea3edacc559766 /gcc/tree-ssa-reassoc.c
parent		3b7041e8345c2f1030e58620f28e22d64b2c196b (diff)
reassoc: Do not bias loop-carried PHIs early
Biasing loop-carried PHIs during the 1st reassociation pass interferes
with reduction chains and does not bring measurable benefits, so do it
only during the 2nd reassociation pass.

gcc/ChangeLog:

	* passes.def (pass_reassoc): Rename parameter to early_p.
	* tree-ssa-reassoc.c (reassoc_bias_loop_carried_phi_ranks_p): New
	variable.
	(phi_rank): Don't bias loop-carried phi ranks before vectorization
	pass.
	(execute_reassoc): Add bias_loop_carried_phi_ranks_p parameter.
	(pass_reassoc::pass_reassoc): Add bias_loop_carried_phi_ranks_p
	initializer.
	(pass_reassoc::set_param): Set bias_loop_carried_phi_ranks_p value.
	(pass_reassoc::execute): Pass bias_loop_carried_phi_ranks_p to
	execute_reassoc.
	(pass_reassoc::bias_loop_carried_phi_ranks_p): New member.
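For illustration, a minimal sketch (not taken from the patch) of the kind of
loop this affects: before vectorization, the loop-carried PHI for "sum" should
keep its natural rank so the vectorizer can still recognize the reduction
chain; biasing that PHI's rank during the 1st reassociation pass would
rearrange the chain and get in the way of that recognition.

    /* Sketch only: "sum" becomes a loop-carried PHI whose rank the 1st
       reassoc pass should not bias.  */
    double
    dot (const double *a, const double *b, int n)
    {
      double sum = 0.0;		/* loop-carried PHI */
      for (int i = 0; i < n; i++)
	sum += a[i] * b[i];	/* reduction chain fed by that PHI */
      return sum;
    }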
Diffstat (limited to 'gcc/tree-ssa-reassoc.c')
-rw-r--r--	gcc/tree-ssa-reassoc.c	16
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 8498cfc..420c14e 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -180,6 +180,10 @@ along with GCC; see the file COPYING3. If not see
point 3a in the pass header comment. */
static bool reassoc_insert_powi_p;
+/* Enable biasing ranks of loop accumulators. We don't want this before
+ vectorization, since it interferes with reduction chains. */
+static bool reassoc_bias_loop_carried_phi_ranks_p;
+
/* Statistics */
static struct
{
@@ -269,6 +273,9 @@ phi_rank (gimple *stmt)
use_operand_p use;
gimple *use_stmt;
+ if (!reassoc_bias_loop_carried_phi_ranks_p)
+ return bb_rank[bb->index];
+
/* We only care about real loops (those with a latch). */
if (!father->latch)
return bb_rank[bb->index];
@@ -6940,9 +6947,10 @@ fini_reassoc (void)
optimization of a gimple conditional. Otherwise returns zero. */
static unsigned int
-execute_reassoc (bool insert_powi_p)
+execute_reassoc (bool insert_powi_p, bool bias_loop_carried_phi_ranks_p)
{
reassoc_insert_powi_p = insert_powi_p;
+ reassoc_bias_loop_carried_phi_ranks_p = bias_loop_carried_phi_ranks_p;
init_reassoc ();
@@ -6983,15 +6991,19 @@ public:
{
gcc_assert (n == 0);
insert_powi_p = param;
+ bias_loop_carried_phi_ranks_p = !param;
}
virtual bool gate (function *) { return flag_tree_reassoc != 0; }
virtual unsigned int execute (function *)
- { return execute_reassoc (insert_powi_p); }
+ {
+ return execute_reassoc (insert_powi_p, bias_loop_carried_phi_ranks_p);
+ }
private:
/* Enable insertion of __builtin_powi calls during execute_reassoc. See
point 3a in the pass header comment. */
bool insert_powi_p;
+ bool bias_loop_carried_phi_ranks_p;
}; // class pass_reassoc
} // anon namespace
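For reference, a standalone sketch of how the two flags derive from the single
pass parameter after this change, assuming (per the ChangeLog) that the
passes.def parameter is now early_p and that the pre-vectorization instance
passes true; this simply mirrors the set_param logic shown above and is not
part of the patch.

    #include <stdbool.h>

    /* Sketch only: mirrors pass_reassoc's parameter handling.  */
    static void
    derive_reassoc_flags (bool early_p, bool *insert_powi_p,
			  bool *bias_loop_carried_phi_ranks_p)
    {
      *insert_powi_p = early_p;			  /* powi insertion in the 1st pass.  */
      *bias_loop_carried_phi_ranks_p = !early_p;  /* biasing only in the 2nd pass.  */
    }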