author		Thomas Koenig <tkoenig@gcc.gnu.org>	2021-09-13 19:49:49 +0200
committer	Thomas Koenig <tkoenig@gcc.gnu.org>	2021-09-13 19:49:49 +0200
commit		b18a97e5dd0935e1c4a626c230f21457d0aad3d5 (patch)
tree		c1818f41af6fe780deafb6cd6a183f32085fe654 /gcc/tree-if-conv.c
parent		e76a53644c9d70e998c0d050e9a456af388c6b61 (diff)
Merged current trunk to branch.
Diffstat (limited to 'gcc/tree-if-conv.c')
-rw-r--r--	gcc/tree-if-conv.c	149
1 file changed, 127 insertions, 22 deletions
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 93effaa..d7b7b30 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -1,5 +1,5 @@
 /* If-conversion for vectorizer.
-   Copyright (C) 2004-2020 Free Software Foundation, Inc.
+   Copyright (C) 2004-2021 Free Software Foundation, Inc.
    Contributed by Devang Patel <dpatel@apple.com>
 
 This file is part of GCC.
@@ -1579,6 +1579,31 @@ if_convertible_loop_p (class loop *loop)
   return res;
 }
 
+/* Return reduc_1 if has_nop.
+
+   if (...)
+     tmp1 = (unsigned type) reduc_1;
+     tmp2 = tmp1 + rhs2;
+     reduc_3 = (signed type) tmp2.  */
+static tree
+strip_nop_cond_scalar_reduction (bool has_nop, tree op)
+{
+  if (!has_nop)
+    return op;
+
+  if (TREE_CODE (op) != SSA_NAME)
+    return NULL_TREE;
+
+  gassign *stmt = safe_dyn_cast <gassign *> (SSA_NAME_DEF_STMT (op));
+  if (!stmt
+      || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
+      || !tree_nop_conversion_p (TREE_TYPE (op), TREE_TYPE
+                                 (gimple_assign_rhs1 (stmt))))
+    return NULL_TREE;
+
+  return gimple_assign_rhs1 (stmt);
+}
+
 /* Returns true if def-stmt for phi argument ARG is simple increment/decrement
    which is in predicated basic block.
    In fact, the following PHI pattern is searching:
@@ -1595,9 +1620,10 @@ if_convertible_loop_p (class loop *loop)
 
 static bool
 is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
-                          tree *op0, tree *op1, bool extended)
+                          tree *op0, tree *op1, bool extended, bool* has_nop,
+                          gimple **nop_reduc)
 {
-  tree lhs, r_op1, r_op2;
+  tree lhs, r_op1, r_op2, r_nop1, r_nop2;
   gimple *stmt;
   gimple *header_phi = NULL;
   enum tree_code reduction_op;
@@ -1608,7 +1634,7 @@ is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
   use_operand_p use_p;
   edge e;
   edge_iterator ei;
-  bool result = false;
+  bool result = *has_nop = false;
 
   if (TREE_CODE (arg_0) != SSA_NAME || TREE_CODE (arg_1) != SSA_NAME)
     return false;
@@ -1656,18 +1682,77 @@ is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
     return false;
 
   reduction_op = gimple_assign_rhs_code (stmt);
+
+  /* Catch something like below
+
+     loop-header:
+     reduc_1 = PHI <..., reduc_2>
+     ...
+     if (...)
+       tmp1 = (unsigned type) reduc_1;
+       tmp2 = tmp1 + rhs2;
+       reduc_3 = (signed type) tmp2;
+
+     reduc_2 = PHI <reduc_1, reduc_3>
+
+     and convert to
+
+     reduc_2 = PHI <0, reduc_3>
+     tmp1 = (unsigned type)reduce_1;
+     ifcvt = cond_expr ? rhs2 : 0
+     tmp2 = tmp1 +/- ifcvt;
+     reduce_1 = (signed type)tmp2;  */
+
+  if (CONVERT_EXPR_CODE_P (reduction_op))
+    {
+      lhs = gimple_assign_rhs1 (stmt);
+      if (TREE_CODE (lhs) != SSA_NAME
+          || !has_single_use (lhs))
+        return false;
+
+      *nop_reduc = stmt;
+      stmt = SSA_NAME_DEF_STMT (lhs);
+      if (gimple_bb (stmt) != gimple_bb (*nop_reduc)
+          || !is_gimple_assign (stmt))
+        return false;
+
+      *has_nop = true;
+      reduction_op = gimple_assign_rhs_code (stmt);
+    }
+
   if (reduction_op != PLUS_EXPR && reduction_op != MINUS_EXPR)
     return false;
   r_op1 = gimple_assign_rhs1 (stmt);
   r_op2 = gimple_assign_rhs2 (stmt);
 
+  r_nop1 = strip_nop_cond_scalar_reduction (*has_nop, r_op1);
+  r_nop2 = strip_nop_cond_scalar_reduction (*has_nop, r_op2);
+
   /* Make R_OP1 to hold reduction variable.  */
-  if (r_op2 == PHI_RESULT (header_phi)
+  if (r_nop2 == PHI_RESULT (header_phi)
       && reduction_op == PLUS_EXPR)
-    std::swap (r_op1, r_op2);
-  else if (r_op1 != PHI_RESULT (header_phi))
+    {
+      std::swap (r_op1, r_op2);
+      std::swap (r_nop1, r_nop2);
+    }
+  else if (r_nop1 != PHI_RESULT (header_phi))
     return false;
 
+  if (*has_nop)
+    {
+      /* Check that R_NOP1 is used in nop_stmt or in PHI only.  */
+      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, r_nop1)
+        {
+          gimple *use_stmt = USE_STMT (use_p);
+          if (is_gimple_debug (use_stmt))
+            continue;
+          if (use_stmt == SSA_NAME_DEF_STMT (r_op1))
+            continue;
+          if (use_stmt != phi)
+            return false;
+        }
+    }
+
   /* Check that R_OP1 is used in reduction stmt or in PHI only.  */
   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, r_op1)
     {
@@ -1705,7 +1790,8 @@ is_cond_scalar_reduction (gimple *phi, gimple **reduc, tree arg_0, tree arg_1,
 
 static tree
 convert_scalar_cond_reduction (gimple *reduc, gimple_stmt_iterator *gsi,
-                               tree cond, tree op0, tree op1, bool swap)
+                               tree cond, tree op0, tree op1, bool swap,
+                               bool has_nop, gimple* nop_reduc)
 {
   gimple_stmt_iterator stmt_it;
   gimple *new_assign;
@@ -1714,6 +1800,7 @@ convert_scalar_cond_reduction (gimple *reduc, gimple_stmt_iterator *gsi,
   tree tmp = make_temp_ssa_name (TREE_TYPE (rhs1), NULL, "_ifc_");
   tree c;
   tree zero = build_zero_cst (TREE_TYPE (rhs1));
+  gimple_seq stmts = NULL;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -1732,8 +1819,18 @@ convert_scalar_cond_reduction (gimple *reduc, gimple_stmt_iterator *gsi,
   new_assign = gimple_build_assign (tmp, c);
   gsi_insert_before (gsi, new_assign, GSI_SAME_STMT);
   /* Build rhs for unconditional increment/decrement.  */
-  rhs = fold_build2 (gimple_assign_rhs_code (reduc),
-                     TREE_TYPE (rhs1), op0, tmp);
+  rhs = gimple_build (&stmts, gimple_assign_rhs_code (reduc),
+                      TREE_TYPE (rhs1), op0, tmp);
+
+  if (has_nop)
+    {
+      rhs = gimple_convert (&stmts,
+                            TREE_TYPE (gimple_assign_lhs (nop_reduc)), rhs);
+      stmt_it = gsi_for_stmt (nop_reduc);
+      gsi_remove (&stmt_it, true);
+      release_defs (nop_reduc);
+    }
+  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
 
   /* Delete original reduction stmt.  */
   stmt_it = gsi_for_stmt (reduc);
@@ -1808,7 +1905,7 @@ ifcvt_follow_ssa_use_edges (tree val)
 static void
 predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
 {
-  gimple *new_stmt = NULL, *reduc;
+  gimple *new_stmt = NULL, *reduc, *nop_reduc;
   tree rhs, res, arg0, arg1, op0, op1, scev;
   tree cond;
   unsigned int index0;
@@ -1816,6 +1913,7 @@ predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
   edge e;
   basic_block bb;
   unsigned int i;
+  bool has_nop;
 
   res = gimple_phi_result (phi);
   if (virtual_operand_p (res))
@@ -1876,10 +1974,15 @@ predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
          arg1 = gimple_phi_arg_def (phi, 1);
        }
       if (is_cond_scalar_reduction (phi, &reduc, arg0, arg1,
-                                    &op0, &op1, false))
-       /* Convert reduction stmt into vectorizable form.  */
-       rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
-                                            true_bb != gimple_bb (reduc));
+                                    &op0, &op1, false, &has_nop,
+                                    &nop_reduc))
+       {
+         /* Convert reduction stmt into vectorizable form.  */
+         rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
+                                              true_bb != gimple_bb (reduc),
+                                              has_nop, nop_reduc);
+         redundant_ssa_names.safe_push (std::make_pair (res, rhs));
+       }
       else
        /* Build new RHS using selected condition and arguments.  */
        rhs = fold_build_cond_expr (TREE_TYPE (res), unshare_expr (cond),
@@ -1961,14 +2064,17 @@ predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
                                               is_gimple_condexpr, NULL_TREE,
                                               true, GSI_SAME_STMT);
       if (!(is_cond_scalar_reduction (phi, &reduc, arg0 , arg1,
-                                      &op0, &op1, true)))
+                                      &op0, &op1, true, &has_nop, &nop_reduc)))
        rhs = fold_build_cond_expr (TREE_TYPE (res), unshare_expr (cond),
                                    swap? arg1 : arg0,
                                    swap? arg0 : arg1);
       else
-       /* Convert reduction stmt into vectorizable form.  */
-       rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
-                                            swap);
+       {
+         /* Convert reduction stmt into vectorizable form.  */
+         rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
+                                              swap,has_nop, nop_reduc);
+         redundant_ssa_names.safe_push (std::make_pair (res, rhs));
+       }
       new_stmt = gimple_build_assign (res, rhs);
       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
       update_stmt (new_stmt);
@@ -2102,7 +2208,7 @@ insert_gimplified_predicates (loop_p loop)
    mask if it was created for given SIZE and -1 otherwise.  */
 
 static int
-mask_exists (int size, vec<int> vec)
+mask_exists (int size, const vec<int> &vec)
 {
   unsigned int ix;
   int v;
@@ -3194,14 +3300,13 @@ pass_if_conversion::gate (function *fun)
 unsigned int
 pass_if_conversion::execute (function *fun)
 {
-  class loop *loop;
   unsigned todo = 0;
 
   if (number_of_loops (fun) <= 1)
     return 0;
 
   auto_vec<gimple *> preds;
-  FOR_EACH_LOOP (loop, 0)
+  for (auto loop : loops_list (cfun, 0))
     if (flag_tree_loop_if_convert == 1
        || ((flag_tree_loop_vectorize || loop->force_vectorize)
            && !loop->dont_vectorize))
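As a rough illustration of what the new has_nop / strip_nop_cond_scalar_reduction handling buys: the merged trunk change lets if-conversion keep treating a guarded sum as a conditional scalar reduction even when earlier passes have wrapped the increment in a pair of no-op sign conversions. The C loop below is a hypothetical example (function and variable names are not from this commit); the assumption is that, for a narrow accumulator, GCC typically performs the addition in the corresponding unsigned type and converts back, producing the GIMPLE shape shown in the comment.

/* Hypothetical example, not part of the commit.  For a short accumulator
   the guarded increment is usually lowered to something like

       tmp1 = (unsigned short) sum_1;
       tmp2 = tmp1 + _x;
       sum_2 = (short) tmp2;

   i.e. a PLUS_EXPR sandwiched between nop conversions.  With this change
   if-conversion still recognizes the pattern as a conditional scalar
   reduction and can if-convert (and later vectorize) the loop instead of
   giving up.  */
short
conditional_sum (const short *a, const short *b, int n)
{
  short sum = 0;
  for (int i = 0; i < n; i++)
    if (b[i])          /* predicated block containing the reduction */
      sum += a[i];
  return sum;
}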