Diffstat (limited to 'gcc')
-rw-r--r--  gcc/tree-vect-loop.cc   | 726
-rw-r--r--  gcc/tree-vect-stmts.cc  |  10
-rw-r--r--  gcc/tree-vectorizer.h   |   7
3 files changed, 207 insertions, 536 deletions
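
The first hunk below removes get_initial_def_for_reduction; its block comment spells out the shape of the vector it built: element 0 holds INIT_VAL and every other element holds NEUTRAL_OP, so the vector of partial results still reduces to the right scalar. A minimal stand-alone model of that idea, in plain C++ rather than the GCC internal API (the names, data and vector length are made up for illustration):

#include <cstdio>
#include <numeric>
#include <vector>

int
main ()
{
  const int init_val = 5;    /* scalar initial value of the reduction  */
  const int neutral_op = 0;  /* neutral element for a PLUS reduction   */
  const unsigned vf = 4;     /* hypothetical number of vector lanes    */

  /* {INIT_VAL, NEUTRAL_OP, NEUTRAL_OP, ...} - the initial vector of
     partial results described in the removed comment.  */
  std::vector<int> init_def (vf, neutral_op);
  init_def[0] = init_val;

  /* Vector loop, modelled scalar-wise: lane l accumulates every vf-th
     element.  */
  const int data[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
  for (unsigned i = 0; i < 8; ++i)
    init_def[i % vf] += data[i];

  /* Epilogue: reduce the partial results to a scalar.  */
  int result = std::accumulate (init_def.begin (), init_def.end (), 0);
  std::printf ("%d\n", result);  /* prints 41 == 5 + (1 + ... + 8) */
  return 0;
}
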
diff --git a/gcc/tree-vect-loop.cc b/gcc/tree-vect-loop.cc index 3de1ea6..fe6f3cf 100644 --- a/gcc/tree-vect-loop.cc +++ b/gcc/tree-vect-loop.cc @@ -5470,76 +5470,6 @@ vect_emit_reduction_init_stmts (loop_vec_info loop_vinfo, } } -/* Function get_initial_def_for_reduction - - Input: - REDUC_INFO - the info_for_reduction - INIT_VAL - the initial value of the reduction variable - NEUTRAL_OP - a value that has no effect on the reduction, as per - neutral_op_for_reduction - - Output: - Return a vector variable, initialized according to the operation that - STMT_VINFO performs. This vector will be used as the initial value - of the vector of partial results. - - The value we need is a vector in which element 0 has value INIT_VAL - and every other element has value NEUTRAL_OP. */ - -static tree -get_initial_def_for_reduction (loop_vec_info loop_vinfo, - stmt_vec_info reduc_info, - tree init_val, tree neutral_op) -{ - class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); - tree scalar_type = TREE_TYPE (init_val); - tree vectype = get_vectype_for_scalar_type (loop_vinfo, scalar_type); - tree init_def; - gimple_seq stmts = NULL; - - gcc_assert (vectype); - - gcc_assert (POINTER_TYPE_P (scalar_type) || INTEGRAL_TYPE_P (scalar_type) - || SCALAR_FLOAT_TYPE_P (scalar_type)); - - gcc_assert (nested_in_vect_loop_p (loop, reduc_info) - || loop == (gimple_bb (reduc_info->stmt))->loop_father); - - if (operand_equal_p (init_val, neutral_op)) - { - /* If both elements are equal then the vector described above is - just a splat. */ - neutral_op = gimple_convert (&stmts, TREE_TYPE (vectype), neutral_op); - init_def = gimple_build_vector_from_val (&stmts, vectype, neutral_op); - } - else - { - neutral_op = gimple_convert (&stmts, TREE_TYPE (vectype), neutral_op); - init_val = gimple_convert (&stmts, TREE_TYPE (vectype), init_val); - if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant ()) - { - /* Construct a splat of NEUTRAL_OP and insert INIT_VAL into - element 0. */ - init_def = gimple_build_vector_from_val (&stmts, vectype, - neutral_op); - init_def = gimple_build (&stmts, CFN_VEC_SHL_INSERT, - vectype, init_def, init_val); - } - else - { - /* Build {INIT_VAL, NEUTRAL_OP, NEUTRAL_OP, ...}. */ - tree_vector_builder elts (vectype, 1, 2); - elts.quick_push (init_val); - elts.quick_push (neutral_op); - init_def = gimple_build_vector (&stmts, &elts); - } - } - - if (stmts) - vect_emit_reduction_init_stmts (loop_vinfo, reduc_info, stmts); - return init_def; -} - /* Get at the initial defs for the reduction PHIs for REDUC_INFO, which performs a reduction involving GROUP_SIZE scalar statements. NUMBER_OF_VECTORS is the number of vector defs to create. If NEUTRAL_OP @@ -5951,7 +5881,6 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, loop-closed PHI of the inner loop which we remember as def for the reduction PHI generation. 
*/ bool double_reduc = false; - stmt_vec_info rdef_info = stmt_info; if (STMT_VINFO_DEF_TYPE (stmt_info) == vect_double_reduction_def) { double_reduc = true; @@ -5981,20 +5910,18 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, auto_vec<tree> reduc_inputs; int j, i; vec<tree> &scalar_results = reduc_info->reduc_scalar_results; - unsigned int group_size = 1, k; + unsigned int k; /* SLP reduction without reduction chain, e.g., # a1 = phi <a2, a0> # b1 = phi <b2, b0> a2 = operation (a1) b2 = operation (b1) */ bool slp_reduc - = (slp_node - && !REDUC_GROUP_FIRST_ELEMENT (STMT_VINFO_REDUC_DEF (reduc_info))); + = !REDUC_GROUP_FIRST_ELEMENT (STMT_VINFO_REDUC_DEF (reduc_info)); bool direct_slp_reduc; tree induction_index = NULL_TREE; - if (slp_node) - group_size = SLP_TREE_LANES (slp_node); + unsigned int group_size = SLP_TREE_LANES (slp_node); if (nested_in_vect_loop_p (loop, stmt_info)) { @@ -6024,18 +5951,8 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, /* All statements produce live-out values. */ live_out_stmts = SLP_TREE_SCALAR_STMTS (slp_node); - unsigned vec_num; - int ncopies; - if (slp_node) - { - vec_num = SLP_TREE_VEC_DEFS (slp_node_instance->reduc_phis).length (); - ncopies = 1; - } - else - { - vec_num = 1; - ncopies = STMT_VINFO_VEC_STMTS (reduc_info).length (); - } + unsigned vec_num + = SLP_TREE_VEC_DEFS (slp_node_instance->reduc_phis).length (); /* For cond reductions we want to create a new vector (INDEX_COND_EXPR) which is updated with the current index of the loop for every match of @@ -6050,53 +5967,29 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, if (STMT_VINFO_REDUC_TYPE (reduc_info) == COND_REDUCTION) { auto_vec<std::pair<tree, bool>, 2> ccompares; - if (slp_node) + slp_tree cond_node = slp_node_instance->root; + while (cond_node != slp_node_instance->reduc_phis) { - slp_tree cond_node = slp_node_instance->root; - while (cond_node != slp_node_instance->reduc_phis) + stmt_vec_info cond_info = SLP_TREE_REPRESENTATIVE (cond_node); + int slp_reduc_idx; + if (gimple_assign_rhs_code (cond_info->stmt) == COND_EXPR) { - stmt_vec_info cond_info = SLP_TREE_REPRESENTATIVE (cond_node); - int slp_reduc_idx; - if (gimple_assign_rhs_code (cond_info->stmt) == COND_EXPR) - { - gimple *vec_stmt - = SSA_NAME_DEF_STMT (SLP_TREE_VEC_DEFS (cond_node)[0]); - gcc_assert (gimple_assign_rhs_code (vec_stmt) == VEC_COND_EXPR); - ccompares.safe_push - (std::make_pair (gimple_assign_rhs1 (vec_stmt), - STMT_VINFO_REDUC_IDX (cond_info) == 2)); - /* ??? We probably want to have REDUC_IDX on the SLP node? - We have both three and four children COND_EXPR nodes - dependent on whether the comparison is still embedded - as GENERIC. So work backwards. 
*/ - slp_reduc_idx = (SLP_TREE_CHILDREN (cond_node).length () - 3 - + STMT_VINFO_REDUC_IDX (cond_info)); - } - else - slp_reduc_idx = STMT_VINFO_REDUC_IDX (cond_info); - cond_node = SLP_TREE_CHILDREN (cond_node)[slp_reduc_idx]; - } - } - else - { - stmt_vec_info cond_info = STMT_VINFO_REDUC_DEF (reduc_info); - cond_info = vect_stmt_to_vectorize (cond_info); - while (cond_info != reduc_info) - { - if (gimple_assign_rhs_code (cond_info->stmt) == COND_EXPR) - { - gimple *vec_stmt = STMT_VINFO_VEC_STMTS (cond_info)[0]; - gcc_assert (gimple_assign_rhs_code (vec_stmt) == VEC_COND_EXPR); - ccompares.safe_push - (std::make_pair (gimple_assign_rhs1 (vec_stmt), - STMT_VINFO_REDUC_IDX (cond_info) == 2)); - } - cond_info - = loop_vinfo->lookup_def (gimple_op (cond_info->stmt, - 1 + STMT_VINFO_REDUC_IDX - (cond_info))); - cond_info = vect_stmt_to_vectorize (cond_info); + gimple *vec_stmt + = SSA_NAME_DEF_STMT (SLP_TREE_VEC_DEFS (cond_node)[0]); + gcc_assert (gimple_assign_rhs_code (vec_stmt) == VEC_COND_EXPR); + ccompares.safe_push + (std::make_pair (gimple_assign_rhs1 (vec_stmt), + STMT_VINFO_REDUC_IDX (cond_info) == 2)); + /* ??? We probably want to have REDUC_IDX on the SLP node? + We have both three and four children COND_EXPR nodes + dependent on whether the comparison is still embedded + as GENERIC. So work backwards. */ + slp_reduc_idx = (SLP_TREE_CHILDREN (cond_node).length () - 3 + + STMT_VINFO_REDUC_IDX (cond_info)); } + else + slp_reduc_idx = STMT_VINFO_REDUC_IDX (cond_info); + cond_node = SLP_TREE_CHILDREN (cond_node)[slp_reduc_idx]; } gcc_assert (ccompares.length () != 0); @@ -6205,30 +6098,22 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, /* We need to reduce values in all exits. */ exit_bb = loop_exit->dest; exit_gsi = gsi_after_labels (exit_bb); - reduc_inputs.create (slp_node ? 
vec_num : ncopies); + reduc_inputs.create (vec_num); for (unsigned i = 0; i < vec_num; i++) { gimple_seq stmts = NULL; - if (slp_node) - def = vect_get_slp_vect_def (slp_node, i); + def = vect_get_slp_vect_def (slp_node, i); + tree new_def = copy_ssa_name (def); + phi = create_phi_node (new_def, exit_bb); + if (LOOP_VINFO_IV_EXIT (loop_vinfo) == loop_exit) + SET_PHI_ARG_DEF (phi, loop_exit->dest_idx, def); else - def = gimple_get_lhs (STMT_VINFO_VEC_STMTS (rdef_info)[0]); - for (j = 0; j < ncopies; j++) - { - tree new_def = copy_ssa_name (def); - phi = create_phi_node (new_def, exit_bb); - if (j) - def = gimple_get_lhs (STMT_VINFO_VEC_STMTS (rdef_info)[j]); - if (LOOP_VINFO_IV_EXIT (loop_vinfo) == loop_exit) - SET_PHI_ARG_DEF (phi, loop_exit->dest_idx, def); - else - { - for (unsigned k = 0; k < gimple_phi_num_args (phi); k++) - SET_PHI_ARG_DEF (phi, k, def); - } - new_def = gimple_convert (&stmts, vectype, new_def); - reduc_inputs.quick_push (new_def); + { + for (unsigned k = 0; k < gimple_phi_num_args (phi); k++) + SET_PHI_ARG_DEF (phi, k, def); } + new_def = gimple_convert (&stmts, vectype, new_def); + reduc_inputs.quick_push (new_def); gsi_insert_seq_before (&exit_gsi, stmts, GSI_SAME_STMT); } @@ -6284,8 +6169,7 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, if (REDUC_GROUP_FIRST_ELEMENT (STMT_VINFO_REDUC_DEF (reduc_info)) || direct_slp_reduc || (slp_reduc - && constant_multiple_p (TYPE_VECTOR_SUBPARTS (vectype), group_size)) - || ncopies > 1) + && constant_multiple_p (TYPE_VECTOR_SUBPARTS (vectype), group_size))) { gimple_seq stmts = NULL; tree single_input = reduc_inputs[0]; @@ -6579,7 +6463,7 @@ vect_create_epilog_for_reduction (loop_vec_info loop_vinfo, is itself a neutral value. */ tree vector_identity = NULL_TREE; tree neutral_op = NULL_TREE; - if (slp_node) + if (1) { tree initial_value = NULL_TREE; if (REDUC_GROUP_FIRST_ELEMENT (STMT_VINFO_REDUC_DEF (reduc_info))) @@ -7088,10 +6972,10 @@ static bool vectorize_fold_left_reduction (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, - gimple **vec_stmt, slp_tree slp_node, + slp_tree slp_node, gimple *reduc_def_stmt, code_helper code, internal_fn reduc_fn, - tree *ops, int num_ops, tree vectype_in, + int num_ops, tree vectype_in, int reduc_index, vec_loop_masks *masks, vec_loop_lens *lens) { @@ -7099,14 +6983,7 @@ vectorize_fold_left_reduction (loop_vec_info loop_vinfo, tree vectype_out = STMT_VINFO_VECTYPE (stmt_info); internal_fn mask_reduc_fn = get_masked_reduction_fn (reduc_fn, vectype_in); - int ncopies; - if (slp_node) - ncopies = 1; - else - ncopies = vect_get_num_copies (loop_vinfo, vectype_in); - gcc_assert (!nested_in_vect_loop_p (loop, stmt_info)); - gcc_assert (ncopies == 1); bool is_cond_op = false; if (!code.is_tree_code ()) @@ -7118,9 +6995,8 @@ vectorize_fold_left_reduction (loop_vec_info loop_vinfo, gcc_assert (TREE_CODE_LENGTH (tree_code (code)) == binary_op); - if (slp_node) - gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (vectype_out), - TYPE_VECTOR_SUBPARTS (vectype_in))); + gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (vectype_out), + TYPE_VECTOR_SUBPARTS (vectype_in))); /* The operands either come from a binary operation or an IFN_COND operation. The former is a gimple assign with binary rhs and the latter is a @@ -7130,36 +7006,14 @@ vectorize_fold_left_reduction (loop_vec_info loop_vinfo, int group_size = 1; stmt_vec_info scalar_dest_def_info; auto_vec<tree> vec_oprnds0, vec_opmask; - if (slp_node) - { - vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[(is_cond_op ? 
2 : 0) - + (1 - reduc_index)], - &vec_oprnds0); - group_size = SLP_TREE_SCALAR_STMTS (slp_node).length (); - scalar_dest_def_info = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1]; - /* For an IFN_COND_OP we also need the vector mask operand. */ - if (is_cond_op) - vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], &vec_opmask); - } - else - { - tree op0, opmask; - if (!is_cond_op) - op0 = ops[1 - reduc_index]; - else - { - op0 = ops[2 + (1 - reduc_index)]; - opmask = ops[0]; - } - vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, 1, - op0, &vec_oprnds0); - scalar_dest_def_info = stmt_info; - - /* For an IFN_COND_OP we also need the vector mask operand. */ - if (is_cond_op) - vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, 1, - opmask, &vec_opmask); - } + vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[(is_cond_op ? 2 : 0) + + (1 - reduc_index)], + &vec_oprnds0); + group_size = SLP_TREE_SCALAR_STMTS (slp_node).length (); + scalar_dest_def_info = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1]; + /* For an IFN_COND_OP we also need the vector mask operand. */ + if (is_cond_op) + vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[0], &vec_opmask); gimple *sdef = vect_orig_stmt (scalar_dest_def_info)->stmt; tree scalar_dest = gimple_get_lhs (sdef); @@ -7167,7 +7021,6 @@ vectorize_fold_left_reduction (loop_vec_info loop_vinfo, tree reduc_var = gimple_phi_result (reduc_def_stmt); int vec_num = vec_oprnds0.length (); - gcc_assert (vec_num == 1 || slp_node); tree vec_elem_type = TREE_TYPE (vectype_out); gcc_checking_assert (useless_type_conversion_p (scalar_type, vec_elem_type)); @@ -7279,13 +7132,7 @@ vectorize_fold_left_reduction (loop_vec_info loop_vinfo, scalar_dest_def_info, new_stmt, gsi); - if (slp_node) - slp_node->push_vec_def (new_stmt); - else - { - STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt); - *vec_stmt = new_stmt; - } + slp_node->push_vec_def (new_stmt); } return true; @@ -7543,7 +7390,7 @@ vectorizable_lane_reducing (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, return false; } - if (slp_node && !vect_maybe_update_slp_op_vectype (slp_op, vectype)) + if (!vect_maybe_update_slp_op_vectype (slp_op, vectype)) { if (dump_enabled_p ()) dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, @@ -7682,23 +7529,20 @@ vectorizable_reduction (loop_vec_info loop_vinfo, { if (is_a <gphi *> (stmt_info->stmt)) { - if (slp_node) - { - /* We eventually need to set a vector type on invariant - arguments. */ - unsigned j; - slp_tree child; - FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (slp_node), j, child) - if (!vect_maybe_update_slp_op_vectype - (child, SLP_TREE_VECTYPE (slp_node))) - { - if (dump_enabled_p ()) - dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, - "incompatible vector types for " - "invariants\n"); - return false; - } - } + /* We eventually need to set a vector type on invariant + arguments. */ + unsigned j; + slp_tree child; + FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (slp_node), j, child) + if (!vect_maybe_update_slp_op_vectype + (child, SLP_TREE_VECTYPE (slp_node))) + { + if (dump_enabled_p ()) + dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, + "incompatible vector types for " + "invariants\n"); + return false; + } /* Analysis for double-reduction is done on the outer loop PHI, nested cycles have no further restrictions. */ STMT_VINFO_TYPE (stmt_info) = cycle_phi_info_type; @@ -7723,7 +7567,6 @@ vectorizable_reduction (loop_vec_info loop_vinfo, the outer loop PHI. The latter is what we want to analyze the reduction with. 
The LC PHI is handled by vectorizable_lc_phi. */ - gcc_assert (slp_node); return gimple_phi_num_args (as_a <gphi *> (stmt_info->stmt)) == 2; } use_operand_p use_p; @@ -7734,13 +7577,10 @@ vectorizable_reduction (loop_vec_info loop_vinfo, phi_info = loop_vinfo->lookup_stmt (use_stmt); } - if (slp_node) - { - slp_node_instance->reduc_phis = slp_node; - /* ??? We're leaving slp_node to point to the PHIs, we only - need it to get at the number of vector stmts which wasn't - yet initialized for the instance root. */ - } + slp_node_instance->reduc_phis = slp_node; + /* ??? We're leaving slp_node to point to the PHIs, we only + need it to get at the number of vector stmts which wasn't + yet initialized for the instance root. */ /* PHIs should not participate in patterns. */ gcc_assert (!STMT_VINFO_RELATED_STMT (phi_info)); @@ -7756,11 +7596,10 @@ vectorizable_reduction (loop_vec_info loop_vinfo, unsigned reduc_chain_length = 0; bool only_slp_reduc_chain = true; stmt_info = NULL; - slp_tree slp_for_stmt_info = slp_node ? slp_node_instance->root : NULL; + slp_tree slp_for_stmt_info = slp_node_instance->root; /* For double-reductions we start SLP analysis at the inner loop LC PHI which is the def of the outer loop live stmt. */ - if (STMT_VINFO_DEF_TYPE (reduc_info) == vect_double_reduction_def - && slp_node) + if (STMT_VINFO_DEF_TYPE (reduc_info) == vect_double_reduction_def) slp_for_stmt_info = SLP_TREE_CHILDREN (slp_for_stmt_info)[0]; while (reduc_def != PHI_RESULT (reduc_def_phi)) { @@ -7863,7 +7702,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, reduc_def = op.ops[reduc_idx]; reduc_chain_length++; - if (!stmt_info && slp_node) + if (!stmt_info) slp_for_stmt_info = SLP_TREE_CHILDREN (slp_for_stmt_info)[0]; } /* PHIs should not participate in patterns. */ @@ -7877,14 +7716,13 @@ vectorizable_reduction (loop_vec_info loop_vinfo, /* STMT_VINFO_REDUC_DEF doesn't point to the first but the last element. */ - if (slp_node && REDUC_GROUP_FIRST_ELEMENT (stmt_info)) + if (REDUC_GROUP_FIRST_ELEMENT (stmt_info)) { gcc_assert (!REDUC_GROUP_NEXT_ELEMENT (stmt_info)); stmt_info = REDUC_GROUP_FIRST_ELEMENT (stmt_info); } if (REDUC_GROUP_FIRST_ELEMENT (stmt_info)) - gcc_assert (slp_node - && REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info); + gcc_assert (REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info); /* 1. Is vectorizable reduction? */ /* Not supportable if the reduction variable is used in the loop, unless @@ -7939,7 +7777,6 @@ vectorizable_reduction (loop_vec_info loop_vinfo, OK to use them in a reduction chain or when the reduction group has just one element. */ if (lane_reducing - && slp_node && !REDUC_GROUP_FIRST_ELEMENT (stmt_info) && SLP_TREE_LANES (slp_node) > 1) { @@ -8017,7 +7854,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, /* If we have a condition reduction, see if we can simplify it further. */ if (reduction_type == COND_REDUCTION) { - if (slp_node && SLP_TREE_LANES (slp_node) != 1) + if (SLP_TREE_LANES (slp_node) != 1) return false; /* When the condition uses the reduction value in the condition, fail. */ @@ -8119,10 +7956,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, if (STMT_VINFO_LIVE_P (phi_info)) return false; - if (slp_node) - ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node); - else - ncopies = vect_get_num_copies (loop_vinfo, vectype_in); + ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node); gcc_assert (ncopies >= 1); @@ -8194,8 +8028,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, outer-loop vectorization is safe. 
Likewise when we are vectorizing a series of reductions using SLP and the VF is one the reductions are performed in scalar order. */ - if (slp_node - && !REDUC_GROUP_FIRST_ELEMENT (stmt_info) + if (!REDUC_GROUP_FIRST_ELEMENT (stmt_info) && known_eq (LOOP_VINFO_VECT_FACTOR (loop_vinfo), 1u)) ; else if (needs_fold_left_reduction_p (op.type, orig_code)) @@ -8227,7 +8060,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, || reduction_type == INTEGER_INDUC_COND_REDUCTION || reduction_type == CONST_COND_REDUCTION || reduction_type == EXTRACT_LAST_REDUCTION) - && slp_node + && 1 && ncopies > 1) { if (dump_enabled_p ()) @@ -8236,17 +8069,6 @@ vectorizable_reduction (loop_vec_info loop_vinfo, return false; } - if ((double_reduc || reduction_type != TREE_CODE_REDUCTION) - && !slp_node - && ncopies > 1) - { - if (dump_enabled_p ()) - dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, - "multiple types in double reduction or condition " - "reduction or fold-left reduction.\n"); - return false; - } - internal_fn reduc_fn = IFN_LAST; if (reduction_type == TREE_CODE_REDUCTION || reduction_type == FOLD_LEFT_REDUCTION @@ -8308,14 +8130,11 @@ vectorizable_reduction (loop_vec_info loop_vinfo, /* For SLP reductions, see if there is a neutral value we can use. */ tree neutral_op = NULL_TREE; - if (slp_node) - { - tree initial_value = NULL_TREE; - if (REDUC_GROUP_FIRST_ELEMENT (stmt_info) != NULL) - initial_value = vect_phi_initial_value (reduc_def_phi); - neutral_op = neutral_op_for_reduction (TREE_TYPE (vectype_out), - orig_code, initial_value); - } + tree initial_value = NULL_TREE; + if (REDUC_GROUP_FIRST_ELEMENT (stmt_info) != NULL) + initial_value = vect_phi_initial_value (reduc_def_phi); + neutral_op = neutral_op_for_reduction (TREE_TYPE (vectype_out), + orig_code, initial_value); if (double_reduc && reduction_type == FOLD_LEFT_REDUCTION) { @@ -8341,7 +8160,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, } if (reduction_type == FOLD_LEFT_REDUCTION - && (slp_node && SLP_TREE_LANES (slp_node) > 1) + && SLP_TREE_LANES (slp_node) > 1 && !REDUC_GROUP_FIRST_ELEMENT (stmt_info)) { /* We cannot use in-order reductions in this case because there is @@ -8370,8 +8189,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, } /* Check extra constraints for variable-length unchained SLP reductions. */ - if (slp_node - && !REDUC_GROUP_FIRST_ELEMENT (stmt_info) + if (!REDUC_GROUP_FIRST_ELEMENT (stmt_info) && !nunits_out.is_constant ()) { /* We checked above that we could build the initial vector when @@ -8465,9 +8283,8 @@ vectorizable_reduction (loop_vec_info loop_vinfo, own reduction accumulator since one of the main goals of unrolling a reduction is to reduce the aggregate loop-carried latency. */ if (ncopies > 1 - && (!slp_node - || (!REDUC_GROUP_FIRST_ELEMENT (stmt_info) - && SLP_TREE_LANES (slp_node) == 1)) + && !REDUC_GROUP_FIRST_ELEMENT (stmt_info) + && SLP_TREE_LANES (slp_node) == 1 && (STMT_VINFO_RELEVANT (stmt_info) <= vect_used_only_live) && reduc_chain_length == 1 && loop_vinfo->suggested_unroll_factor == 1) @@ -8518,8 +8335,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, reduction. 
*/ single_defuse_cycle &= !lane_reducing; - if (slp_node - && (single_defuse_cycle || reduction_type == FOLD_LEFT_REDUCTION)) + if (single_defuse_cycle || reduction_type == FOLD_LEFT_REDUCTION) for (i = 0; i < (int) op.num_ops; i++) if (!vect_maybe_update_slp_op_vectype (slp_op[i], vectype_op[i])) { @@ -8549,7 +8365,7 @@ vectorizable_reduction (loop_vec_info loop_vinfo, { stmt_vec_info tem = vect_stmt_to_vectorize (STMT_VINFO_REDUC_DEF (phi_info)); - if (slp_node && REDUC_GROUP_FIRST_ELEMENT (tem)) + if (REDUC_GROUP_FIRST_ELEMENT (tem)) { gcc_assert (!REDUC_GROUP_NEXT_ELEMENT (tem)); tem = REDUC_GROUP_FIRST_ELEMENT (tem); @@ -8645,11 +8461,10 @@ vect_emulate_mixed_dot_prod (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, bool vect_transform_reduction (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, gimple_stmt_iterator *gsi, - gimple **vec_stmt, slp_tree slp_node) + slp_tree slp_node) { tree vectype_out = STMT_VINFO_VECTYPE (stmt_info); class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); - unsigned ncopies; unsigned vec_num; stmt_vec_info reduc_info = info_for_reduction (loop_vinfo, stmt_info); @@ -8677,16 +8492,7 @@ vect_transform_reduction (loop_vec_info loop_vinfo, if (!vectype_in) vectype_in = STMT_VINFO_VECTYPE (stmt_info); - if (slp_node) - { - ncopies = 1; - vec_num = vect_get_num_copies (loop_vinfo, slp_node, vectype_in); - } - else - { - ncopies = vect_get_num_copies (loop_vinfo, vectype_in); - vec_num = 1; - } + vec_num = vect_get_num_copies (loop_vinfo, slp_node, vectype_in); code_helper code = canonicalize_code (op.code, op.type); internal_fn cond_fn = get_conditional_internal_fn (code, op.type); @@ -8702,10 +8508,6 @@ vect_transform_reduction (loop_vec_info loop_vinfo, if (dump_enabled_p ()) dump_printf_loc (MSG_NOTE, vect_location, "transform reduction.\n"); - /* FORNOW: Multiple types are not supported for condition. */ - if (code == COND_EXPR) - gcc_assert (ncopies == 1); - /* A binary COND_OP reduction must have the same definition and else value. */ bool cond_fn_p = code.is_internal_fn () @@ -8729,8 +8531,8 @@ vect_transform_reduction (loop_vec_info loop_vinfo, internal_fn reduc_fn = STMT_VINFO_REDUC_FN (reduc_info); gcc_assert (code.is_tree_code () || cond_fn_p); return vectorize_fold_left_reduction - (loop_vinfo, stmt_info, gsi, vec_stmt, slp_node, reduc_def_phi, - code, reduc_fn, op.ops, op.num_ops, vectype_in, + (loop_vinfo, stmt_info, gsi, slp_node, reduc_def_phi, + code, reduc_fn, op.num_ops, vectype_in, reduc_index, masks, lens); } @@ -8748,33 +8550,12 @@ vect_transform_reduction (loop_vec_info loop_vinfo, tree scalar_dest = gimple_get_lhs (stmt_info->stmt); tree vec_dest = vect_create_destination_var (scalar_dest, vectype_out); - if (lane_reducing && !slp_node && !single_defuse_cycle) - { - /* Note: there are still vectorizable cases that can not be handled by - single-lane slp. Probably it would take some time to evolve the - feature to a mature state. So we have to keep the below non-slp code - path as failsafe for lane-reducing support. */ - gcc_assert (op.num_ops <= 3); - for (unsigned i = 0; i < op.num_ops; i++) - { - unsigned oprnd_ncopies = ncopies; - - if ((int) i == reduc_index) - { - tree vectype = STMT_VINFO_VECTYPE (stmt_info); - oprnd_ncopies = vect_get_num_copies (loop_vinfo, vectype); - } - - vect_get_vec_defs_for_operand (loop_vinfo, stmt_info, oprnd_ncopies, - op.ops[i], &vec_oprnds[i]); - } - } /* Get NCOPIES vector definitions for all operands except the reduction definition. 
*/ - else if (!cond_fn_p) + if (!cond_fn_p) { gcc_assert (reduc_index >= 0 && reduc_index <= 2); - vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, ncopies, + vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, 1, single_defuse_cycle && reduc_index == 0 ? NULL_TREE : op.ops[0], &vec_oprnds[0], single_defuse_cycle && reduc_index == 1 @@ -8789,7 +8570,7 @@ vect_transform_reduction (loop_vec_info loop_vinfo, vectype. */ gcc_assert (single_defuse_cycle && (reduc_index == 1 || reduc_index == 2)); - vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, ncopies, op.ops[0], + vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, 1, op.ops[0], truth_type_for (vectype_in), &vec_oprnds[0], reduc_index == 1 ? NULL_TREE : op.ops[1], NULL_TREE, &vec_oprnds[1], @@ -8945,7 +8726,7 @@ vect_transform_reduction (loop_vec_info loop_vinfo, handle over the operand to other dependent statements. */ gcc_assert (reduc_vop); - if (slp_node && TREE_CODE (reduc_vop) == SSA_NAME + if (TREE_CODE (reduc_vop) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (reduc_vop)) new_stmt = SSA_NAME_DEF_STMT (reduc_vop); else @@ -8969,7 +8750,7 @@ vect_transform_reduction (loop_vec_info loop_vinfo, std::swap (vop[0], vop[1]); } tree mask = vect_get_loop_mask (loop_vinfo, gsi, masks, - vec_num * ncopies, vectype_in, + vec_num, vectype_in, mask_index++); gcall *call = gimple_build_call_internal (cond_fn, 4, mask, vop[0], vop[1], vop[0]); @@ -8987,7 +8768,7 @@ vect_transform_reduction (loop_vec_info loop_vinfo, if (masked_loop_p && mask_by_cond_expr) { tree mask = vect_get_loop_mask (loop_vinfo, gsi, masks, - vec_num * ncopies, vectype_in, + vec_num, vectype_in, mask_index++); build_vect_cond_expr (code, vop, mask, gsi); } @@ -9015,15 +8796,10 @@ vect_transform_reduction (loop_vec_info loop_vinfo, if (single_defuse_cycle && i < num - 1) vec_oprnds[reduc_index].safe_push (gimple_get_lhs (new_stmt)); - else if (slp_node) - slp_node->push_vec_def (new_stmt); else - STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt); + slp_node->push_vec_def (new_stmt); } - if (!slp_node) - *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0]; - return true; } @@ -9031,14 +8807,12 @@ vect_transform_reduction (loop_vec_info loop_vinfo, bool vect_transform_cycle_phi (loop_vec_info loop_vinfo, - stmt_vec_info stmt_info, gimple **vec_stmt, + stmt_vec_info stmt_info, slp_tree slp_node, slp_instance slp_node_instance) { tree vectype_out = STMT_VINFO_VECTYPE (stmt_info); class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); int i; - int ncopies; - int j; bool nested_cycle = false; int vec_num; @@ -9058,25 +8832,12 @@ vect_transform_cycle_phi (loop_vec_info loop_vinfo, /* Leave the scalar phi in place. */ return true; - if (slp_node) - { - vec_num = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node); - ncopies = 1; - } - else - { - vec_num = 1; - ncopies = vect_get_num_copies (loop_vinfo, - STMT_VINFO_VECTYPE (stmt_info)); - } + vec_num = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node); /* Check whether we should use a single PHI node and accumulate vectors to one before the backedge. */ if (STMT_VINFO_FORCE_SINGLE_CYCLE (reduc_info)) - { - ncopies = 1; - vec_num = 1; - } + vec_num = 1; /* Create the destination vector */ gphi *phi = as_a <gphi *> (stmt_info->stmt); @@ -9086,158 +8847,86 @@ vect_transform_cycle_phi (loop_vec_info loop_vinfo, /* Get the loop-entry arguments. 
*/ tree vec_initial_def = NULL_TREE; auto_vec<tree> vec_initial_defs; - if (slp_node) + vec_initial_defs.reserve (vec_num); + /* Optimize: if initial_def is for REDUC_MAX smaller than the base + and we can't use zero for induc_val, use initial_def. Similarly + for REDUC_MIN and initial_def larger than the base. */ + if (STMT_VINFO_REDUC_TYPE (reduc_info) == INTEGER_INDUC_COND_REDUCTION) { - vec_initial_defs.reserve (vec_num); - /* Optimize: if initial_def is for REDUC_MAX smaller than the base - and we can't use zero for induc_val, use initial_def. Similarly - for REDUC_MIN and initial_def larger than the base. */ - if (STMT_VINFO_REDUC_TYPE (reduc_info) == INTEGER_INDUC_COND_REDUCTION) - { - gcc_assert (SLP_TREE_LANES (slp_node) == 1); - tree initial_def = vect_phi_initial_value (phi); - reduc_info->reduc_initial_values.safe_push (initial_def); - tree induc_val = STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info); - if (TREE_CODE (initial_def) == INTEGER_CST - && !integer_zerop (induc_val) - && ((STMT_VINFO_REDUC_CODE (reduc_info) == MAX_EXPR - && tree_int_cst_lt (initial_def, induc_val)) - || (STMT_VINFO_REDUC_CODE (reduc_info) == MIN_EXPR - && tree_int_cst_lt (induc_val, initial_def)))) - { - induc_val = initial_def; - /* Communicate we used the initial_def to epilouge - generation. */ - STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info) = NULL_TREE; - } - vec_initial_defs.quick_push - (build_vector_from_val (vectype_out, induc_val)); - } - else if (nested_cycle) - { - unsigned phi_idx = loop_preheader_edge (loop)->dest_idx; - vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[phi_idx], - &vec_initial_defs); - } - else + gcc_assert (SLP_TREE_LANES (slp_node) == 1); + tree initial_def = vect_phi_initial_value (phi); + reduc_info->reduc_initial_values.safe_push (initial_def); + tree induc_val = STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info); + if (TREE_CODE (initial_def) == INTEGER_CST + && !integer_zerop (induc_val) + && ((STMT_VINFO_REDUC_CODE (reduc_info) == MAX_EXPR + && tree_int_cst_lt (initial_def, induc_val)) + || (STMT_VINFO_REDUC_CODE (reduc_info) == MIN_EXPR + && tree_int_cst_lt (induc_val, initial_def)))) { - gcc_assert (slp_node == slp_node_instance->reduc_phis); - vec<tree> &initial_values = reduc_info->reduc_initial_values; - vec<stmt_vec_info> &stmts = SLP_TREE_SCALAR_STMTS (slp_node); - - unsigned int num_phis = stmts.length (); - if (REDUC_GROUP_FIRST_ELEMENT (reduc_stmt_info)) - num_phis = 1; - initial_values.reserve (num_phis); - for (unsigned int i = 0; i < num_phis; ++i) - { - gphi *this_phi = as_a<gphi *> (stmts[i]->stmt); - initial_values.quick_push (vect_phi_initial_value (this_phi)); - } - if (vec_num == 1) - vect_find_reusable_accumulator (loop_vinfo, reduc_info); - if (!initial_values.is_empty ()) - { - tree initial_value - = (num_phis == 1 ? initial_values[0] : NULL_TREE); - code_helper code = STMT_VINFO_REDUC_CODE (reduc_info); - tree neutral_op - = neutral_op_for_reduction (TREE_TYPE (vectype_out), - code, initial_value); - /* Try to simplify the vector initialization by applying an - adjustment after the reduction has been performed. This - can also break a critical path but on the other hand - requires to keep the initial value live across the loop. 
*/ - if (neutral_op - && initial_values.length () == 1 - && !reduc_info->reused_accumulator - && STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def - && !operand_equal_p (neutral_op, initial_values[0])) - { - STMT_VINFO_REDUC_EPILOGUE_ADJUSTMENT (reduc_info) - = initial_values[0]; - initial_values[0] = neutral_op; - } - get_initial_defs_for_reduction (loop_vinfo, reduc_info, - &vec_initial_defs, vec_num, - stmts.length (), neutral_op); - } + induc_val = initial_def; + /* Communicate we used the initial_def to epilouge + generation. */ + STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info) = NULL_TREE; } + vec_initial_defs.quick_push + (build_vector_from_val (vectype_out, induc_val)); + } + else if (nested_cycle) + { + unsigned phi_idx = loop_preheader_edge (loop)->dest_idx; + vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)[phi_idx], + &vec_initial_defs); } else { - /* Get at the scalar def before the loop, that defines the initial - value of the reduction variable. */ - tree initial_def = vect_phi_initial_value (phi); - reduc_info->reduc_initial_values.safe_push (initial_def); - /* Optimize: if initial_def is for REDUC_MAX smaller than the base - and we can't use zero for induc_val, use initial_def. Similarly - for REDUC_MIN and initial_def larger than the base. */ - if (STMT_VINFO_REDUC_TYPE (reduc_info) == INTEGER_INDUC_COND_REDUCTION) - { - tree induc_val = STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info); - if (TREE_CODE (initial_def) == INTEGER_CST - && !integer_zerop (induc_val) - && ((STMT_VINFO_REDUC_CODE (reduc_info) == MAX_EXPR - && tree_int_cst_lt (initial_def, induc_val)) - || (STMT_VINFO_REDUC_CODE (reduc_info) == MIN_EXPR - && tree_int_cst_lt (induc_val, initial_def)))) + gcc_assert (slp_node == slp_node_instance->reduc_phis); + vec<tree> &initial_values = reduc_info->reduc_initial_values; + vec<stmt_vec_info> &stmts = SLP_TREE_SCALAR_STMTS (slp_node); + + unsigned int num_phis = stmts.length (); + if (REDUC_GROUP_FIRST_ELEMENT (reduc_stmt_info)) + num_phis = 1; + initial_values.reserve (num_phis); + for (unsigned int i = 0; i < num_phis; ++i) + { + gphi *this_phi = as_a<gphi *> (stmts[i]->stmt); + initial_values.quick_push (vect_phi_initial_value (this_phi)); + } + if (vec_num == 1) + vect_find_reusable_accumulator (loop_vinfo, reduc_info); + if (!initial_values.is_empty ()) + { + tree initial_value + = (num_phis == 1 ? initial_values[0] : NULL_TREE); + code_helper code = STMT_VINFO_REDUC_CODE (reduc_info); + tree neutral_op + = neutral_op_for_reduction (TREE_TYPE (vectype_out), + code, initial_value); + /* Try to simplify the vector initialization by applying an + adjustment after the reduction has been performed. This + can also break a critical path but on the other hand + requires to keep the initial value live across the loop. */ + if (neutral_op + && initial_values.length () == 1 + && !reduc_info->reused_accumulator + && STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def + && !operand_equal_p (neutral_op, initial_values[0])) { - induc_val = initial_def; - /* Communicate we used the initial_def to epilouge - generation. */ - STMT_VINFO_VEC_INDUC_COND_INITIAL_VAL (reduc_info) = NULL_TREE; - } - vec_initial_def = build_vector_from_val (vectype_out, induc_val); - } - else if (nested_cycle) - { - /* Do not use an adjustment def as that case is not supported - correctly if ncopies is not one. 
*/ - vect_get_vec_defs_for_operand (loop_vinfo, reduc_stmt_info, - ncopies, initial_def, - &vec_initial_defs); - } - else if (STMT_VINFO_REDUC_TYPE (reduc_info) == CONST_COND_REDUCTION - || STMT_VINFO_REDUC_TYPE (reduc_info) == COND_REDUCTION) - /* Fill the initial vector with the initial scalar value. */ - vec_initial_def - = get_initial_def_for_reduction (loop_vinfo, reduc_stmt_info, - initial_def, initial_def); - else - { - if (ncopies == 1) - vect_find_reusable_accumulator (loop_vinfo, reduc_info); - if (!reduc_info->reduc_initial_values.is_empty ()) - { - initial_def = reduc_info->reduc_initial_values[0]; - code_helper code = STMT_VINFO_REDUC_CODE (reduc_info); - tree neutral_op - = neutral_op_for_reduction (TREE_TYPE (initial_def), - code, initial_def); - gcc_assert (neutral_op); - /* Try to simplify the vector initialization by applying an - adjustment after the reduction has been performed. */ - if (!reduc_info->reused_accumulator - && STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def - && !operand_equal_p (neutral_op, initial_def)) - { - STMT_VINFO_REDUC_EPILOGUE_ADJUSTMENT (reduc_info) - = initial_def; - initial_def = neutral_op; - } - vec_initial_def - = get_initial_def_for_reduction (loop_vinfo, reduc_info, - initial_def, neutral_op); + STMT_VINFO_REDUC_EPILOGUE_ADJUSTMENT (reduc_info) + = initial_values[0]; + initial_values[0] = neutral_op; } + get_initial_defs_for_reduction (loop_vinfo, reduc_info, + &vec_initial_defs, vec_num, + stmts.length (), neutral_op); } } if (vec_initial_def) { - vec_initial_defs.create (ncopies); - for (i = 0; i < ncopies; ++i) - vec_initial_defs.quick_push (vec_initial_def); + vec_initial_defs.create (1); + vec_initial_defs.quick_push (vec_initial_def); } if (auto *accumulator = reduc_info->reused_accumulator) @@ -9312,29 +9001,15 @@ vect_transform_cycle_phi (loop_vec_info loop_vinfo, for (i = 0; i < vec_num; i++) { tree vec_init_def = vec_initial_defs[i]; - for (j = 0; j < ncopies; j++) - { - /* Create the reduction-phi that defines the reduction - operand. */ - gphi *new_phi = create_phi_node (vec_dest, loop->header); - - /* Set the loop-entry arg of the reduction-phi. */ - if (j != 0 && nested_cycle) - vec_init_def = vec_initial_defs[j]; - add_phi_arg (new_phi, vec_init_def, loop_preheader_edge (loop), - UNKNOWN_LOCATION); + /* Create the reduction-phi that defines the reduction + operand. */ + gphi *new_phi = create_phi_node (vec_dest, loop->header); + add_phi_arg (new_phi, vec_init_def, loop_preheader_edge (loop), + UNKNOWN_LOCATION); - /* The loop-latch arg is set in epilogue processing. */ + /* The loop-latch arg is set in epilogue processing. */ - if (slp_node) - slp_node->push_vec_def (new_phi); - else - { - if (j == 0) - *vec_stmt = new_phi; - STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_phi); - } - } + slp_node->push_vec_def (new_phi); } return true; @@ -9344,7 +9019,7 @@ vect_transform_cycle_phi (loop_vec_info loop_vinfo, bool vectorizable_lc_phi (loop_vec_info loop_vinfo, - stmt_vec_info stmt_info, gimple **vec_stmt, + stmt_vec_info stmt_info, slp_tree slp_node) { if (!loop_vinfo @@ -9356,22 +9031,25 @@ vectorizable_lc_phi (loop_vec_info loop_vinfo, && STMT_VINFO_DEF_TYPE (stmt_info) != vect_double_reduction_def) return false; - if (!vec_stmt) /* transformation not required. */ + /* Deal with copies from externs or constants that disguise as + loop-closed PHI nodes (PR97886). 
*/ + if (!vect_maybe_update_slp_op_vectype (SLP_TREE_CHILDREN (slp_node)[0], + SLP_TREE_VECTYPE (slp_node))) { - /* Deal with copies from externs or constants that disguise as - loop-closed PHI nodes (PR97886). */ - if (slp_node - && !vect_maybe_update_slp_op_vectype (SLP_TREE_CHILDREN (slp_node)[0], - SLP_TREE_VECTYPE (slp_node))) - { - if (dump_enabled_p ()) - dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, - "incompatible vector types for invariants\n"); - return false; - } - STMT_VINFO_TYPE (stmt_info) = lc_phi_info_type; - return true; + if (dump_enabled_p ()) + dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, + "incompatible vector types for invariants\n"); + return false; } + STMT_VINFO_TYPE (stmt_info) = lc_phi_info_type; + return true; +} + +bool +vect_transform_lc_phi (loop_vec_info loop_vinfo, + stmt_vec_info stmt_info, + slp_tree slp_node) +{ tree vectype = STMT_VINFO_VECTYPE (stmt_info); tree scalar_dest = gimple_phi_result (stmt_info->stmt); @@ -9379,21 +9057,15 @@ vectorizable_lc_phi (loop_vec_info loop_vinfo, edge e = single_pred_edge (bb); tree vec_dest = vect_create_destination_var (scalar_dest, vectype); auto_vec<tree> vec_oprnds; - vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, - !slp_node ? vect_get_num_copies (loop_vinfo, vectype) : 1, + vect_get_vec_defs (loop_vinfo, stmt_info, slp_node, 1, gimple_phi_arg_def (stmt_info->stmt, 0), &vec_oprnds); for (unsigned i = 0; i < vec_oprnds.length (); i++) { /* Create the vectorized LC PHI node. */ gphi *new_phi = create_phi_node (vec_dest, bb); add_phi_arg (new_phi, vec_oprnds[i], e, UNKNOWN_LOCATION); - if (slp_node) - slp_node->push_vec_def (new_phi); - else - STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_phi); + slp_node->push_vec_def (new_phi); } - if (!slp_node) - *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0]; return true; } @@ -12203,7 +11875,7 @@ update_epilogue_loop_vinfo (class loop *epilogue, tree advance) gimple *stmt = STMT_VINFO_STMT (related_vinfo); stmt_worklist.safe_push (stmt); /* Set BB such that the assert in - 'get_initial_def_for_reduction' is able to determine that + 'get_initial_defs_for_reduction' is able to determine that the BB of the related stmt is inside this loop. 
*/ gimple_set_bb (stmt, gimple_bb (new_stmt)); diff --git a/gcc/tree-vect-stmts.cc b/gcc/tree-vect-stmts.cc index a8762ba..3373d75 100644 --- a/gcc/tree-vect-stmts.cc +++ b/gcc/tree-vect-stmts.cc @@ -14040,7 +14040,7 @@ vect_analyze_stmt (vec_info *vinfo, || vectorizable_comparison (vinfo, stmt_info, NULL, NULL, node, cost_vec) || vectorizable_lc_phi (as_a <loop_vec_info> (vinfo), - stmt_info, NULL, node) + stmt_info, node) || vectorizable_recurr (as_a <loop_vec_info> (vinfo), stmt_info, NULL, node, cost_vec) || vectorizable_early_exit (vinfo, stmt_info, NULL, NULL, node, @@ -14207,19 +14207,19 @@ vect_transform_stmt (vec_info *vinfo, case reduc_vec_info_type: done = vect_transform_reduction (as_a <loop_vec_info> (vinfo), stmt_info, - gsi, &vec_stmt, slp_node); + gsi, slp_node); gcc_assert (done); break; case cycle_phi_info_type: done = vect_transform_cycle_phi (as_a <loop_vec_info> (vinfo), stmt_info, - &vec_stmt, slp_node, slp_node_instance); + slp_node, slp_node_instance); gcc_assert (done); break; case lc_phi_info_type: - done = vectorizable_lc_phi (as_a <loop_vec_info> (vinfo), - stmt_info, &vec_stmt, slp_node); + done = vect_transform_lc_phi (as_a <loop_vec_info> (vinfo), + stmt_info, slp_node); gcc_assert (done); break; diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h index 3d11559..a2f33a5 100644 --- a/gcc/tree-vectorizer.h +++ b/gcc/tree-vectorizer.h @@ -2625,12 +2625,11 @@ extern bool vectorizable_induction (loop_vec_info, stmt_vec_info, stmt_vector_for_cost *); extern bool vect_transform_reduction (loop_vec_info, stmt_vec_info, gimple_stmt_iterator *, - gimple **, slp_tree); + slp_tree); extern bool vect_transform_cycle_phi (loop_vec_info, stmt_vec_info, - gimple **, slp_tree, slp_instance); -extern bool vectorizable_lc_phi (loop_vec_info, stmt_vec_info, - gimple **, slp_tree); +extern bool vectorizable_lc_phi (loop_vec_info, stmt_vec_info, slp_tree); +extern bool vect_transform_lc_phi (loop_vec_info, stmt_vec_info, slp_tree); extern bool vectorizable_phi (vec_info *, stmt_vec_info, gimple **, slp_tree, stmt_vector_for_cost *); extern bool vectorizable_recurr (loop_vec_info, stmt_vec_info, |
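
For reference, the epilogue code kept by this patch leans on the notion of a neutral value (neutral_op_for_reduction): the element that leaves the reduction unchanged, used to pad the initial vector and to splat variable-length accumulators. A simplified stand-alone sketch of that mapping, not the GCC implementation (the enum and helper are invented for illustration):

#include <optional>

/* Invented enum standing in for the reduction codes the vectorizer
   cares about.  */
enum class reduc_op { plus, mult, bit_and, bit_ior, bit_xor, min, max };

/* Return a value V for which "x OP V == x", if there is one; the
   MIN/MAX cases reuse the initial value, since min (x, x) == x.  */
std::optional<int>
neutral_value (reduc_op op, int initial_value)
{
  switch (op)
    {
    case reduc_op::plus:
    case reduc_op::bit_ior:
    case reduc_op::bit_xor:
      return 0;              /* x + 0, x | 0, x ^ 0 are all x.  */
    case reduc_op::mult:
      return 1;              /* x * 1 == x.  */
    case reduc_op::bit_and:
      return -1;             /* all-ones mask: x & ~0 == x.  */
    case reduc_op::min:
    case reduc_op::max:
      return initial_value;
    }
  return std::nullopt;
}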
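
vectorize_fold_left_reduction, which this patch switches over to take its operands from the SLP node only, exists for reductions that must stay in the original element order, typically floating-point sums compiled without reassociation. A rough model of the contrast with an ordinary reassociating reduction, with hypothetical helper names and data chosen so the two orderings visibly differ:

#include <cstdio>

/* In-order (fold-left) reduction: fold each element into the single
   accumulator in original order, matching the scalar loop's rounding.  */
static float
fold_left_sum (float acc, const float *v, unsigned n)
{
  for (unsigned i = 0; i < n; ++i)
    acc = acc + v[i];
  return acc;
}

/* Reassociating reduction: keep VF partial accumulators and combine
   them in the epilogue - faster, but it reorders the FP additions.
   Assumes vf <= 4.  */
static float
reassociated_sum (float acc, const float *v, unsigned n, unsigned vf)
{
  float partial[4] = { acc, 0.0f, 0.0f, 0.0f };
  for (unsigned i = 0; i < n; ++i)
    partial[i % vf] += v[i];
  float sum = 0.0f;
  for (unsigned i = 0; i < vf; ++i)
    sum += partial[i];
  return sum;
}

int
main ()
{
  const float v[4] = { 1e8f, 1.0f, -1e8f, 1.0f };
  /* Prints "1 vs 2": the reassociated variant pairs 1e8 with -1e8 and
     keeps both 1.0 contributions, while the in-order one loses one of
     them to rounding, exactly as the scalar loop would.  */
  std::printf ("%g vs %g\n",
               fold_left_sum (0.0f, v, 4),
               reassociated_sum (0.0f, v, 4, 2));
  return 0;
}
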
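
The COND_REDUCTION handling that the patch rewrites to walk the SLP tree tracks, per lane, the loop index of the last match (the INDEX_COND_EXPR mentioned in the comments) and then reduces those indices with a max in the epilogue. A rough scalar model of that scheme, with invented data and lane count:

#include <cstdio>

int
main ()
{
  const unsigned vf = 4;                  /* invented lane count */
  const int data[8]  = { 3, 9, 2, 9, 5, 1, 9, 4 };
  const int value[8] = { 10, 11, 12, 13, 14, 15, 16, 17 };

  unsigned last_idx[4] = { 0, 0, 0, 0 };  /* per-lane index of last match  */
  int      last_val[4] = { 0, 0, 0, 0 };  /* per-lane candidate result     */

  /* Vector loop, modelled scalar-wise: lane l handles elements
     l, l + vf, l + 2*vf, ...  Indices are 1-based so 0 means "no match".  */
  for (unsigned i = 0; i < 8; ++i)
    if (data[i] == 9)
      {
        last_idx[i % vf] = i + 1;
        last_val[i % vf] = value[i];
      }

  /* Epilogue: the lane holding the largest index has the value the
     scalar loop would have written last.  */
  unsigned best = 0;
  int result = -1;                        /* fallback when nothing matched */
  for (unsigned l = 0; l < vf; ++l)
    if (last_idx[l] > best)
      {
        best = last_idx[l];
        result = last_val[l];
      }
  std::printf ("%d\n", result);           /* prints 16 (last match at i == 6) */
  return 0;
}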