author     David Malcolm <dmalcolm@redhat.com>     2018-11-13 16:10:13 +0000
committer  David Malcolm <dmalcolm@gcc.gnu.org>    2018-11-13 16:10:13 +0000
commit     bbeeac91f96bdcbc3eb40ec68c1fd8cf5d4a038d (patch)
tree       14626efce8213f765013cedf40bc8ad860ab9c9b /gcc/tree-vect-slp.c
parent     9228f64cc2ceb54764a302103e3b754c0b6235a2 (diff)
Ensure that dump calls are guarded with dump_enabled_p
If called when !dump_enabled_p, the dump_* functions effectively do
nothing, but as of r263178 doing "nothing" involves non-trivial work
internally.

I wasn't sure whether the dump_* functions should assert that
dump_enabled_p () is true when they're called, or whether they should
bail out immediately in that case, so this patch implements both: we
get an assertion failure when assertions are enabled, and otherwise
bail out early for the !dump_enabled_p case.

The patch also fixes all of the places I found during testing (on
x86_64-pc-linux-gnu) that call into dump_* but which weren't guarded
by "if (dump_enabled_p ())".

gcc/ChangeLog:
	* dumpfile.c (VERIFY_DUMP_ENABLED_P): New macro.
	(dump_gimple_stmt): Use it.
	(dump_gimple_stmt_loc): Likewise.
	(dump_gimple_expr): Likewise.
	(dump_gimple_expr_loc): Likewise.
	(dump_generic_expr): Likewise.
	(dump_generic_expr_loc): Likewise.
	(dump_printf): Likewise.
	(dump_printf_loc): Likewise.
	(dump_dec): Likewise.
	(dump_dec): Likewise.
	(dump_hex): Likewise.
	(dump_symtab_node): Likewise.

gcc/ChangeLog:
	* gimple-loop-interchange.cc (tree_loop_interchange::interchange):
	Guard dump call with dump_enabled_p.
	* graphite-isl-ast-to-gimple.c (graphite_regenerate_ast_isl):
	Likewise.
	* graphite-optimize-isl.c (optimize_isl): Likewise.
	* graphite.c (graphite_transform_loops): Likewise.
	* tree-loop-distribution.c (pass_loop_distribution::execute):
	Likewise.
	* tree-parloops.c (parallelize_loops): Likewise.
	* tree-ssa-loop-niter.c (number_of_iterations_exit): Likewise.
	* tree-vect-data-refs.c (vect_analyze_group_access_1): Likewise.
	(vect_prune_runtime_alias_test_list): Likewise.
	* tree-vect-loop.c (vect_update_vf_for_slp): Likewise.
	(vect_estimate_min_profitable_iters): Likewise.
	* tree-vect-slp.c (vect_record_max_nunits): Likewise.
	(vect_build_slp_tree_2): Likewise.
	(vect_supported_load_permutation_p): Likewise.
	(vect_slp_analyze_operations): Likewise.
	(vect_slp_analyze_bb_1): Likewise.
	(vect_slp_bb): Likewise.
	* tree-vect-stmts.c (vect_analyze_stmt): Likewise.
	* tree-vectorizer.c (try_vectorize_loop_1): Likewise.
	(pass_slp_vectorize::execute): Likewise.
	(increase_alignment): Likewise.

From-SVN: r266080
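[Editor's note] As a rough illustration of the dumpfile.c side of this change, the assert-or-bail-out behaviour could look something like the sketch below. The macro name VERIFY_DUMP_ENABLED_P and the dump_printf entry point are taken from the ChangeLog and the calls visible in the diff; the exact body shown here is an assumption, not the committed implementation.

  /* Sketch only: verify that the caller checked dump_enabled_p ().
     With assertion checking enabled this aborts; otherwise it bails
     out early so that no non-trivial dump work is performed.  */
  #define VERIFY_DUMP_ENABLED_P \
    do \
      { \
        gcc_assert (dump_enabled_p ()); \
        if (!dump_enabled_p ()) \
          return; \
      } \
    while (0)

  /* Hypothetical dump_* entry point using the macro; the first
     parameter matches how dump_printf is called in the diff below
     (e.g. dump_printf (MSG_NOTE, "%d ", j)).  */
  void
  dump_printf (dump_flags_t dump_kind, const char *format, ...)
  {
    VERIFY_DUMP_ENABLED_P;
    /* ... formatting work only happens when dumping is enabled ... */
  }

Call sites, as in the hunks below, keep the cheaper "if (dump_enabled_p ())" guard so that argument setup and format processing are skipped entirely in the common case where dumping is off.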
Diffstat (limited to 'gcc/tree-vect-slp.c')
-rw-r--r--  gcc/tree-vect-slp.c  84
1 file changed, 49 insertions(+), 35 deletions(-)
diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c
index f802b00..f2bb8da 100644
--- a/gcc/tree-vect-slp.c
+++ b/gcc/tree-vect-slp.c
@@ -575,9 +575,10 @@ vect_record_max_nunits (stmt_vec_info stmt_info, unsigned int group_size,
&& (!nunits.is_constant (&const_nunits)
|| const_nunits > group_size))
{
- dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
- "Build SLP failed: unrolling required "
- "in basic block SLP\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+ "Build SLP failed: unrolling required "
+ "in basic block SLP\n");
/* Fatal mismatch. */
return false;
}
@@ -1231,9 +1232,10 @@ vect_build_slp_tree_2 (vec_info *vinfo,
vect_free_slp_tree (grandchild, false);
SLP_TREE_CHILDREN (child).truncate (0);
- dump_printf_loc (MSG_NOTE, vect_location,
- "Building parent vector operands from "
- "scalars instead\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Building parent vector operands from "
+ "scalars instead\n");
oprnd_info->def_stmts = vNULL;
SLP_TREE_DEF_TYPE (child) = vect_external_def;
children.safe_push (child);
@@ -1261,8 +1263,9 @@ vect_build_slp_tree_2 (vec_info *vinfo,
scalar version. */
&& !is_pattern_stmt_p (stmt_info))
{
- dump_printf_loc (MSG_NOTE, vect_location,
- "Building vector operands from scalars\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Building vector operands from scalars\n");
child = vect_create_new_slp_node (oprnd_info->def_stmts);
SLP_TREE_DEF_TYPE (child) = vect_external_def;
children.safe_push (child);
@@ -1334,16 +1337,19 @@ vect_build_slp_tree_2 (vec_info *vinfo,
while (j != group_size);
/* Swap mismatched definition stmts. */
- dump_printf_loc (MSG_NOTE, vect_location,
- "Re-trying with swapped operands of stmts ");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Re-trying with swapped operands of stmts ");
for (j = 0; j < group_size; ++j)
if (matches[j] == !swap_not_matching)
{
std::swap (oprnds_info[0]->def_stmts[j],
oprnds_info[1]->def_stmts[j]);
- dump_printf (MSG_NOTE, "%d ", j);
+ if (dump_enabled_p ())
+ dump_printf (MSG_NOTE, "%d ", j);
}
- dump_printf (MSG_NOTE, "\n");
+ if (dump_enabled_p ())
+ dump_printf (MSG_NOTE, "\n");
/* And try again with scratch 'matches' ... */
bool *tem = XALLOCAVEC (bool, group_size);
if ((child = vect_build_slp_tree (vinfo, oprnd_info->def_stmts,
@@ -1399,9 +1405,10 @@ vect_build_slp_tree_2 (vec_info *vinfo,
vect_free_slp_tree (grandchild, false);
SLP_TREE_CHILDREN (child).truncate (0);
- dump_printf_loc (MSG_NOTE, vect_location,
- "Building parent vector operands from "
- "scalars instead\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "Building parent vector operands from "
+ "scalars instead\n");
oprnd_info->def_stmts = vNULL;
SLP_TREE_DEF_TYPE (child) = vect_external_def;
children.safe_push (child);
@@ -1757,9 +1764,10 @@ vect_supported_load_permutation_p (slp_instance slp_instn)
if (!TYPE_VECTOR_SUBPARTS (vectype).is_constant (&nunits)
|| maxk >= (DR_GROUP_SIZE (group_info) & ~(nunits - 1)))
{
- dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
- "BB vectorization with gaps at the end of "
- "a load is not supported\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
+ "BB vectorization with gaps at the end of "
+ "a load is not supported\n");
return false;
}
@@ -1769,9 +1777,10 @@ vect_supported_load_permutation_p (slp_instance slp_instn)
if (!vect_transform_slp_perm_load (node, tem, NULL,
1, slp_instn, true, &n_perms))
{
- dump_printf_loc (MSG_MISSED_OPTIMIZATION,
- vect_location,
- "unsupported load permutation\n");
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_MISSED_OPTIMIZATION,
+ vect_location,
+ "unsupported load permutation\n");
return false;
}
}
@@ -2592,9 +2601,10 @@ vect_slp_analyze_operations (vec_info *vinfo)
{
slp_tree node = SLP_INSTANCE_TREE (instance);
stmt_vec_info stmt_info = SLP_TREE_SCALAR_STMTS (node)[0];
- dump_printf_loc (MSG_NOTE, vect_location,
- "removing SLP instance operations starting from: %G",
- stmt_info->stmt);
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "removing SLP instance operations starting from: %G",
+ stmt_info->stmt);
vect_free_slp_instance (instance, false);
vinfo->slp_instances.ordered_remove (i);
cost_vec.release ();
@@ -2888,9 +2898,10 @@ vect_slp_analyze_bb_1 (gimple_stmt_iterator region_begin,
{
slp_tree node = SLP_INSTANCE_TREE (instance);
stmt_vec_info stmt_info = SLP_TREE_SCALAR_STMTS (node)[0];
- dump_printf_loc (MSG_NOTE, vect_location,
- "removing SLP instance operations starting from: %G",
- stmt_info->stmt);
+ if (dump_enabled_p ())
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "removing SLP instance operations starting from: %G",
+ stmt_info->stmt);
vect_free_slp_instance (instance, false);
BB_VINFO_SLP_INSTANCES (bb_vinfo).ordered_remove (i);
continue;
@@ -3006,14 +3017,17 @@ vect_slp_bb (basic_block bb)
vect_schedule_slp (bb_vinfo);
unsigned HOST_WIDE_INT bytes;
- if (current_vector_size.is_constant (&bytes))
- dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
- "basic block part vectorized using %wu byte "
- "vectors\n", bytes);
- else
- dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
- "basic block part vectorized using variable "
- "length vectors\n");
+ if (dump_enabled_p ())
+ {
+ if (current_vector_size.is_constant (&bytes))
+ dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
+ "basic block part vectorized using %wu byte "
+ "vectors\n", bytes);
+ else
+ dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
+ "basic block part vectorized using variable "
+ "length vectors\n");
+ }
vectorized = true;
}