author     Richard Sandiford <richard.sandiford@linaro.org>  2017-08-29 07:47:05 +0000
committer  Richard Sandiford <rsandifo@gcc.gnu.org>          2017-08-29 07:47:05 +0000
commit     a844293de1d30da8ddba7384fc22ae59e857709f (patch)
tree       29ae014a56a196ff0764a9dd8c569efb193cb413 /gcc
parent     130fcab02f1fbb097fc52fc9a8892472e3fa0c6c (diff)
Set the call nothrow flag more often
This patch sets the nothrow flag for various calls to internal functions
that are not inherently NOTHROW (and so can't be declared that way in
internal-fn.def) but that are used in contexts that can guarantee
NOTHROWness.

2017-08-29  Richard Sandiford  <richard.sandiford@linaro.org>

gcc/
	* gimplify.c (gimplify_call_expr): Copy the nothrow flag to
	calls to internal functions.
	(gimplify_modify_expr): Likewise.
	* tree-call-cdce.c (use_internal_fn): Likewise.
	* tree-ssa-math-opts.c (pass_cse_reciprocals::execute): Likewise.
	(convert_to_divmod): Set the nothrow flag.
	* tree-if-conv.c (predicate_mem_writes): Likewise.
	* tree-vect-stmts.c (vectorizable_mask_load_store): Likewise.
	(vectorizable_call): Likewise.
	(vectorizable_store): Likewise.
	(vectorizable_load): Likewise.
	* tree-vect-patterns.c (vect_recog_pow_pattern): Likewise.
	(vect_recog_mask_conversion_pattern): Likewise.

From-SVN: r251401
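The idea at each site is the same: when a statement is replaced by a call to an
internal function, the replacement either copies the nothrow flag from the
statement it replaces or sets it to true where the surrounding context already
rules out exceptions.  The sketch below illustrates the copying case with the
GIMPLE call API; it is not part of the patch, the helper name
replace_with_internal_fn is hypothetical, it assumes the usual GCC internal
headers (config.h, system.h, coretypes.h, backend.h, tree.h, gimple.h,
gimple-iterator.h), and it leaves virtual operand bookkeeping to the caller
for brevity.

/* Hypothetical helper, for illustration only: replace CALL with a call
   to the internal function IFN taking the same arguments, carrying over
   the lhs, the location and, as in this patch, the nothrow flag of the
   original statement.  Virtual operands are assumed to be handled by
   the caller.  */

static gcall *
replace_with_internal_fn (gimple_stmt_iterator *gsi, gcall *call,
			  internal_fn ifn)
{
  unsigned int nargs = gimple_call_num_args (call);
  auto_vec<tree> args (nargs);
  for (unsigned int i = 0; i < nargs; i++)
    args.quick_push (gimple_call_arg (call, i));

  gcall *new_call = gimple_build_call_internal_vec (ifn, args);
  gimple_call_set_lhs (new_call, gimple_call_lhs (call));
  gimple_set_location (new_call, gimple_location (call));
  /* Internal functions that are not inherently NOTHROW cannot be marked
     that way in internal-fn.def, so propagate whatever the original
     call promised.  */
  gimple_call_set_nothrow (new_call, gimple_call_nothrow_p (call));
  gsi_replace (gsi, new_call, true);
  return new_call;
}

Where the transformation itself guarantees that the new call cannot throw, for
example the IFN_DIVMOD call built in convert_to_divmod, which only replaces
statements already known not to throw, the patch instead sets the flag
unconditionally with gimple_call_set_nothrow (call, true).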
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog             | 16
-rw-r--r--  gcc/gimplify.c            |  4
-rw-r--r--  gcc/tree-call-cdce.c      |  1
-rw-r--r--  gcc/tree-if-conv.c        |  3
-rw-r--r--  gcc/tree-ssa-math-opts.c  |  4
-rw-r--r--  gcc/tree-vect-patterns.c  |  5
-rw-r--r--  gcc/tree-vect-stmts.c     | 75
7 files changed, 79 insertions, 29 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 6c853d9..f8f6491 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,19 @@
+2017-08-29 Richard Sandiford <richard.sandiford@linaro.org>
+
+ * gimplify.c (gimplify_call_expr): Copy the nothrow flag to
+ calls to internal functions.
+ (gimplify_modify_expr): Likewise.
+ * tree-call-cdce.c (use_internal_fn): Likewise.
+ * tree-ssa-math-opts.c (pass_cse_reciprocals::execute): Likewise.
+ (convert_to_divmod): Set the nothrow flag.
+ * tree-if-conv.c (predicate_mem_writes): Likewise.
+ * tree-vect-stmts.c (vectorizable_mask_load_store): Likewise.
+ (vectorizable_call): Likewise.
+ (vectorizable_store): Likewise.
+ (vectorizable_load): Likewise.
+ * tree-vect-patterns.c (vect_recog_pow_pattern): Likewise.
+ (vect_recog_mask_conversion_pattern): Likewise.
+
2017-08-29 Martin Liska <mliska@suse.cz>
PR other/39851
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index e52d7dc..8b29a71 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -3150,7 +3150,8 @@ gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
if (EXPR_CILK_SPAWN (*expr_p))
gimplify_cilk_detach (pre_p);
- gimple *call = gimple_build_call_internal_vec (ifn, vargs);
+ gcall *call = gimple_build_call_internal_vec (ifn, vargs);
+ gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
gimplify_seq_add_stmt (pre_p, call);
return GS_ALL_DONE;
}
@@ -5636,6 +5637,7 @@ gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
}
call_stmt = gimple_build_call_internal_vec (ifn, vargs);
+ gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
}
else
diff --git a/gcc/tree-call-cdce.c b/gcc/tree-call-cdce.c
index 862d1a6..1578350 100644
--- a/gcc/tree-call-cdce.c
+++ b/gcc/tree-call-cdce.c
@@ -1019,6 +1019,7 @@ use_internal_fn (gcall *call)
args.safe_push (gimple_call_arg (call, i));
gcall *new_call = gimple_build_call_internal_vec (ifn, args);
gimple_set_location (new_call, gimple_location (call));
+ gimple_call_set_nothrow (new_call, gimple_call_nothrow_p (call));
/* Transfer the LHS to the new call. */
tree lhs = gimple_call_lhs (call);
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index d78731f..dd686c1 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -2219,7 +2219,7 @@ predicate_mem_writes (loop_p loop)
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
tree ref, addr, ptr, mask;
- gimple *new_stmt;
+ gcall *new_stmt;
gimple_seq stmts = NULL;
int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (lhs)));
ref = TREE_CODE (lhs) == SSA_NAME ? rhs : lhs;
@@ -2281,6 +2281,7 @@ predicate_mem_writes (loop_p loop)
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
}
+ gimple_call_set_nothrow (new_stmt, true);
gsi_replace (&gsi, new_stmt, true);
}
diff --git a/gcc/tree-ssa-math-opts.c b/gcc/tree-ssa-math-opts.c
index 073c9dc..df0bcd6 100644
--- a/gcc/tree-ssa-math-opts.c
+++ b/gcc/tree-ssa-math-opts.c
@@ -690,6 +690,8 @@ pass_cse_reciprocals::execute (function *fun)
gimple_set_vdef (stmt2, gimple_vdef (call));
SSA_NAME_DEF_STMT (gimple_vdef (stmt2)) = stmt2;
}
+ gimple_call_set_nothrow (stmt2,
+ gimple_call_nothrow_p (call));
gimple_set_vuse (stmt2, gimple_vuse (call));
gimple_stmt_iterator gsi2 = gsi_for_stmt (call);
gsi_replace (&gsi2, stmt2, true);
@@ -4100,6 +4102,8 @@ convert_to_divmod (gassign *stmt)
tree res = make_temp_ssa_name (build_complex_type (TREE_TYPE (op1)),
call_stmt, "divmod_tmp");
gimple_call_set_lhs (call_stmt, res);
+ /* We rejected throwing statements above. */
+ gimple_call_set_nothrow (call_stmt, true);
/* Insert the call before top_stmt. */
gimple_stmt_iterator top_stmt_gsi = gsi_for_stmt (top_stmt);
diff --git a/gcc/tree-vect-patterns.c b/gcc/tree-vect-patterns.c
index 877711a..cfdb72c 100644
--- a/gcc/tree-vect-patterns.c
+++ b/gcc/tree-vect-patterns.c
@@ -1085,6 +1085,7 @@ vect_recog_pow_pattern (vec<gimple *> *stmts, tree *type_in,
gcall *stmt = gimple_build_call_internal (IFN_SQRT, 1, base);
var = vect_recog_temp_ssa_var (TREE_TYPE (base), stmt);
gimple_call_set_lhs (stmt, var);
+ gimple_call_set_nothrow (stmt, true);
return stmt;
}
}
@@ -3867,7 +3868,6 @@ vect_recog_mask_conversion_pattern (vec<gimple *> *stmts, tree *type_in,
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
stmt_vec_info pattern_stmt_info;
vec_info *vinfo = stmt_vinfo->vinfo;
- gimple *pattern_stmt;
/* Check for MASK_LOAD ans MASK_STORE calls requiring mask conversion. */
if (is_gimple_call (last_stmt)
@@ -3875,6 +3875,7 @@ vect_recog_mask_conversion_pattern (vec<gimple *> *stmts, tree *type_in,
&& (gimple_call_internal_fn (last_stmt) == IFN_MASK_STORE
|| gimple_call_internal_fn (last_stmt) == IFN_MASK_LOAD))
{
+ gcall *pattern_stmt;
bool load = (gimple_call_internal_fn (last_stmt) == IFN_MASK_LOAD);
if (load)
@@ -3918,6 +3919,7 @@ vect_recog_mask_conversion_pattern (vec<gimple *> *stmts, tree *type_in,
tmp,
gimple_call_arg (last_stmt, 3));
+ gimple_call_set_nothrow (pattern_stmt, true);
pattern_stmt_info = new_stmt_vec_info (pattern_stmt, vinfo);
set_vinfo_for_stmt (pattern_stmt, pattern_stmt_info);
@@ -3940,6 +3942,7 @@ vect_recog_mask_conversion_pattern (vec<gimple *> *stmts, tree *type_in,
if (!is_gimple_assign (last_stmt))
return NULL;
+ gimple *pattern_stmt;
lhs = gimple_assign_lhs (last_stmt);
rhs1 = gimple_assign_rhs1 (last_stmt);
rhs_code = gimple_assign_rhs_code (last_stmt);
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index 0629c12..013fb1f 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -2364,9 +2364,11 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
misalign);
tree ptr = build_int_cst (TREE_TYPE (gimple_call_arg (stmt, 1)),
misalign ? least_bit_hwi (misalign) : align);
- new_stmt
+ gcall *call
= gimple_build_call_internal (IFN_MASK_STORE, 4, dataref_ptr,
ptr, vec_mask, vec_rhs);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (i == 0)
STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt = new_stmt;
@@ -2414,16 +2416,17 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
misalign);
tree ptr = build_int_cst (TREE_TYPE (gimple_call_arg (stmt, 1)),
misalign ? least_bit_hwi (misalign) : align);
- new_stmt
+ gcall *call
= gimple_build_call_internal (IFN_MASK_LOAD, 3, dataref_ptr,
ptr, vec_mask);
- gimple_call_set_lhs (new_stmt, make_ssa_name (vec_dest));
- vect_finish_stmt_generation (stmt, new_stmt, gsi);
+ gimple_call_set_lhs (call, make_ssa_name (vec_dest));
+ gimple_call_set_nothrow (call, true);
+ vect_finish_stmt_generation (stmt, call, gsi);
if (i == 0)
- STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt = new_stmt;
+ STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt = call;
else
- STMT_VINFO_RELATED_STMT (prev_stmt_info) = new_stmt;
- prev_stmt_info = vinfo_for_stmt (new_stmt);
+ STMT_VINFO_RELATED_STMT (prev_stmt_info) = call;
+ prev_stmt_info = vinfo_for_stmt (call);
}
}
@@ -2867,8 +2870,11 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (modifier == NARROW)
{
tree half_res = make_ssa_name (vectype_in);
- new_stmt = gimple_build_call_internal_vec (ifn, vargs);
- gimple_call_set_lhs (new_stmt, half_res);
+ gcall *call
+ = gimple_build_call_internal_vec (ifn, vargs);
+ gimple_call_set_lhs (call, half_res);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if ((i & 1) == 0)
{
@@ -2881,12 +2887,15 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
}
else
{
+ gcall *call;
if (ifn != IFN_LAST)
- new_stmt = gimple_build_call_internal_vec (ifn, vargs);
+ call = gimple_build_call_internal_vec (ifn, vargs);
else
- new_stmt = gimple_build_call_vec (fndecl, vargs);
- new_temp = make_ssa_name (vec_dest, new_stmt);
- gimple_call_set_lhs (new_stmt, new_temp);
+ call = gimple_build_call_vec (fndecl, vargs);
+ new_temp = make_ssa_name (vec_dest, call);
+ gimple_call_set_lhs (call, new_temp);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
}
vect_finish_stmt_generation (stmt, new_stmt, gsi);
SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
@@ -2934,8 +2943,10 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
else if (modifier == NARROW)
{
tree half_res = make_ssa_name (vectype_in);
- new_stmt = gimple_build_call_internal_vec (ifn, vargs);
- gimple_call_set_lhs (new_stmt, half_res);
+ gcall *call = gimple_build_call_internal_vec (ifn, vargs);
+ gimple_call_set_lhs (call, half_res);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if ((j & 1) == 0)
{
@@ -2948,12 +2959,15 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
}
else
{
+ gcall *call;
if (ifn != IFN_LAST)
- new_stmt = gimple_build_call_internal_vec (ifn, vargs);
+ call = gimple_build_call_internal_vec (ifn, vargs);
else
- new_stmt = gimple_build_call_vec (fndecl, vargs);
+ call = gimple_build_call_vec (fndecl, vargs);
new_temp = make_ssa_name (vec_dest, new_stmt);
- gimple_call_set_lhs (new_stmt, new_temp);
+ gimple_call_set_lhs (call, new_temp);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
}
vect_finish_stmt_generation (stmt, new_stmt, gsi);
@@ -2996,12 +3010,15 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
vargs.quick_push (vec_oprndsk[i]);
vargs.quick_push (vec_oprndsk[i + 1]);
}
+ gcall *call;
if (ifn != IFN_LAST)
- new_stmt = gimple_build_call_internal_vec (ifn, vargs);
+ call = gimple_build_call_internal_vec (ifn, vargs);
else
- new_stmt = gimple_build_call_vec (fndecl, vargs);
- new_temp = make_ssa_name (vec_dest, new_stmt);
- gimple_call_set_lhs (new_stmt, new_temp);
+ call = gimple_build_call_vec (fndecl, vargs);
+ new_temp = make_ssa_name (vec_dest, call);
+ gimple_call_set_lhs (call, new_temp);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
@@ -6356,8 +6373,11 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
/* Emit:
MEM_REF[...all elements...] = STORE_LANES (VEC_ARRAY). */
data_ref = create_array_ref (aggr_type, dataref_ptr, ref_type);
- new_stmt = gimple_build_call_internal (IFN_STORE_LANES, 1, vec_array);
- gimple_call_set_lhs (new_stmt, data_ref);
+ gcall *call = gimple_build_call_internal (IFN_STORE_LANES, 1,
+ vec_array);
+ gimple_call_set_lhs (call, data_ref);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
}
else
@@ -7448,8 +7468,11 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
/* Emit:
VEC_ARRAY = LOAD_LANES (MEM_REF[...all elements...]). */
data_ref = create_array_ref (aggr_type, dataref_ptr, ref_type);
- new_stmt = gimple_build_call_internal (IFN_LOAD_LANES, 1, data_ref);
- gimple_call_set_lhs (new_stmt, vec_array);
+ gcall *call = gimple_build_call_internal (IFN_LOAD_LANES, 1,
+ data_ref);
+ gimple_call_set_lhs (call, vec_array);
+ gimple_call_set_nothrow (call, true);
+ new_stmt = call;
vect_finish_stmt_generation (stmt, new_stmt, gsi);
/* Extract each vector into an SSA_NAME. */