path: root/gcc/omp-expand.c
author     Jakub Jelinek <jakub@redhat.com>    2019-05-24 23:31:59 +0200
committer  Jakub Jelinek <jakub@gcc.gnu.org>   2019-05-24 23:31:59 +0200
commit     6c7ae8c56f9341f180e097d5eb7ba05cb8eec413 (patch)
tree       425965042d935027e8cc7045b383f6b7b6f714f9 /gcc/omp-expand.c
parent     09b4000c7c5b359ec62b5d621daf6ab95233ce10 (diff)
tree-core.h (enum omp_clause_code): Add OMP_CLAUSE__CONDTEMP_.
	* tree-core.h (enum omp_clause_code): Add OMP_CLAUSE__CONDTEMP_.
	* tree.h (OMP_CLAUSE_DECL): Use OMP_CLAUSE__CONDTEMP_ instead of
	OMP_CLAUSE__REDUCTEMP_.
	* tree.c (omp_clause_num_ops, omp_clause_code_name): Add
	OMP_CLAUSE__CONDTEMP_.
	(walk_tree_1): Handle OMP_CLAUSE__CONDTEMP_.
	* tree-pretty-print.c (dump_omp_clause): Likewise.
	* tree-nested.c (convert_nonlocal_omp_clauses,
	convert_local_omp_clauses): Likewise.
	* gimplify.c (enum gimplify_omp_var_data): Use hexadecimal constants
	instead of decimal.  Add GOVD_LASTPRIVATE_CONDITIONAL.
	(gimplify_scan_omp_clauses): Don't reject lastprivate conditional
	on OMP_FOR.
	(gimplify_omp_for): Warn and disable conditional modifier from
	lastprivate on loop iterators.
	* omp-general.h (struct omp_for_data): Add lastprivate_conditional
	member.
	* omp-general.c (omp_extract_for_data): Initialize it.
	* omp-low.c (struct omp_context): Add lastprivate_conditional_map
	member.
	(delete_omp_context): Delete it.
	(lower_lastprivate_conditional_clauses): New function.
	(lower_lastprivate_clauses): Add BODY_P and CSTMT_LIST arguments,
	handle lastprivate conditional clauses.
	(lower_reduction_clauses): Add CLIST argument, emit it into the
	critical section if any.
	(lower_omp_sections): Adjust lower_lastprivate_clauses and
	lower_reduction_clauses callers.
	(lower_omp_for_lastprivate): Add CLIST argument, pass it through
	to lower_lastprivate_clauses.
	(lower_omp_for): Call lower_lastprivate_conditional_clauses, adjust
	lower_omp_for_lastprivate and lower_reduction_clauses callers, emit
	clist into a critical section if not emitted there already by
	lower_reduction_clauses.
	(lower_omp_taskreg, lower_omp_teams): Adjust lower_reduction_clauses
	callers.
	(lower_omp_1): Handle GIMPLE_ASSIGNs storing into lastprivate
	conditional variables.
	* omp-expand.c (determine_parallel_type): Punt if
	OMP_CLAUSE__CONDTEMP_ clause is present.
	(expand_omp_for_generic, expand_omp_for_static_nochunk,
	expand_omp_for_static_chunk): Handle lastprivate conditional.
	(expand_omp_for): Handle fd.lastprivate_conditional like
	fd.have_reductemp.
gcc/testsuite/
	* c-c++-common/gomp/lastprivate-conditional-2.c (foo): Don't expect
	sorry for omp for.
	* c-c++-common/gomp/lastprivate-conditional-3.c: New test.
libgomp/
	* testsuite/libgomp.c-c++-common/lastprivate-conditional-1.c: New
	test.
	* testsuite/libgomp.c-c++-common/lastprivate-conditional-2.c: New
	test.

From-SVN: r271610
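For context, the feature being expanded here is the OpenMP 5.0 conditional
modifier on the lastprivate clause for worksharing loops.  A minimal
illustration of the construct follows; this is a hypothetical example, not
one of the testcases added by this commit:

    int
    last_positive (int *a, int n)
    {
      int r = -1;
    #pragma omp parallel for lastprivate (conditional: r)
      for (int i = 0; i < n; i++)
        if (a[i] > 0)
          r = i;    /* R after the loop comes from the sequentially last
                       iteration that actually executes this store.  */
      return r;
    }

With plain lastprivate, the private copy's value after the sequentially
last iteration would be copied back even if that iteration never assigned
R, making the result indeterminate here; the conditional modifier restricts
the copy-back to iterations that actually perform the assignment.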
Diffstat (limited to 'gcc/omp-expand.c')
-rw-r--r--   gcc/omp-expand.c   300
1 file changed, 260 insertions(+), 40 deletions(-)
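In the omp-expand.c hunks below, the loop expansion gains an extra
induction variable (cond_var) that starts at a non-zero value and is
bumped on every iteration, plus a small per-thread scratch buffer: its
size (element size of the _condtemp_ array type times the number of
conditional lastprivate clauses) is stored into memv and its address is
passed as the mem argument of GOMP_loop_start.  The plain-C sketch below
is a conceptual model only, with hypothetical names; GCC emits GIMPLE,
and the actual bookkeeping is split between omp-low.c and libgomp:

    /* Conceptual model, not code emitted by GCC: each thread remembers,
       per conditional lastprivate variable, the iteration counter of its
       most recent assignment; after the loop, the assignment with the
       largest counter (the sequentially last one) provides the value.  */
    struct cond_slot { unsigned long iter; int value; };

    void
    record_assignment (struct cond_slot *slot, unsigned long iter, int val)
    {
      slot->iter = iter;    /* counter starts non-zero and only grows */
      slot->value = val;
    }

    int
    merge_slots (const struct cond_slot *slots, int nthreads, int orig)
    {
      unsigned long best = 0;
      int result = orig;
      for (int t = 0; t < nthreads; t++)
        if (slots[t].iter > best)   /* largest counter wins */
          {
            best = slots[t].iter;
            result = slots[t].value;
          }
      return result;
    }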
diff --git a/gcc/omp-expand.c b/gcc/omp-expand.c
index 0d7f104..dfac4b0 100644
--- a/gcc/omp-expand.c
+++ b/gcc/omp-expand.c
@@ -345,7 +345,8 @@ determine_parallel_type (struct omp_region *region)
|| ((OMP_CLAUSE_SCHEDULE_KIND (c) & OMP_CLAUSE_SCHEDULE_MASK)
== OMP_CLAUSE_SCHEDULE_STATIC)
|| omp_find_clause (clauses, OMP_CLAUSE_ORDERED)
- || omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_))
+ || omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_)
+ || omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_))
return;
}
else if (region->inner->type == GIMPLE_OMP_SECTIONS
@@ -2679,16 +2680,17 @@ expand_omp_for_generic (struct omp_region *region,
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
if (fd->ordered
- && omp_find_clause (gimple_omp_for_clauses (gsi_stmt (gsi)),
+ && omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
OMP_CLAUSE_LASTPRIVATE))
ordered_lastprivate = false;
tree reductions = NULL_TREE;
- tree mem = NULL_TREE;
+ tree mem = NULL_TREE, cond_var = NULL_TREE, condtemp = NULL_TREE;
+ tree memv = NULL_TREE;
if (sched_arg)
{
if (fd->have_reductemp)
{
- tree c = omp_find_clause (gimple_omp_for_clauses (gsi_stmt (gsi)),
+ tree c = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
OMP_CLAUSE__REDUCTEMP_);
reductions = OMP_CLAUSE_DECL (c);
gcc_assert (TREE_CODE (reductions) == SSA_NAME);
@@ -2703,8 +2705,25 @@ expand_omp_for_generic (struct omp_region *region,
}
else
reductions = null_pointer_node;
- /* For now. */
- mem = null_pointer_node;
+ if (fd->lastprivate_conditional)
+ {
+ tree c = omp_find_clause (gimple_omp_for_clauses (fd->for_stmt),
+ OMP_CLAUSE__CONDTEMP_);
+ condtemp = OMP_CLAUSE_DECL (c);
+ c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE__CONDTEMP_);
+ cond_var = OMP_CLAUSE_DECL (c);
+ tree type = TREE_TYPE (condtemp);
+ memv = create_tmp_var (type);
+ TREE_ADDRESSABLE (memv) = 1;
+ unsigned HOST_WIDE_INT sz
+ = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
+ sz *= fd->lastprivate_conditional;
+ expand_omp_build_assign (&gsi, memv, build_int_cst (type, sz),
+ false);
+ mem = build_fold_addr_expr (memv);
+ }
+ else
+ mem = null_pointer_node;
}
if (fd->collapse > 1 || fd->ordered)
{
@@ -2959,6 +2978,8 @@ expand_omp_for_generic (struct omp_region *region,
gsi_insert_before (&gsi, gimple_build_assign (arr, clobber),
GSI_SAME_STMT);
}
+ if (fd->lastprivate_conditional)
+ expand_omp_build_assign (&gsi, condtemp, memv, false);
if (fd->have_reductemp)
{
gimple *g = gsi_stmt (gsi);
@@ -3029,6 +3050,35 @@ expand_omp_for_generic (struct omp_region *region,
NULL_TREE, false, GSI_CONTINUE_LINKING);
assign_stmt = gimple_build_assign (startvar, t);
gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ if (cond_var)
+ {
+ tree itype = TREE_TYPE (cond_var);
+ /* For lastprivate(conditional:) itervar, we need some iteration
+ counter that starts at unsigned non-zero and increases.
+ Prefer as few IVs as possible, so if we can use startvar
+ itself, use that, or startvar + constant (those would be
+ incremented with step), and as last resort use the s0 + 1
+ incremented by 1. */
+ if ((fd->ordered && fd->collapse == 1)
+ || bias
+ || POINTER_TYPE_P (type)
+ || TREE_CODE (fd->loop.n1) != INTEGER_CST
+ || fd->loop.cond_code != LT_EXPR)
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, istart0),
+ build_int_cst (itype, 1));
+ else if (tree_int_cst_sgn (fd->loop.n1) == 1)
+ t = fold_convert (itype, t);
+ else
+ {
+ tree c = fold_convert (itype, fd->loop.n1);
+ c = fold_build2 (MINUS_EXPR, itype, build_int_cst (itype, 1), c);
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, t), c);
+ }
+ t = force_gimple_operand_gsi (&gsi, t, false,
+ NULL_TREE, false, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (cond_var, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ }
t = iend0;
if (fd->ordered && fd->collapse == 1)
@@ -3213,6 +3263,25 @@ expand_omp_for_generic (struct omp_region *region,
assign_stmt = gimple_build_assign (vback, t);
gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
+ if (cond_var)
+ {
+ tree itype = TREE_TYPE (cond_var);
+ tree t2;
+ if ((fd->ordered && fd->collapse == 1)
+ || bias
+ || POINTER_TYPE_P (type)
+ || TREE_CODE (fd->loop.n1) != INTEGER_CST
+ || fd->loop.cond_code != LT_EXPR)
+ t2 = build_int_cst (itype, 1);
+ else
+ t2 = fold_convert (itype, fd->loop.step);
+ t2 = fold_build2 (PLUS_EXPR, itype, cond_var, t2);
+ t2 = force_gimple_operand_gsi (&gsi, t2, false,
+ NULL_TREE, true, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (cond_var, t2);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
+ }
+
if (fd->ordered && counts[fd->collapse - 1] == NULL_TREE)
{
tree tem;
@@ -3465,12 +3534,13 @@ expand_omp_for_static_nochunk (struct omp_region *region,
basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
basic_block body_bb, cont_bb, collapse_bb = NULL;
basic_block fin_bb;
- gimple_stmt_iterator gsi;
+ gimple_stmt_iterator gsi, gsip;
edge ep;
bool broken_loop = region->cont == NULL;
tree *counts = NULL;
tree n1, n2, step;
tree reductions = NULL_TREE;
+ tree cond_var = NULL_TREE;
itype = type = TREE_TYPE (fd->loop.v);
if (POINTER_TYPE_P (type))
@@ -3495,6 +3565,8 @@ expand_omp_for_static_nochunk (struct omp_region *region,
/* Iteration space partitioning goes in ENTRY_BB. */
gsi = gsi_last_nondebug_bb (entry_bb);
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
+ gsip = gsi;
+ gsi_prev (&gsip);
if (fd->collapse > 1)
{
@@ -3524,7 +3596,7 @@ expand_omp_for_static_nochunk (struct omp_region *region,
n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
true, GSI_SAME_STMT);
gcond *cond_stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
- NULL_TREE, NULL_TREE);
+ NULL_TREE, NULL_TREE);
gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
if (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL)
@@ -3554,28 +3626,64 @@ expand_omp_for_static_nochunk (struct omp_region *region,
gsi = gsi_last_bb (entry_bb);
}
- if (fd->have_reductemp)
+ if (fd->have_reductemp || fd->lastprivate_conditional)
{
tree t1 = build_int_cst (long_integer_type_node, 0);
tree t2 = build_int_cst (long_integer_type_node, 1);
tree t3 = build_int_cstu (long_integer_type_node,
(HOST_WIDE_INT_1U << 31) + 1);
tree clauses = gimple_omp_for_clauses (fd->for_stmt);
- clauses = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
- reductions = OMP_CLAUSE_DECL (clauses);
- gcc_assert (TREE_CODE (reductions) == SSA_NAME);
- gimple *g = SSA_NAME_DEF_STMT (reductions);
- reductions = gimple_assign_rhs1 (g);
- OMP_CLAUSE_DECL (clauses) = reductions;
- gimple_stmt_iterator gsi2 = gsi_for_stmt (g);
+ gimple_stmt_iterator gsi2 = gsi_none ();
+ gimple *g = NULL;
+ tree mem = null_pointer_node, memv = NULL_TREE;
+ tree condtemp = NULL_TREE;
+ if (fd->have_reductemp)
+ {
+ tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
+ reductions = OMP_CLAUSE_DECL (c);
+ gcc_assert (TREE_CODE (reductions) == SSA_NAME);
+ g = SSA_NAME_DEF_STMT (reductions);
+ reductions = gimple_assign_rhs1 (g);
+ OMP_CLAUSE_DECL (c) = reductions;
+ gsi2 = gsi_for_stmt (g);
+ }
+ else
+ {
+ if (gsi_end_p (gsip))
+ gsi2 = gsi_after_labels (region->entry);
+ else
+ gsi2 = gsip;
+ reductions = null_pointer_node;
+ }
+ if (fd->lastprivate_conditional)
+ {
+ tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
+ condtemp = OMP_CLAUSE_DECL (c);
+ c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE__CONDTEMP_);
+ cond_var = OMP_CLAUSE_DECL (c);
+ tree type = TREE_TYPE (condtemp);
+ memv = create_tmp_var (type);
+ TREE_ADDRESSABLE (memv) = 1;
+ unsigned HOST_WIDE_INT sz
+ = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
+ sz *= fd->lastprivate_conditional;
+ expand_omp_build_assign (&gsi2, memv, build_int_cst (type, sz),
+ false);
+ mem = build_fold_addr_expr (memv);
+ }
tree t
= build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_LOOP_START),
9, t1, t2, t2, t3, t1, null_pointer_node,
- null_pointer_node, reductions, null_pointer_node);
+ null_pointer_node, reductions, mem);
force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
true, GSI_SAME_STMT);
- gsi_remove (&gsi2, true);
- release_ssa_name (gimple_assign_lhs (g));
+ if (fd->lastprivate_conditional)
+ expand_omp_build_assign (&gsi2, condtemp, memv, false);
+ if (fd->have_reductemp)
+ {
+ gsi_remove (&gsi2, true);
+ release_ssa_name (gimple_assign_lhs (g));
+ }
}
switch (gimple_omp_for_kind (fd->for_stmt))
{
@@ -3735,6 +3843,33 @@ expand_omp_for_static_nochunk (struct omp_region *region,
NULL_TREE, false, GSI_CONTINUE_LINKING);
assign_stmt = gimple_build_assign (startvar, t);
gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ if (cond_var)
+ {
+ tree itype = TREE_TYPE (cond_var);
+ /* For lastprivate(conditional:) itervar, we need some iteration
+ counter that starts at unsigned non-zero and increases.
+ Prefer as few IVs as possible, so if we can use startvar
+ itself, use that, or startvar + constant (those would be
+ incremented with step), and as last resort use the s0 + 1
+ incremented by 1. */
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (n1) != INTEGER_CST
+ || fd->loop.cond_code != LT_EXPR)
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, s0),
+ build_int_cst (itype, 1));
+ else if (tree_int_cst_sgn (n1) == 1)
+ t = fold_convert (itype, t);
+ else
+ {
+ tree c = fold_convert (itype, n1);
+ c = fold_build2 (MINUS_EXPR, itype, build_int_cst (itype, 1), c);
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, t), c);
+ }
+ t = force_gimple_operand_gsi (&gsi, t, false,
+ NULL_TREE, false, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (cond_var, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ }
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, step);
@@ -3829,6 +3964,23 @@ expand_omp_for_static_nochunk (struct omp_region *region,
assign_stmt = gimple_build_assign (vback, t);
gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
+ if (cond_var)
+ {
+ tree itype = TREE_TYPE (cond_var);
+ tree t2;
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (n1) != INTEGER_CST
+ || fd->loop.cond_code != LT_EXPR)
+ t2 = build_int_cst (itype, 1);
+ else
+ t2 = fold_convert (itype, step);
+ t2 = fold_build2 (PLUS_EXPR, itype, cond_var, t2);
+ t2 = force_gimple_operand_gsi (&gsi, t2, false,
+ NULL_TREE, true, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (cond_var, t2);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
+ }
+
t = build2 (fd->loop.cond_code, boolean_type_node,
DECL_P (vback) && TREE_ADDRESSABLE (vback)
? t : vback, e);
@@ -3847,7 +3999,7 @@ expand_omp_for_static_nochunk (struct omp_region *region,
if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
{
t = gimple_omp_return_lhs (gsi_stmt (gsi));
- if (fd->have_reductemp)
+ if (fd->have_reductemp || fd->lastprivate_conditional)
{
tree fn;
if (t)
@@ -3858,9 +4010,10 @@ expand_omp_for_static_nochunk (struct omp_region *region,
if (t)
{
gimple_call_set_lhs (g, t);
- gsi_insert_after (&gsi, gimple_build_assign (reductions,
- NOP_EXPR, t),
- GSI_SAME_STMT);
+ if (fd->have_reductemp)
+ gsi_insert_after (&gsi, gimple_build_assign (reductions,
+ NOP_EXPR, t),
+ GSI_SAME_STMT);
}
gsi_insert_after (&gsi, g, GSI_SAME_STMT);
}
@@ -3997,12 +4150,13 @@ expand_omp_for_static_chunk (struct omp_region *region,
tree type, itype, vmain, vback, vextra;
basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
- gimple_stmt_iterator gsi;
+ gimple_stmt_iterator gsi, gsip;
edge se;
bool broken_loop = region->cont == NULL;
tree *counts = NULL;
tree n1, n2, step;
tree reductions = NULL_TREE;
+ tree cond_var = NULL_TREE;
itype = type = TREE_TYPE (fd->loop.v);
if (POINTER_TYPE_P (type))
@@ -4031,6 +4185,8 @@ expand_omp_for_static_chunk (struct omp_region *region,
/* Trip and adjustment setup goes in ENTRY_BB. */
gsi = gsi_last_nondebug_bb (entry_bb);
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
+ gsip = gsi;
+ gsi_prev (&gsip);
if (fd->collapse > 1)
{
@@ -4090,28 +4246,64 @@ expand_omp_for_static_chunk (struct omp_region *region,
gsi = gsi_last_bb (entry_bb);
}
- if (fd->have_reductemp)
+ if (fd->have_reductemp || fd->lastprivate_conditional)
{
tree t1 = build_int_cst (long_integer_type_node, 0);
tree t2 = build_int_cst (long_integer_type_node, 1);
tree t3 = build_int_cstu (long_integer_type_node,
(HOST_WIDE_INT_1U << 31) + 1);
tree clauses = gimple_omp_for_clauses (fd->for_stmt);
- clauses = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
- reductions = OMP_CLAUSE_DECL (clauses);
- gcc_assert (TREE_CODE (reductions) == SSA_NAME);
- gimple *g = SSA_NAME_DEF_STMT (reductions);
- reductions = gimple_assign_rhs1 (g);
- OMP_CLAUSE_DECL (clauses) = reductions;
- gimple_stmt_iterator gsi2 = gsi_for_stmt (g);
+ gimple_stmt_iterator gsi2 = gsi_none ();
+ gimple *g = NULL;
+ tree mem = null_pointer_node, memv = NULL_TREE;
+ tree condtemp = NULL_TREE;
+ if (fd->have_reductemp)
+ {
+ tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
+ reductions = OMP_CLAUSE_DECL (c);
+ gcc_assert (TREE_CODE (reductions) == SSA_NAME);
+ g = SSA_NAME_DEF_STMT (reductions);
+ reductions = gimple_assign_rhs1 (g);
+ OMP_CLAUSE_DECL (c) = reductions;
+ gsi2 = gsi_for_stmt (g);
+ }
+ else
+ {
+ if (gsi_end_p (gsip))
+ gsi2 = gsi_after_labels (region->entry);
+ else
+ gsi2 = gsip;
+ reductions = null_pointer_node;
+ }
+ if (fd->lastprivate_conditional)
+ {
+ tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
+ condtemp = OMP_CLAUSE_DECL (c);
+ c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE__CONDTEMP_);
+ cond_var = OMP_CLAUSE_DECL (c);
+ tree type = TREE_TYPE (condtemp);
+ memv = create_tmp_var (type);
+ TREE_ADDRESSABLE (memv) = 1;
+ unsigned HOST_WIDE_INT sz
+ = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type)));
+ sz *= fd->lastprivate_conditional;
+ expand_omp_build_assign (&gsi2, memv, build_int_cst (type, sz),
+ false);
+ mem = build_fold_addr_expr (memv);
+ }
tree t
= build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_LOOP_START),
9, t1, t2, t2, t3, t1, null_pointer_node,
- null_pointer_node, reductions, null_pointer_node);
+ null_pointer_node, reductions, mem);
force_gimple_operand_gsi (&gsi2, t, true, NULL_TREE,
true, GSI_SAME_STMT);
- gsi_remove (&gsi2, true);
- release_ssa_name (gimple_assign_lhs (g));
+ if (fd->lastprivate_conditional)
+ expand_omp_build_assign (&gsi2, condtemp, memv, false);
+ if (fd->have_reductemp)
+ {
+ gsi_remove (&gsi2, true);
+ release_ssa_name (gimple_assign_lhs (g));
+ }
}
switch (gimple_omp_for_kind (fd->for_stmt))
{
@@ -4286,6 +4478,33 @@ expand_omp_for_static_chunk (struct omp_region *region,
NULL_TREE, false, GSI_CONTINUE_LINKING);
assign_stmt = gimple_build_assign (startvar, t);
gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ if (cond_var)
+ {
+ tree itype = TREE_TYPE (cond_var);
+ /* For lastprivate(conditional:) itervar, we need some iteration
+ counter that starts at unsigned non-zero and increases.
+ Prefer as few IVs as possible, so if we can use startvar
+ itself, use that, or startvar + constant (those would be
+ incremented with step), and as last resort use the s0 + 1
+ incremented by 1. */
+ if (POINTER_TYPE_P (type)
+ || TREE_CODE (n1) != INTEGER_CST
+ || fd->loop.cond_code != LT_EXPR)
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, s0),
+ build_int_cst (itype, 1));
+ else if (tree_int_cst_sgn (n1) == 1)
+ t = fold_convert (itype, t);
+ else
+ {
+ tree c = fold_convert (itype, n1);
+ c = fold_build2 (MINUS_EXPR, itype, build_int_cst (itype, 1), c);
+ t = fold_build2 (PLUS_EXPR, itype, fold_convert (itype, t), c);
+ }
+ t = force_gimple_operand_gsi (&gsi, t, false,
+ NULL_TREE, false, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (cond_var, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
+ }
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, step);
@@ -4416,7 +4635,7 @@ expand_omp_for_static_chunk (struct omp_region *region,
if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
{
t = gimple_omp_return_lhs (gsi_stmt (gsi));
- if (fd->have_reductemp)
+ if (fd->have_reductemp || fd->lastprivate_conditional)
{
tree fn;
if (t)
@@ -4427,9 +4646,10 @@ expand_omp_for_static_chunk (struct omp_region *region,
if (t)
{
gimple_call_set_lhs (g, t);
- gsi_insert_after (&gsi, gimple_build_assign (reductions,
- NOP_EXPR, t),
- GSI_SAME_STMT);
+ if (fd->have_reductemp)
+ gsi_insert_after (&gsi, gimple_build_assign (reductions,
+ NOP_EXPR, t),
+ GSI_SAME_STMT);
}
gsi_insert_after (&gsi, g, GSI_SAME_STMT);
}
@@ -6043,7 +6263,7 @@ expand_omp_for (struct omp_region *region, gimple *inner_stmt)
else
start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
- if (fd.have_reductemp)
+ if (fd.have_reductemp || fd.lastprivate_conditional)
{
if (fd.ordered)
start_ix = (int)BUILT_IN_GOMP_LOOP_DOACROSS_START;