author     Jakub Jelinek <jakub@redhat.com>  2021-08-17 09:30:09 +0200
committer  Jakub Jelinek <jakub@redhat.com>  2021-08-17 09:30:09 +0200
commit     e45483c7c4badc4bf2d6ced22360ce1ab172967f (patch)
tree       fabc4aa4e0eba482e7502c955508805984c4eb59 /gcc/omp-low.c
parent     d56599979211266b2f7b7535311205dd758353ac (diff)
openmp: Implement OpenMP 5.1 scope construct
This patch implements the OpenMP 5.1 scope construct, which is similar to worksharing constructs in many regards but isn't one of them.  The body of the construct is encountered by all threads, though; it can be nested in itself or intermixed with taskgroup, and worksharing etc. constructs can appear inside of it (but it can't be nested in worksharing etc. constructs).  The main purpose of the construct is to allow reductions (normal and task ones) without the need to close one parallel and open another.

If it doesn't have task reductions, it can be implemented without any new library support: with nowait it just performs the privatizations (if any) at the start and the reductions before the end of the body, while without nowait it also emits a normal GOMP_barrier{,_cancel} at the end.

For task reductions, we need to ensure that only one thread initializes the task reduction library data structures and the other threads copy from it, so a new GOMP_scope_start routine is added to the library for that.  It acts as if the start of the scope construct were a nowait worksharing construct (that is ok: scope can't be nested in other worksharing constructs, and all threads need to encounter its start in the same order) which performs the task reduction initialization; because the body can contain other scope and/or worksharing constructs, that is the only thing this dummy worksharing construct is used for.  With task reductions, the construct must not have nowait and ends with a GOMP_barrier{,_cancel}, followed by the task reductions, followed by GOMP_workshare_task_reduction_unregister.

Only C/C++ FE support is done.

2021-08-17  Jakub Jelinek  <jakub@redhat.com>

gcc/
	* tree.def (OMP_SCOPE): New tree code.
	* tree.h (OMP_SCOPE_BODY, OMP_SCOPE_CLAUSES): Define.
	* tree-nested.c (convert_nonlocal_reference_stmt,
	convert_local_reference_stmt, convert_gimple_call): Handle
	GIMPLE_OMP_SCOPE.
	* tree-pretty-print.c (dump_generic_node): Handle OMP_SCOPE.
	* gimple.def (GIMPLE_OMP_SCOPE): New gimple code.
	* gimple.c (gimple_build_omp_scope): New function.
	(gimple_copy): Handle GIMPLE_OMP_SCOPE.
	* gimple.h (gimple_build_omp_scope): Declare.
	(gimple_has_substatements): Handle GIMPLE_OMP_SCOPE.
	(gimple_omp_scope_clauses, gimple_omp_scope_clauses_ptr,
	gimple_omp_scope_set_clauses): New inline functions.
	(CASE_GIMPLE_OMP): Add GIMPLE_OMP_SCOPE.
	* gimple-pretty-print.c (dump_gimple_omp_scope): New function.
	(pp_gimple_stmt_1): Handle GIMPLE_OMP_SCOPE.
	* gimple-walk.c (walk_gimple_stmt): Likewise.
	* gimple-low.c (lower_stmt): Likewise.
	* gimplify.c (is_gimple_stmt): Handle OMP_SCOPE.
	(gimplify_scan_omp_clauses): For task reductions, handle OMP_SCOPE
	like ORT_WORKSHARE constructs.  Adjust diagnostics for %<scope%>
	allowing task reductions.  Reject inscan reductions on scope.
	(omp_find_stores_stmt): Handle GIMPLE_OMP_SCOPE.
	(gimplify_omp_workshare, gimplify_expr): Handle OMP_SCOPE.
	* tree-inline.c (remap_gimple_stmt): Handle GIMPLE_OMP_SCOPE.
	(estimate_num_insns): Likewise.
	* omp-low.c (build_outer_var_ref): Look through GIMPLE_OMP_SCOPE
	contexts if var isn't privatized there.
	(check_omp_nesting_restrictions): Handle GIMPLE_OMP_SCOPE.
	(scan_omp_1_stmt): Likewise.
	(maybe_add_implicit_barrier_cancel): Look through outer scope
	constructs.
	(lower_omp_scope): New function.
	(lower_omp_task_reductions): Handle OMP_SCOPE.
	(lower_omp_1): Handle GIMPLE_OMP_SCOPE.
	(diagnose_sb_1, diagnose_sb_2): Likewise.
	* omp-expand.c (expand_omp_single): Support also GIMPLE_OMP_SCOPE.
	(expand_omp): Handle GIMPLE_OMP_SCOPE.
	(omp_make_gimple_edges): Likewise.
	* omp-builtins.def (BUILT_IN_GOMP_SCOPE_START): New built-in.
gcc/c-family/
	* c-pragma.h (enum pragma_kind): Add PRAGMA_OMP_SCOPE.
	* c-pragma.c (omp_pragmas): Add scope construct.
	* c-omp.c (omp_directives): Uncomment scope directive entry.
gcc/c/
	* c-parser.c (OMP_SCOPE_CLAUSE_MASK): Define.
	(c_parser_omp_scope): New function.
	(c_parser_omp_construct): Handle PRAGMA_OMP_SCOPE.
gcc/cp/
	* parser.c (OMP_SCOPE_CLAUSE_MASK): Define.
	(cp_parser_omp_scope): New function.
	(cp_parser_omp_construct, cp_parser_pragma): Handle
	PRAGMA_OMP_SCOPE.
	* pt.c (tsubst_expr): Handle OMP_SCOPE.
gcc/testsuite/
	* c-c++-common/gomp/nesting-2.c (foo): Add scope and masked
	construct tests.
	* c-c++-common/gomp/scan-1.c (f3): Add scope construct test.
	* c-c++-common/gomp/cancel-1.c (f2): Add scope and masked
	construct tests.
	* c-c++-common/gomp/reduction-task-2.c (bar): Add scope construct
	test.  Adjust diagnostics for the addition of scope.
	* c-c++-common/gomp/loop-1.c (f5): Add master, masked and scope
	construct tests.
	* c-c++-common/gomp/clause-dups-1.c (f1): Add scope construct test.
	* gcc.dg/gomp/nesting-1.c (f1, f2, f3): Add scope construct tests.
	* c-c++-common/gomp/scope-1.c: New test.
	* c-c++-common/gomp/scope-2.c: New test.
	* g++.dg/gomp/attrs-1.C (bar): Add scope construct tests.
	* g++.dg/gomp/attrs-2.C (bar): Likewise.
	* gfortran.dg/gomp/reduction4.f90: Adjust expected diagnostics.
	* gfortran.dg/gomp/reduction7.f90: Likewise.
libgomp/
	* Makefile.am (libgomp_la_SOURCES): Add scope.c.
	* Makefile.in: Regenerated.
	* libgomp_g.h (GOMP_scope_start): Declare.
	* libgomp.map: Add GOMP_scope_start@@GOMP_5.1.
	* scope.c: New file.
	* testsuite/libgomp.c-c++-common/scope-1.c: New test.
	* testsuite/libgomp.c-c++-common/task-reduction-16.c: New test.
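As a minimal sketch of the semantics described above (an illustrative example, not one of the testcases added by this patch), all threads of an existing parallel region can perform a reduction through scope without closing one parallel and opening another:

	void
	foo (int *a, int n, int *res)
	{
	  int sum = 0;
	#pragma omp parallel shared (sum)
	  {
	    /* All threads encounter the body; reduction (+: sum) gives each
	       thread a private copy, and the partial sums are combined before
	       the end of the construct.  Without a nowait clause, the construct
	       ends with the implicit barrier (GOMP_barrier{,_cancel}) mentioned
	       above.  */
	#pragma omp scope reduction (+: sum)
	    {
	      /* Worksharing constructs may appear inside scope.  */
	#pragma omp for nowait
	      for (int i = 0; i < n; i++)
	        sum += a[i];
	    }
	    /* After the implicit barrier, sum holds the combined result.  */
	  }
	  *res = sum;
	}

A reduction (task, +: sum) variant would additionally go through the new GOMP_scope_start entry point and must not use nowait.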
Diffstat (limited to 'gcc/omp-low.c')
-rw-r--r--  gcc/omp-low.c  181
1 file changed, 168 insertions(+), 13 deletions(-)
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index bef9940..a0b41af 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -663,8 +663,15 @@ build_outer_var_ref (tree var, omp_context *ctx,
{
tree x;
omp_context *outer = ctx->outer;
- while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
- outer = outer->outer;
+ for (; outer; outer = outer->outer)
+ {
+ if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
+ continue;
+ if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
+ && !maybe_lookup_decl (var, outer))
+ continue;
+ break;
+ }
if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
x = var;
@@ -3493,6 +3500,40 @@ check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
break;
}
break;
+ case GIMPLE_OMP_SCOPE:
+ for (; ctx != NULL; ctx = ctx->outer)
+ switch (gimple_code (ctx->stmt))
+ {
+ case GIMPLE_OMP_FOR:
+ if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
+ && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
+ break;
+ /* FALLTHRU */
+ case GIMPLE_OMP_SECTIONS:
+ case GIMPLE_OMP_SINGLE:
+ case GIMPLE_OMP_TASK:
+ case GIMPLE_OMP_CRITICAL:
+ case GIMPLE_OMP_ORDERED:
+ case GIMPLE_OMP_MASTER:
+ case GIMPLE_OMP_MASKED:
+ error_at (gimple_location (stmt),
+ "%<scope%> region may not be closely nested inside "
+ "of work-sharing, %<loop%>, explicit %<task%>, "
+ "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
+ "or %<masked%> region");
+ return false;
+ case GIMPLE_OMP_PARALLEL:
+ case GIMPLE_OMP_TEAMS:
+ return true;
+ case GIMPLE_OMP_TARGET:
+ if (gimple_omp_target_kind (ctx->stmt)
+ == GF_OMP_TARGET_KIND_REGION)
+ return true;
+ break;
+ default:
+ break;
+ }
+ break;
case GIMPLE_OMP_TASK:
for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
@@ -4071,6 +4112,12 @@ scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
scan_omp_for (as_a <gomp_for *> (stmt), ctx);
break;
+ case GIMPLE_OMP_SCOPE:
+ ctx = new_omp_context (stmt, ctx);
+ scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
+ scan_omp (gimple_omp_body_ptr (stmt), ctx);
+ break;
+
case GIMPLE_OMP_SECTIONS:
scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
break;
@@ -8359,7 +8406,8 @@ maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
gimple_seq_add_stmt (body, g);
gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
}
- else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
+ else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
+ && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
return;
}
@@ -8698,6 +8746,97 @@ lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
}
+/* Lower code for an OMP scope directive. */
+
+static void
+lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
+{
+ tree block;
+ gimple *scope_stmt = gsi_stmt (*gsi_p);
+ gbind *bind;
+ gimple_seq bind_body, bind_body_tail = NULL, dlist;
+ gimple_seq tred_dlist = NULL;
+
+ push_gimplify_context ();
+
+ block = make_node (BLOCK);
+ bind = gimple_build_bind (NULL, NULL, block);
+ gsi_replace (gsi_p, bind, true);
+ bind_body = NULL;
+ dlist = NULL;
+
+ tree rclauses
+ = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
+ OMP_SCOPE, OMP_CLAUSE_REDUCTION);
+ if (rclauses)
+ {
+ tree type = build_pointer_type (pointer_sized_int_node);
+ tree temp = create_tmp_var (type);
+ tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
+ OMP_CLAUSE_DECL (c) = temp;
+ OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
+ gimple_omp_scope_set_clauses (scope_stmt, c);
+ lower_omp_task_reductions (ctx, OMP_SCOPE,
+ gimple_omp_scope_clauses (scope_stmt),
+ &bind_body, &tred_dlist);
+ rclauses = c;
+ tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
+ gimple *stmt = gimple_build_call (fndecl, 1, temp);
+ gimple_seq_add_stmt (&bind_body, stmt);
+ }
+
+ lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
+ &bind_body, &dlist, ctx, NULL);
+ lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
+
+ gimple_seq_add_stmt (&bind_body, scope_stmt);
+
+ gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
+
+ gimple_omp_set_body (scope_stmt, NULL);
+
+ gimple_seq clist = NULL;
+ lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
+ &bind_body, &clist, ctx);
+ if (clist)
+ {
+ tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
+ gcall *g = gimple_build_call (fndecl, 0);
+ gimple_seq_add_stmt (&bind_body, g);
+ gimple_seq_add_seq (&bind_body, clist);
+ fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
+ g = gimple_build_call (fndecl, 0);
+ gimple_seq_add_stmt (&bind_body, g);
+ }
+
+ gimple_seq_add_seq (&bind_body, dlist);
+
+ bind_body = maybe_catch_exception (bind_body);
+
+ bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
+ OMP_CLAUSE_NOWAIT) != NULL_TREE;
+ gimple *g = gimple_build_omp_return (nowait);
+ gimple_seq_add_stmt (&bind_body_tail, g);
+ gimple_seq_add_seq (&bind_body_tail, tred_dlist);
+ maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
+ if (ctx->record_type)
+ {
+ gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
+ tree clobber = build_clobber (ctx->record_type);
+ gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
+ clobber), GSI_SAME_STMT);
+ }
+ gimple_seq_add_seq (&bind_body, bind_body_tail);
+
+ gimple_bind_set_body (bind, bind_body);
+
+ pop_gimplify_context (bind);
+
+ gimple_bind_append_vars (bind, ctx->block_vars);
+ BLOCK_VARS (block) = ctx->block_vars;
+ if (BLOCK_VARS (block))
+ TREE_USED (block) = 1;
+}
/* Expand code for an OpenMP master or masked directive. */
static void
@@ -8803,7 +8942,7 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
clauses = omp_task_reductions_find_first (clauses, code, ccode);
if (clauses == NULL_TREE)
return;
- if (code == OMP_FOR || code == OMP_SECTIONS)
+ if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
{
for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
@@ -8812,7 +8951,8 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
cancellable = error_mark_node;
break;
}
- else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
+ else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
+ && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
break;
}
tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
@@ -8928,11 +9068,11 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
tree lab3 = NULL_TREE, lab7 = NULL_TREE;
gimple *g;
- if (code == OMP_FOR || code == OMP_SECTIONS)
+ if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
{
- /* For worksharing constructs, only perform it in the master thread,
- with the exception of cancelled implicit barriers - then only handle
- the current thread. */
+ /* For worksharing constructs or scope, only perform it in the master
+ thread, with the exception of cancelled implicit barriers - then only
+ handle the current thread. */
tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
tree thr_num = create_tmp_var (integer_type_node);
@@ -8947,8 +9087,10 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
lab3 = create_artificial_label (UNKNOWN_LOCATION);
if (code == OMP_FOR)
c = gimple_omp_for_clauses (ctx->stmt);
- else /* if (code == OMP_SECTIONS) */
+ else if (code == OMP_SECTIONS)
c = gimple_omp_sections_clauses (ctx->stmt);
+ else /* if (code == OMP_SCOPE) */
+ c = gimple_omp_scope_clauses (ctx->stmt);
c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
cancellable = c;
g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
@@ -9083,8 +9225,11 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
tree bfield = DECL_CHAIN (field);
tree cond;
- if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
- /* In parallel or worksharing all threads unconditionally
+ if (code == OMP_PARALLEL
+ || code == OMP_FOR
+ || code == OMP_SECTIONS
+ || code == OMP_SCOPE)
+ /* In parallel, worksharing or scope all threads unconditionally
initialize all their task reduction private variables. */
cond = boolean_true_node;
else if (TREE_TYPE (ptr) == ptr_type_node)
@@ -9325,6 +9470,8 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
c = gimple_omp_for_clauses (ctx->stmt);
else if (code == OMP_SECTIONS)
c = gimple_omp_sections_clauses (ctx->stmt);
+ else if (code == OMP_SCOPE)
+ c = gimple_omp_scope_clauses (ctx->stmt);
else
c = gimple_omp_taskreg_clauses (ctx->stmt);
c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
@@ -9339,7 +9486,7 @@ lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
gimple_seq_add_stmt (end, g);
gimple_seq_add_stmt (end, gimple_build_label (lab2));
- if (code == OMP_FOR || code == OMP_SECTIONS)
+ if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
{
enum built_in_function bfn
= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
@@ -13897,6 +14044,11 @@ lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
lower_omp_sections (gsi_p, ctx);
break;
+ case GIMPLE_OMP_SCOPE:
+ ctx = maybe_lookup_ctx (stmt);
+ gcc_assert (ctx);
+ lower_omp_scope (gsi_p, ctx);
+ break;
case GIMPLE_OMP_SINGLE:
ctx = maybe_lookup_ctx (stmt);
gcc_assert (ctx);
@@ -14008,6 +14160,7 @@ lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
|| gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
|| gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
+ || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
|| gimple_code (up->stmt) == GIMPLE_OMP_SECTION
|| gimple_code (up->stmt) == GIMPLE_OMP_SCAN
|| (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
@@ -14277,6 +14430,7 @@ diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
case GIMPLE_OMP_PARALLEL:
case GIMPLE_OMP_TASK:
+ case GIMPLE_OMP_SCOPE:
case GIMPLE_OMP_SECTIONS:
case GIMPLE_OMP_SINGLE:
case GIMPLE_OMP_SECTION:
@@ -14339,6 +14493,7 @@ diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
case GIMPLE_OMP_PARALLEL:
case GIMPLE_OMP_TASK:
+ case GIMPLE_OMP_SCOPE:
case GIMPLE_OMP_SECTIONS:
case GIMPLE_OMP_SINGLE:
case GIMPLE_OMP_SECTION: