aboutsummaryrefslogtreecommitdiff
path: root/gcc/gimplify.cc
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/gimplify.cc')
-rw-r--r--gcc/gimplify.cc2784
1 files changed, 2358 insertions, 426 deletions
diff --git a/gcc/gimplify.cc b/gcc/gimplify.cc
index 4f385b1..ad7c3ff 100644
--- a/gcc/gimplify.cc
+++ b/gcc/gimplify.cc
@@ -71,6 +71,8 @@ along with GCC; see the file COPYING3. If not see
#include "context.h"
#include "tree-nested.h"
#include "gcc-urlifier.h"
+#include "dwarf2out.h"
+#include "tree-ssa-loop-niter.h" /* For simplify_replace_tree. */
/* Identifier for a basic condition, mapping it to other basic conditions of
its Boolean expression. Basic conditions given the same uid (in the same
@@ -180,6 +182,9 @@ enum gimplify_omp_var_data
/* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT. */
GOVD_FIRSTPRIVATE_IMPLICIT = 0x4000000,
+ /* Flag for OpenACC deviceptrs. */
+ GOVD_DEVICEPTR = (1<<24),
+
GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
| GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
| GOVD_LOCAL)
@@ -269,10 +274,53 @@ enum gimplify_defaultmap_kind
GDMK_POINTER
};
+/* Used for topological sorting of mapping groups. UNVISITED means we haven't
+ started processing the group yet. The TEMPORARY mark is used when we first
+ encounter a group on a depth-first traversal, and the PERMANENT mark is used
+ when we have processed all the group's children (i.e. all the base pointers
+ referred to by the group's mapping nodes, recursively). */
+
+enum omp_tsort_mark {
+ UNVISITED,
+ TEMPORARY,
+ PERMANENT
+};
+
+/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
+ clause. */
+
+struct omp_mapping_group {
+ tree *grp_start;
+ tree grp_end;
+ omp_tsort_mark mark;
+ /* If we've removed the group but need to reindex, mark the group as
+ deleted. */
+ bool deleted;
+ /* The group points to an already-created "GOMP_MAP_STRUCT
+ GOMP_MAP_ATTACH_DETACH" pair. */
+ bool reprocess_struct;
+ /* The group should use "zero-length" allocations for pointers that are not
+ mapped "to" on the same directive. */
+ bool fragile;
+ struct omp_mapping_group *sibling;
+ struct omp_mapping_group *next;
+
+ omp_mapping_group (tree *_start, tree _end)
+ : grp_start (_start), grp_end (_end), mark (UNVISITED), deleted (false),
+ reprocess_struct (false), fragile (false), sibling (NULL), next (NULL)
+ {
+ }
+
+ omp_mapping_group ()
+ {
+ }
+};
+
struct gimplify_omp_ctx
{
struct gimplify_omp_ctx *outer_context;
splay_tree variables;
+ hash_map<omp_name_type<tree>, tree> *implicit_mappers;
hash_set<tree> *privatized_types;
tree clauses;
/* Iteration variables in an OMP_FOR. */
@@ -289,7 +337,14 @@ struct gimplify_omp_ctx
bool has_depend;
bool in_for_exprs;
bool in_call_args;
+ bool ompacc;
int defaultmap[5];
+ hash_map<tree, omp_mapping_group *> *decl_data_clause;
+};
+
+struct privatize_reduction
+{
+ tree ref_var, local_var;
};
static struct gimplify_ctx *gimplify_ctxp;
@@ -507,6 +562,7 @@ new_omp_context (enum omp_region_type region_type)
c = XCNEW (struct gimplify_omp_ctx);
c->outer_context = gimplify_omp_ctxp;
c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
+ c->implicit_mappers = new hash_map<omp_name_type<tree>, tree>;
c->privatized_types = new hash_set<tree>;
c->location = input_location;
c->region_type = region_type;
@@ -519,6 +575,7 @@ new_omp_context (enum omp_region_type region_type)
c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
c->defaultmap[GDMK_POINTER] = GOVD_MAP;
+ c->decl_data_clause = NULL;
return c;
}
@@ -530,7 +587,9 @@ delete_omp_context (struct gimplify_omp_ctx *c)
{
splay_tree_delete (c->variables);
delete c->privatized_types;
+ delete c->implicit_mappers;
c->loop_iter_var.release ();
+ delete c->decl_data_clause;
XDELETE (c);
}
@@ -1431,28 +1490,62 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
&& DECL_CONTEXT (t) == current_function_decl
&& TREE_USED (t)
&& (attr = lookup_attribute ("omp allocate", DECL_ATTRIBUTES (t)))
- != NULL_TREE)
+ != NULL_TREE
+ && TREE_PURPOSE (TREE_VALUE (attr)) != error_mark_node)
{
gcc_assert (!DECL_HAS_VALUE_EXPR_P (t));
tree alloc = TREE_PURPOSE (TREE_VALUE (attr));
tree align = TREE_VALUE (TREE_VALUE (attr));
+ /* The C++ front end smuggles a location through the chain field,
+ clear it to avoid conflicts with Fortran specific code. */
+ if (TREE_CHAIN (TREE_VALUE (attr)) != NULL_TREE
+ && TREE_CODE (TREE_CHAIN (TREE_VALUE (attr))) == NOP_EXPR)
+ TREE_CHAIN (TREE_VALUE (attr)) = NULL_TREE;
/* Allocate directives that appear in a target region must specify
an allocator clause unless a requires directive with the
dynamic_allocators clause is present in the same compilation
unit. */
bool missing_dyn_alloc = false;
- if (alloc == NULL_TREE
- && ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS)
- == 0))
+ if ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0)
{
/* This comes too early for omp_discover_declare_target...,
but should at least catch the most common cases. */
missing_dyn_alloc
- = cgraph_node::get (current_function_decl)->offloadable;
+ = (alloc == NULL_TREE
+ && cgraph_node::get (current_function_decl)->offloadable);
for (struct gimplify_omp_ctx *ctx2 = ctx;
ctx2 && !missing_dyn_alloc; ctx2 = ctx2->outer_context)
if (ctx2->code == OMP_TARGET)
- missing_dyn_alloc = true;
+ {
+ if (alloc == NULL_TREE)
+ missing_dyn_alloc = true;
+ else if (TREE_CODE (alloc) != INTEGER_CST)
+ {
+ tree alloc2 = alloc;
+ if (TREE_CODE (alloc2) == MEM_REF
+ || TREE_CODE (alloc2) == INDIRECT_REF)
+ alloc2 = TREE_OPERAND (alloc2, 0);
+ tree c2;
+ for (c2 = ctx2->clauses; c2;
+ c2 = OMP_CLAUSE_CHAIN (c2))
+ if (OMP_CLAUSE_CODE (c2)
+ == OMP_CLAUSE_USES_ALLOCATORS)
+ {
+ tree t2
+ = OMP_CLAUSE_USES_ALLOCATORS_ALLOCATOR (c2);
+ if (operand_equal_p (alloc2, t2))
+ break;
+ }
+ if (c2 == NULL_TREE)
+ error_at (EXPR_LOC_OR_LOC (
+ alloc, DECL_SOURCE_LOCATION (t)),
+ "%qE in %<allocator%> clause inside a "
+ "target region must be specified in an "
+ "%<uses_allocators%> clause on the "
+ "%<target%> directive", alloc2);
+ }
+ break;
+ }
}
if (missing_dyn_alloc)
error_at (DECL_SOURCE_LOCATION (t),
@@ -3888,7 +3981,7 @@ modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
bool want_value, bool pointerize)
{
location_t loc = EXPR_LOCATION (expr);
- tree fndecl = get_callee_fndecl (expr);
+ const tree fndecl = get_callee_fndecl (expr);
/* Skip processing if we don't get the expected call form. */
if (!fndecl)
@@ -3897,23 +3990,180 @@ modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
tree init_code = NULL_TREE;
tree cleanup = NULL_TREE;
tree clobbers = NULL_TREE;
- int nargs = call_expr_nargs (expr);
+ const int nargs = call_expr_nargs (expr);
tree dispatch_device_num = NULL_TREE;
tree dispatch_interop = NULL_TREE;
tree dispatch_append_args = NULL_TREE;
+ /* Equal to the number of parameters. */
int nfirst_args = 0;
- tree dispatch_adjust_args_list
- = lookup_attribute ("omp declare variant variant args",
- DECL_ATTRIBUTES (fndecl));
- if (dispatch_adjust_args_list)
+ const const_tree nothing_id = get_identifier ("nothing");
+ const const_tree need_ptr_id = get_identifier ("need_device_ptr");
+ const const_tree need_addr_id = get_identifier ("need_device_addr");
+
+ vec<tree> dispatch_adjust_args_specifiers = vNULL;
+
+ if (tree declare_variant_variant_args_attr
+ = lookup_attribute ("omp declare variant variant args",
+ DECL_ATTRIBUTES (fndecl)))
{
+ /* Due to how the nodes are laid out, unpacking them is pretty
+ incomprehensible. */
+ gcc_assert (TREE_VALUE (declare_variant_variant_args_attr));
+ dispatch_append_args
+ = TREE_CHAIN (TREE_VALUE (declare_variant_variant_args_attr));
+ tree dispatch_adjust_args_list
+ = TREE_VALUE (declare_variant_variant_args_attr);
+ gcc_assert (dispatch_adjust_args_list);
dispatch_adjust_args_list = TREE_VALUE (dispatch_adjust_args_list);
- dispatch_append_args = TREE_CHAIN (dispatch_adjust_args_list);
- if (TREE_PURPOSE (dispatch_adjust_args_list) == NULL_TREE
- && TREE_VALUE (dispatch_adjust_args_list) == NULL_TREE)
- dispatch_adjust_args_list = NULL_TREE;
+
+ if (dispatch_adjust_args_list)
+ {
+ dispatch_adjust_args_specifiers.create (nargs);
+ for (int arg_idx = 0; arg_idx < nargs; ++arg_idx)
+ dispatch_adjust_args_specifiers.quick_push (NULL_TREE);
+
+ for (tree n = dispatch_adjust_args_list; n; n = TREE_CHAIN (n))
+ {
+ gcc_assert (TREE_VALUE (n)
+ && (TREE_PURPOSE (n) == nothing_id
+ || TREE_PURPOSE (n) == need_ptr_id
+ || TREE_PURPOSE (n) == need_addr_id));
+ tree item = TREE_VALUE (n);
+ /* Diagnostics make more sense if we defer these. */
+ if (TREE_CODE (TREE_VALUE (item)) == TREE_LIST)
+ continue;
+ gcc_assert (TREE_CODE (TREE_VALUE (item)) == INTEGER_CST);
+ const int idx = tree_to_shwi (TREE_VALUE (item));
+ if (idx >= nargs)
+ {
+ /* Adjust to a 1 based index for output. */
+ const int adjusted = idx + 1;
+ error_at (EXPR_LOCATION (TREE_PURPOSE (item)),
+ "parameter index %d is out of range with %d "
+ "arguments",
+ adjusted, nargs);
+ continue;
+ }
+ tree& spec_at_idx = dispatch_adjust_args_specifiers[idx];
+ gcc_assert (spec_at_idx == NULL_TREE);
+ spec_at_idx = n;
+ }
+ /* There might be a better place to put this. */
+ const bool variadic_func_p = [&] ()
+ {
+ tree parm_type = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
+ while (parm_type && parm_type != void_list_node)
+ parm_type = TREE_CHAIN (parm_type);
+ return parm_type != void_list_node;
+ } (); /* IIFE. */
+ auto expand_range = [&] (tree modifier_id, tree loc, tree range)
+ {
+ /* We only encounter numeric ranges here if fn is variadic. */
+ gcc_assert (variadic_func_p);
+ const location_t range_loc = EXPR_LOCATION (loc);
+ const tree lb_node = TREE_PURPOSE (range);
+ const tree ub_node = TREE_VALUE (range);
+ const bool relative_lb = TREE_PURPOSE (lb_node) != NULL_TREE;
+ const bool relative_ub = TREE_PURPOSE (ub_node) != NULL_TREE;
+ const ptrdiff_t lb_raw = tree_to_shwi (TREE_VALUE (lb_node));
+ const ptrdiff_t ub_raw = tree_to_shwi (TREE_VALUE (ub_node));
+ /* relative_lb implies lb_raw <= -1,
+ relative_ub implies ub_raw <= 0. */
+ gcc_assert ((relative_lb || relative_ub)
+ && (!relative_lb || lb_raw <= -1)
+ && (!relative_ub || ub_raw <= 0));
+ /* (relative_lb && relative_ub) implies lb_raw < ub_raw. */
+ gcc_assert (!(relative_lb && relative_ub) || lb_raw < ub_raw);
+ const ptrdiff_t lb = relative_lb ? lb_raw + nargs : lb_raw;
+ const ptrdiff_t ub = relative_ub ? ub_raw + nargs : ub_raw;
+ /* This will never happen, still gotta diagnose it. */
+ if (lb > INT_MAX || ub > INT_MAX)
+ {
+ if (lb > INT_MAX)
+ error_at (range_loc, "lb overflow");
+ else if (ub > INT_MAX)
+ error_at (range_loc, "ub overflow");
+ return;
+ }
+ /* Internally, ub is stored as one-past-the-end. */
+ if (lb < 0 || ub < 1)
+ {
+ if (lb < 0)
+ /* FIXME: Use location of lb specifically. */
+ error_at (range_loc,
+ "lower bound with logical offset is negative "
+ "with %d arguments",
+ nargs);
+ if (ub < 1)
+ /* FIXME: Use location of ub specifically. */
+ error_at (range_loc,
+ "upper bound with logical offset is negative "
+ "with %d arguments",
+ nargs);
+ return;
+ }
+ /* It's okay for lb and ub to be equal, we allow empty ranges
+ at this point. Don't bother diagnosing this if either bound
+ is out of range. */
+ if (lb > ub)
+ {
+ if (relative_lb)
+ error_at (range_loc,
+ "lower bound with logical offset is greater "
+ "than upper bound with %d arguments",
+ nargs);
+ else
+ error_at (range_loc,
+ "upper bound with logical offset is less than "
+ "lower bound with %d arguments",
+ nargs);
+ return;
+ }
+
+ for (int idx = lb; idx < ub; ++idx)
+ {
+ tree& spec_at_idx = dispatch_adjust_args_specifiers[idx];
+ if (spec_at_idx != NULL_TREE)
+ {
+ tree item = TREE_VALUE (spec_at_idx);
+ location_t dupe_loc
+ = EXPR_LOCATION (TREE_PURPOSE (item));
+ /* FIXME: Use nfirst_args to determine whether an index
+ refers to a variadic argument to enhance the
+ diagnostic. */
+ error_at (range_loc,
+ "expansion of numeric range with %d "
+ "arguments specifies an already specified "
+ "parameter",
+ nargs);
+ inform (dupe_loc, "parameter previously specified here");
+ /* Give up after the first collision to avoid spamming
+ errors. Alternatively, we could also remember which
+ ones we diagnosed, but it doesn't seem worth it. */
+ return;
+ }
+ else
+ {
+ /* We don't need to create an index node anymore,
+ it is represented by the position in vec. */
+ tree new_item = build_tree_list (loc, NULL_TREE);
+ spec_at_idx = build_tree_list (modifier_id, new_item);
+ }
+ }
+ };
+ for (tree n = dispatch_adjust_args_list; n; n = TREE_CHAIN (n))
+ {
+ tree item = TREE_VALUE (n);
+ if (TREE_CODE (TREE_VALUE (item)) != TREE_LIST)
+ continue;
+ expand_range (TREE_PURPOSE (n),
+ TREE_PURPOSE (item),
+ TREE_VALUE (item));
+ }
+ }
}
+
if (dispatch_append_args)
{
nfirst_args = tree_to_shwi (TREE_PURPOSE (dispatch_append_args));
@@ -3923,9 +4173,8 @@ modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
if (dispatch_device_num)
dispatch_device_num = OMP_CLAUSE_DEVICE_ID (dispatch_device_num);
dispatch_interop = omp_find_clause (dispatch_clauses, OMP_CLAUSE_INTEROP);
- int nappend = 0, ninterop = 0;
- for (tree t = dispatch_append_args; t; t = TREE_CHAIN (t))
- nappend++;
+ const int nappend = list_length (dispatch_append_args);
+ int ninterop = 0;
/* FIXME: error checking should be taken out of this function and
handled before any attempt at filtering or resolution happens.
@@ -4153,10 +4402,14 @@ modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
i += nappend;
for (j = nfirst_args; j < nargs; j++)
buffer[i++] = CALL_EXPR_ARG (expr, j);
- nargs += nappend;
+ /* Leave nargs alone so we don't need to account for changes of varargs
+ indices when adjusting the arguments below.
+ We also don't want any surprises if we move the above append_args
+ handling down, as it depends on nargs. */
+ const int new_nargs = nargs + nappend;
tree call = expr;
expr = build_call_array_loc (EXPR_LOCATION (expr), TREE_TYPE (call),
- CALL_EXPR_FN (call), nargs, buffer);
+ CALL_EXPR_FN (call), new_nargs, buffer);
/* Copy all CALL_EXPR flags. */
CALL_EXPR_STATIC_CHAIN (expr) = CALL_EXPR_STATIC_CHAIN (call);
@@ -4168,139 +4421,220 @@ modify_call_for_omp_dispatch (tree expr, tree dispatch_clauses,
CALL_EXPR_VA_ARG_PACK (expr) = CALL_EXPR_VA_ARG_PACK (call);
}
- /* Nothing to do for adjust_args? */
- if (!dispatch_adjust_args_list || !TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
- goto add_cleanup;
-
- /* Handle adjust_args. */
- for (int i = 0; i < nargs; i++)
+ auto adjust_the_arg = [&] (tree arg, tree aa_spec)
{
- tree *arg_p = &CALL_EXPR_ARG (expr, i);
+ if (integer_zerop (arg) || !aa_spec)
+ return arg;
+ const bool need_device_ptr = TREE_PURPOSE (aa_spec) == need_ptr_id;
+ const bool need_device_addr = TREE_PURPOSE (aa_spec) == need_addr_id;
+ if (!need_device_ptr && !need_device_addr)
+ return arg;
- /* Nothing to do if arg is constant null pointer. */
- if (integer_zerop (*arg_p))
- continue;
+ auto find_arg_in_clause = [&] (const_tree clauses) -> const_tree
+ {
+ const const_tree arg_decl = [&] ()
+ {
+ tree arg_decl = tree_strip_nop_conversions (arg);
+ if (TREE_CODE (arg_decl) == ADDR_EXPR)
+ arg_decl = TREE_OPERAND (arg_decl, 0);
+ return arg_decl;
+ } (); /* IIFE. */
+ for (const_tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
+ {
+ if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
+ && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
+ continue;
+ const tree name_in_clause = DECL_NAME (OMP_CLAUSE_DECL (c));
+ if ((VAR_P (arg_decl) || TREE_CODE (arg_decl) == PARM_DECL)
+ && name_in_clause == DECL_NAME (arg_decl))
+ return c;
+ }
+ return NULL_TREE;
+ };
+ /* The code this was refactored from stops on the first clause with a
+ matching var/parm specified in it. */
+ const_tree clause_with_arg = find_arg_in_clause (dispatch_clauses);
+ /* I assume if a var/parm is used in multiple clauses it gets diagnosed
+ before we get here, make sure that is true. */
+ gcc_checking_assert (!clause_with_arg
+ || !find_arg_in_clause
+ (OMP_CLAUSE_CHAIN (clause_with_arg)));
+
+ const bool is_device_ptr = clause_with_arg
+ && OMP_CLAUSE_CODE (clause_with_arg)
+ == OMP_CLAUSE_IS_DEVICE_PTR;
+ const bool has_device_addr = clause_with_arg
+ && OMP_CLAUSE_CODE (clause_with_arg)
+ == OMP_CLAUSE_HAS_DEVICE_ADDR;
+ /* Obviously impossible with how things are currently implemented. */
+ gcc_assert (!(is_device_ptr && has_device_addr));
+
+ if (need_device_addr && is_device_ptr)
+ warning_at (OMP_CLAUSE_LOCATION (clause_with_arg),
+ OPT_Wopenmp,
+ "%<is_device_ptr%> for %qD does not imply "
+ "%<has_device_addr%> required for %<need_device_addr%>",
+ OMP_CLAUSE_DECL (clause_with_arg));
+ if (need_device_ptr && has_device_addr)
+ warning_at (OMP_CLAUSE_LOCATION (clause_with_arg),
+ OPT_Wopenmp,
+ "%<has_device_addr%> for %qD does not imply "
+ "%<is_device_ptr%> required for %<need_device_ptr%>",
+ OMP_CLAUSE_DECL (clause_with_arg));
+ /* ARG does not need to be adjusted. */
+ if ((need_device_ptr && is_device_ptr)
+ || (need_device_addr && has_device_addr))
+ return arg;
+
+ if (dispatch_device_num == NULL_TREE)
+ {
+ // device_num = omp_get_default_device ()
+ tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
+ tree call = build_call_expr (fn, 0);
+ dispatch_device_num = create_tmp_var_raw (TREE_TYPE (call));
+ tree init = build4 (TARGET_EXPR, TREE_TYPE (call),
+ dispatch_device_num, call, NULL_TREE, NULL_TREE);
+ if (init_code)
+ init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
+ init_code, init);
+ else
+ init_code = init;
+ }
- bool need_device_ptr = false;
- bool need_device_addr = false;
- for (int need_addr = 0; need_addr <= 1; need_addr++)
- for (tree arg = (need_addr
- ? TREE_VALUE (dispatch_adjust_args_list)
- : TREE_PURPOSE (dispatch_adjust_args_list));
- arg != NULL; arg = TREE_CHAIN (arg))
- {
- if (TREE_VALUE (arg)
- && TREE_CODE (TREE_VALUE (arg)) == INTEGER_CST
- && wi::eq_p (i, wi::to_wide (TREE_VALUE (arg))))
- {
- if (need_addr)
- need_device_addr = true;
- else
- need_device_ptr = true;
- break;
- }
- }
+ // We want to emit the following statement:
+ // mapped_arg = omp_get_mapped_ptr (arg,
+ // device_num)
+ // but arg has to be the actual pointer, not a
+ // reference or a conversion expression.
+ tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_MAPPED_PTR);
+ tree mapped_arg = NULL_TREE;
+ bool reference_to_ptr_p = false;
+
+ tree argtype = TREE_TYPE (arg);
+ if (!POINTER_TYPE_P (argtype))
+ {
+ sorry_at (EXPR_LOCATION (arg),
+ "Invalid non-pointer/reference argument "
+ "not diagnosed properly earlier");
+ return arg;
+ }
- if (need_device_ptr || need_device_addr)
+ /* Fortran C_PTR passed by reference? Also handle the weird case
+ where an array of C_PTR is passed instead of its first element. */
+ if (need_device_ptr
+ && lang_GNU_Fortran ()
+ && (POINTER_TYPE_P (TREE_TYPE (argtype))
+ || (TREE_CODE (TREE_TYPE (argtype)) == ARRAY_TYPE
+ && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (argtype))))))
+ reference_to_ptr_p = true;
+
+ /* C++ pointer passed by reference? */
+ else if (need_device_ptr
+ && TREE_CODE (argtype) == REFERENCE_TYPE
+ && TREE_CODE (TREE_TYPE (argtype)) == POINTER_TYPE)
+ reference_to_ptr_p = true;
+
+ /* If reference_to_ptr_p is true, we need to dereference arg to
+ get the actual pointer. */
+ tree actual_ptr = (reference_to_ptr_p
+ ? build_fold_indirect_ref (arg) : arg);
+ tree actual_ptr_type = TREE_TYPE (actual_ptr);
+ STRIP_NOPS (actual_ptr);
+
+ if (lang_hooks.decls.omp_array_data (actual_ptr, true))
{
- bool is_device_ptr = false;
- bool has_device_addr = false;
+ /* This is a Fortran array with a descriptor. The actual_ptr that
+ lives on the target is the array data, not the descriptor. */
+ tree array_data
+ = lang_hooks.decls.omp_array_data (actual_ptr, false);
+ tree mapped_array_data =
+ build_call_expr_loc (loc, fn, 2, array_data, dispatch_device_num);
+
+ gcc_assert (TREE_CODE (array_data) == COMPONENT_REF);
+
+ /* We need to create a new array descriptor newd that points at the
+ mapped actual_ptr instead of the original one. Start by
+ creating the new descriptor and copy-initializing it from the
+ existing one. */
+ tree oldd = TREE_OPERAND (array_data, 0);
+ tree newd = create_tmp_var (TREE_TYPE (oldd), get_name (oldd));
+ tree t2 = build2 (MODIFY_EXPR, void_type_node, newd, oldd);
+ if (init_code)
+ init_code = build2 (COMPOUND_EXPR, void_type_node, init_code, t2);
+ else
+ init_code = t2;
+
+ /* Now stash the mapped array pointer in the new descriptor newd. */
+ tree lhs = build3 (COMPONENT_REF, TREE_TYPE (array_data), newd,
+ TREE_OPERAND (array_data, 1),
+ TREE_OPERAND (array_data, 2));
+ t2 = build2 (MODIFY_EXPR, void_type_node, lhs, mapped_array_data);
+ init_code = build2 (COMPOUND_EXPR, void_type_node, init_code, t2);
+ mapped_arg = build_fold_addr_expr (newd);
+ }
+ else
+ mapped_arg
+ = build_call_expr_loc (loc, fn, 2, actual_ptr, dispatch_device_num);
- for (tree c = dispatch_clauses; c; c = TREE_CHAIN (c))
- {
- if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
- || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
- {
- tree decl1 = DECL_NAME (OMP_CLAUSE_DECL (c));
- tree decl2 = tree_strip_nop_conversions (*arg_p);
- if (TREE_CODE (decl2) == ADDR_EXPR)
- decl2 = TREE_OPERAND (decl2, 0);
- if (VAR_P (decl2) || TREE_CODE (decl2) == PARM_DECL)
- {
- decl2 = DECL_NAME (decl2);
- if (decl1 == decl2
- && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
- {
- if (need_device_addr)
- warning_at (OMP_CLAUSE_LOCATION (c),
- OPT_Wopenmp,
- "%<is_device_ptr%> for %qD does"
- " not imply %<has_device_addr%> "
- "required for %<need_device_addr%>",
- OMP_CLAUSE_DECL (c));
- is_device_ptr = true;
- break;
- }
- else if (decl1 == decl2)
- {
- if (need_device_ptr)
- warning_at (OMP_CLAUSE_LOCATION (c),
- OPT_Wopenmp,
- "%<has_device_addr%> for %qD does"
- " not imply %<is_device_ptr%> "
- "required for %<need_device_ptr%>",
- OMP_CLAUSE_DECL (c));
- has_device_addr = true;
- break;
- }
- }
- }
- }
+ /* Cast mapped_arg back to its original type, and if we need a
+ reference, build one. */
+ mapped_arg = build1 (NOP_EXPR, actual_ptr_type, mapped_arg);
+ if (reference_to_ptr_p)
+ mapped_arg = build_fold_addr_expr (mapped_arg);
+ return mapped_arg;
+ };
- if ((need_device_ptr && !is_device_ptr)
- || (need_device_addr && !has_device_addr))
- {
- if (dispatch_device_num == NULL_TREE)
- {
- // device_num = omp_get_default_device ()
- tree fn
- = builtin_decl_explicit (BUILT_IN_OMP_GET_DEFAULT_DEVICE);
- tree call = build_call_expr (fn, 0);
- dispatch_device_num = create_tmp_var_raw (TREE_TYPE (call));
- tree init
- = build4 (TARGET_EXPR, TREE_TYPE (call),
- dispatch_device_num, call, NULL_TREE, NULL_TREE);
- if (init_code)
- init_code = build2 (COMPOUND_EXPR, TREE_TYPE (init),
- init_code, init);
- else
- init_code = init;
- }
+ /* Nothing to do for adjust_args? */
+ const bool adjust_args_needed = [&] ()
+ {
+ if (!dispatch_adjust_args_specifiers.exists ())
+ return false;
+ for (auto const& aa_spec : dispatch_adjust_args_specifiers)
+ {
+ if (aa_spec
+ && (TREE_PURPOSE (aa_spec) == need_ptr_id
+ || TREE_PURPOSE (aa_spec) == need_addr_id))
+ return true;
+ }
+ return false;
+ } (); /* IIFE. */
- // We want to emit the following statement:
- // mapped_arg = omp_get_mapped_ptr (arg,
- // device_num)
- // but arg has to be the actual pointer, not a
- // reference or a conversion expression.
- tree actual_ptr
- = ((TREE_CODE (*arg_p) == ADDR_EXPR)
- ? TREE_OPERAND (*arg_p, 0)
- : *arg_p);
- if (TREE_CODE (actual_ptr) == NOP_EXPR
- && (TREE_CODE (TREE_TYPE (TREE_OPERAND (actual_ptr, 0)))
- == REFERENCE_TYPE))
- {
- actual_ptr = TREE_OPERAND (actual_ptr, 0);
- actual_ptr = build1 (INDIRECT_REF,
- TREE_TYPE (actual_ptr),
- actual_ptr);
- }
- tree fn = builtin_decl_explicit (BUILT_IN_OMP_GET_MAPPED_PTR);
- tree mapped_arg = build_call_expr_loc (loc, fn, 2, actual_ptr,
- dispatch_device_num);
-
- if (TREE_CODE (*arg_p) == ADDR_EXPR
- || (TREE_CODE (TREE_TYPE (actual_ptr)) == REFERENCE_TYPE))
- mapped_arg = build_fold_addr_expr (mapped_arg);
- else if (TREE_CODE (*arg_p) == NOP_EXPR)
- mapped_arg = build1 (NOP_EXPR, TREE_TYPE (*arg_p),
- mapped_arg);
- *arg_p = mapped_arg;
- }
+ if (adjust_args_needed)
+ {
+ /* FIXME: We need to check argument types. */
+ const int num_parms = nfirst_args ? nfirst_args : nargs;
+ /* adjust_the_arg returns arg unchanged if no adjustments are needed. */
+ for (int idx = 0; idx < num_parms; ++idx)
+ {
+ gcc_assert (dispatch_adjust_args_specifiers.length ()
+ > static_cast<size_t>(idx));
+ const tree aa_spec = dispatch_adjust_args_specifiers[idx];
+ tree *const arg = &CALL_EXPR_ARG (expr, idx);
+ *arg = adjust_the_arg (*arg, aa_spec);
+ }
+ /* Variadic args come after append_args args, we can't do adjust_args
+ until after append_args is done though because append_args needs to
+ push into init_code first. We can probably fix this, but until then
+ we just need to adjust our index into CALL_EXPR_ARG by the number of
+ appended args.
+ It would just be simpler if we could handle adjust_args first, but I
+ don't know if there is a trivial way of handling the init_code
+ ordering.
+ This only handles varargs in functions that have an append_args
+ clause, varargs are handled in the above loop otherwise and this loop
+ is skipped. */
+ const int varargs_start = num_parms;
+ for (int idx = varargs_start; idx < nargs; ++idx)
+ {
+ gcc_assert (dispatch_adjust_args_specifiers.length ()
+ > static_cast<size_t>(idx));
+ const tree aa_spec = dispatch_adjust_args_specifiers[idx];
+ const int call_expr_arg_idx = idx + nappend;
+ tree *const arg = &CALL_EXPR_ARG (expr, call_expr_arg_idx);
+ *arg = adjust_the_arg (*arg, aa_spec);
}
}
- add_cleanup:
if (cleanup)
{
tree result = NULL_TREE;
@@ -8670,20 +9004,27 @@ omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
else
splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
- /* For reductions clauses in OpenACC loop directives, by default create a
- copy clause on the enclosing parallel construct for carrying back the
- results. */
+ /* For OpenACC loop directives, when a reduction clause is placed on
+ the outermost acc loop within an acc parallel or kernels
+ construct, it must have an implied copy data mapping. E.g.
+
+ #pragma acc parallel
+ {
+ #pragma acc loop reduction (+:sum)
+
+ a copy clause for sum should be added on the enclosing parallel
+ construct for carrying back the results. */
if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
{
struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
- while (outer_ctx)
+ if (outer_ctx)
{
n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
if (n != NULL)
{
/* Ignore local variables and explicitly declared clauses. */
if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
- break;
+ ;
else if (outer_ctx->region_type == ORT_ACC_KERNELS)
{
/* According to the OpenACC spec, such a reduction variable
@@ -8703,9 +9044,7 @@ omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
GOVD_MAP | GOVD_SEEN);
- break;
}
- outer_ctx = outer_ctx->outer_context;
}
}
}
@@ -9214,6 +9553,7 @@ omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
error ("variable %qE declared in enclosing "
"%<host_data%> region", DECL_NAME (decl));
nflags |= GOVD_MAP;
+ nflags |= (n2->value & GOVD_DEVICEPTR);
if (octx->region_type == ORT_ACC_DATA
&& (n2->value & GOVD_MAP_0LEN_ARRAY))
nflags |= GOVD_MAP_0LEN_ARRAY;
@@ -9523,9 +9863,7 @@ gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
{
tree t = OMP_CLAUSE_DECL (c);
- if (TREE_CODE (t) == TREE_LIST
- && TREE_PURPOSE (t)
- && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
+ if (OMP_ITERATOR_DECL_P (t))
{
if (TREE_VALUE (t) == null_pointer_node)
continue;
@@ -9539,19 +9877,19 @@ gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
}
for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
{
- if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
+ if (gimplify_expr (&OMP_ITERATORS_BEGIN (it), pre_p, NULL,
is_gimple_val, fb_rvalue) == GS_ERROR
- || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
+ || gimplify_expr (&OMP_ITERATORS_END (it), pre_p, NULL,
is_gimple_val, fb_rvalue) == GS_ERROR
- || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
+ || gimplify_expr (&OMP_ITERATORS_STEP (it), pre_p, NULL,
is_gimple_val, fb_rvalue) == GS_ERROR
- || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
- is_gimple_val, fb_rvalue)
+ || (gimplify_expr (&OMP_ITERATORS_ORIG_STEP (it), pre_p,
+ NULL, is_gimple_val, fb_rvalue)
== GS_ERROR))
return;
}
last_iter = TREE_PURPOSE (t);
- tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
+ tree block = OMP_ITERATORS_BLOCK (TREE_PURPOSE (t));
last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
NULL, block);
last_body = &BIND_EXPR_BODY (last_bind);
@@ -9559,10 +9897,10 @@ gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
location_t loc = OMP_CLAUSE_LOCATION (c);
for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
{
- tree var = TREE_VEC_ELT (it, 0);
- tree begin = TREE_VEC_ELT (it, 1);
- tree end = TREE_VEC_ELT (it, 2);
- tree step = TREE_VEC_ELT (it, 3);
+ tree var = OMP_ITERATORS_VAR (it);
+ tree begin = OMP_ITERATORS_BEGIN (it);
+ tree end = OMP_ITERATORS_END (it);
+ tree step = OMP_ITERATORS_STEP (it);
loc = DECL_SOURCE_LOCATION (var);
tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
var, begin);
@@ -9630,6 +9968,640 @@ gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
return;
}
+/* Returns a tree expression containing the total iteration count of the
+ OpenMP iterator IT. */
+
+static tree
+compute_omp_iterator_count (tree it, gimple_seq *pre_p)
+{
+ tree tcnt = size_one_node;
+ for (; it; it = TREE_CHAIN (it))
+ {
+ if (gimplify_expr (&OMP_ITERATORS_BEGIN (it), pre_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR
+ || gimplify_expr (&OMP_ITERATORS_END (it), pre_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR
+ || gimplify_expr (&OMP_ITERATORS_STEP (it), pre_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR
+ || (gimplify_expr (&OMP_ITERATORS_ORIG_STEP (it), pre_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR))
+ return NULL_TREE;
+ tree var = OMP_ITERATORS_VAR (it);
+ tree begin = OMP_ITERATORS_BEGIN (it);
+ tree end = OMP_ITERATORS_END (it);
+ tree step = OMP_ITERATORS_STEP (it);
+ tree orig_step = OMP_ITERATORS_ORIG_STEP (it);
+ tree type = TREE_TYPE (var);
+ tree stype = TREE_TYPE (step);
+ location_t loc = DECL_SOURCE_LOCATION (var);
+ tree endmbegin;
+ /* Compute count for this iterator as
+ orig_step > 0
+ ? (begin < end ? (end - begin + (step - 1)) / step : 0)
+ : (begin > end ? (end - begin + (step + 1)) / step : 0)
+ and compute product of those for the entire clause. */
+ if (POINTER_TYPE_P (type))
+ endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR, stype, end, begin);
+ else
+ endmbegin = fold_build2_loc (loc, MINUS_EXPR, type, end, begin);
+ /* Account for iteration stopping on the end value in Fortran rather
+ than before it. */
+ tree stepm1 = step;
+ tree stepp1 = step;
+ if (!lang_GNU_Fortran ())
+ {
+ stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype, step,
+ build_int_cst (stype, 1));
+ stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
+ build_int_cst (stype, 1));
+ }
+ tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
+ unshare_expr (endmbegin), stepm1);
+ pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, pos, step);
+ tree neg = fold_build2_loc (loc, PLUS_EXPR, stype, endmbegin, stepp1);
+ if (TYPE_UNSIGNED (stype))
+ {
+ neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
+ step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
+ }
+ neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, neg, step);
+ step = NULL_TREE;
+ tree_code cmp_op = lang_GNU_Fortran () ? LE_EXPR : LT_EXPR;
+ tree cond = fold_build2_loc (loc, cmp_op, boolean_type_node, begin, end);
+ pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
+ build_int_cst (stype, 0));
+ cond = fold_build2_loc (loc, cmp_op, boolean_type_node, end, begin);
+ neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
+ build_int_cst (stype, 0));
+ tree osteptype = TREE_TYPE (orig_step);
+ cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
+ build_int_cst (osteptype, 0));
+ tree cnt = fold_build3_loc (loc, COND_EXPR, stype, cond, pos, neg);
+ cnt = fold_convert_loc (loc, sizetype, cnt);
+ if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
+ fb_rvalue) == GS_ERROR)
+ return NULL_TREE;
+ tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
+ }
+ if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
+ return NULL_TREE;
+
+ return tcnt;
+}
+
+/* Build loops iterating over the space defined by the OpenMP iterator IT.
+ Returns a pointer to the BIND_EXPR_BODY in the innermost loop body.
+ LAST_BIND is set to point to the BIND_EXPR containing the whole loop. */
+
+static tree *
+build_omp_iterator_loop (tree it, gimple_seq *pre_p, tree *last_bind)
+{
+ if (*last_bind)
+ gimplify_and_add (*last_bind, pre_p);
+ tree block = OMP_ITERATORS_BLOCK (it);
+ tree block_stmts = lang_GNU_Fortran () ? BLOCK_SUBBLOCKS (block) : NULL_TREE;
+ *last_bind = build3 (BIND_EXPR, void_type_node,
+ BLOCK_VARS (block), NULL, block);
+ TREE_SIDE_EFFECTS (*last_bind) = 1;
+ tree *p = &BIND_EXPR_BODY (*last_bind);
+ for (; it; it = TREE_CHAIN (it))
+ {
+ tree var = OMP_ITERATORS_VAR (it);
+ tree begin = OMP_ITERATORS_BEGIN (it);
+ tree end = OMP_ITERATORS_END (it);
+ tree step = OMP_ITERATORS_STEP (it);
+ tree orig_step = OMP_ITERATORS_ORIG_STEP (it);
+ block = OMP_ITERATORS_BLOCK (it);
+ tree type = TREE_TYPE (var);
+ location_t loc = DECL_SOURCE_LOCATION (var);
+ /* Emit:
+ var = begin;
+ goto cond_label;
+ beg_label:
+ ...
+ var = var + step;
+ cond_label:
+ if (orig_step > 0) {
+ if (var < end) goto beg_label; // <= for Fortran
+ } else {
+ if (var > end) goto beg_label; // >= for Fortran
+ }
+ for each iterator, with inner iterators added to
+ the ... above. */
+ tree beg_label = create_artificial_label (loc);
+ tree cond_label = NULL_TREE;
+ tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, begin);
+ append_to_statement_list_force (tem, p);
+ tem = build_and_jump (&cond_label);
+ append_to_statement_list_force (tem, p);
+ tem = build1 (LABEL_EXPR, void_type_node, beg_label);
+ append_to_statement_list (tem, p);
+ tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
+ NULL_TREE, NULL_TREE);
+ TREE_SIDE_EFFECTS (bind) = 1;
+ SET_EXPR_LOCATION (bind, loc);
+ append_to_statement_list_force (bind, p);
+ if (POINTER_TYPE_P (type))
+ tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
+ var, fold_convert_loc (loc, sizetype, step));
+ else
+ tem = build2_loc (loc, PLUS_EXPR, type, var, step);
+ tem = build2_loc (loc, MODIFY_EXPR, void_type_node, var, tem);
+ append_to_statement_list_force (tem, p);
+ tem = build1 (LABEL_EXPR, void_type_node, cond_label);
+ append_to_statement_list (tem, p);
+ tree cond = fold_build2_loc (loc, lang_GNU_Fortran () ? LE_EXPR : LT_EXPR,
+ boolean_type_node, var, end);
+ tree pos = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
+ build_and_jump (&beg_label), void_node);
+ cond = fold_build2_loc (loc, lang_GNU_Fortran () ? GE_EXPR : GT_EXPR,
+ boolean_type_node, var, end);
+ tree neg = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
+ build_and_jump (&beg_label), void_node);
+ tree osteptype = TREE_TYPE (orig_step);
+ cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, orig_step,
+ build_int_cst (osteptype, 0));
+ tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, pos, neg);
+ append_to_statement_list_force (tem, p);
+ p = &BIND_EXPR_BODY (bind);
+ /* The Fortran front-end stashes statements into the BLOCK_SUBBLOCKS
+ of the last element of the first iterator. These should go into the
+ body of the innermost loop. */
+ if (!TREE_CHAIN (it))
+ append_to_statement_list_force (block_stmts, p);
+ }
+
+ return p;
+}
+
+
+/* Callback for walk_tree to find a VAR_DECL (stored in DATA) in the
+ tree TP. */
+
+static tree
+find_var_decl (tree *tp, int *, void *data)
+{
+ if (*tp == (tree) data)
+ return *tp;
+
+ return NULL_TREE;
+}
+
+/* Returns an element-by-element copy of OMP iterator tree IT. */
+
+static tree
+copy_omp_iterator (tree it, int elem_count = -1)
+{
+ if (elem_count < 0)
+ elem_count = TREE_VEC_LENGTH (it);
+ tree new_it = make_tree_vec (elem_count);
+ for (int i = 0; i < TREE_VEC_LENGTH (it); i++)
+ TREE_VEC_ELT (new_it, i) = TREE_VEC_ELT (it, i);
+
+ return new_it;
+}
+
+/* Helper function for walk_tree in remap_omp_iterator_var. */
+
+static tree
+remap_omp_iterator_var_1 (tree *tp, int *, void *data)
+{
+ tree old_var = ((tree *) data)[0];
+ tree new_var = ((tree *) data)[1];
+
+ if (*tp == old_var)
+ *tp = new_var;
+ return NULL_TREE;
+}
+
+/* Replace instances of OLD_VAR in TP with NEW_VAR. */
+
+static void
+remap_omp_iterator_var (tree *tp, tree old_var, tree new_var)
+{
+ tree vars[2] = { old_var, new_var };
+ walk_tree (tp, remap_omp_iterator_var_1, vars, NULL);
+}
+
+/* Scan through all clauses using OpenMP iterators in LIST_P. If any
+ clauses have iterators with variables that are not used by the clause
+ decl or size, issue a warning and replace the iterator with a copy with
+ the unused variables removed. */
+
+static void
+remove_unused_omp_iterator_vars (tree *list_p)
+{
+ auto_vec< vec<tree> > iter_vars;
+ auto_vec<tree> new_iterators;
+
+ for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
+ {
+ if (!OMP_CLAUSE_HAS_ITERATORS (c))
+ continue;
+ auto_vec<tree> vars;
+ bool need_new_iterators = false;
+ for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
+ {
+ tree var = OMP_ITERATORS_VAR (it);
+ tree t = walk_tree (&OMP_CLAUSE_DECL (c), find_var_decl, var, NULL);
+ if (t == NULL_TREE)
+ t = walk_tree (&OMP_CLAUSE_SIZE (c), find_var_decl, var, NULL);
+ if (t == NULL_TREE)
+ {
+ need_new_iterators = true;
+ if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
+ || OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH)
+ warning_at (OMP_CLAUSE_LOCATION (c), 0,
+ "iterator variable %qE not used in clause "
+ "expression", DECL_NAME (var));
+ }
+ else
+ vars.safe_push (var);
+ }
+ if (!need_new_iterators)
+ continue;
+ if (need_new_iterators && vars.is_empty ())
+ {
+ /* No iteration variables are used in the clause - remove the
+ iterator from the clause. */
+ OMP_CLAUSE_ITERATORS (c) = NULL_TREE;
+ continue;
+ }
+
+ /* If a new iterator has been created for the current set of used
+ iterator variables, then use that as the iterator. Otherwise,
+ create a new iterator for the current iterator variable set. */
+ unsigned i;
+ for (i = 0; i < iter_vars.length (); i++)
+ {
+ if (vars.length () != iter_vars[i].length ())
+ continue;
+ bool identical_p = true;
+ for (unsigned j = 0; j < vars.length () && identical_p; j++)
+ identical_p = vars[j] == iter_vars[i][j];
+
+ if (identical_p)
+ break;
+ }
+ if (i < iter_vars.length ())
+ OMP_CLAUSE_ITERATORS (c) = new_iterators[i];
+ else
+ {
+ tree new_iters = NULL_TREE;
+ tree *new_iters_p = &new_iters;
+ tree new_vars = NULL_TREE;
+ tree *new_vars_p = &new_vars;
+ i = 0;
+ for (tree it = OMP_CLAUSE_ITERATORS (c); it && i < vars.length();
+ it = TREE_CHAIN (it))
+ {
+ tree var = OMP_ITERATORS_VAR (it);
+ if (var == vars[i])
+ {
+ *new_iters_p = copy_omp_iterator (it);
+ *new_vars_p = build_decl (OMP_CLAUSE_LOCATION (c), VAR_DECL,
+ DECL_NAME (var), TREE_TYPE (var));
+ DECL_ARTIFICIAL (*new_vars_p) = 1;
+ DECL_CONTEXT (*new_vars_p) = DECL_CONTEXT (var);
+ OMP_ITERATORS_VAR (*new_iters_p) = *new_vars_p;
+ new_iters_p = &TREE_CHAIN (*new_iters_p);
+ new_vars_p = &DECL_CHAIN (*new_vars_p);
+ i++;
+ }
+ }
+ tree old_block = OMP_ITERATORS_BLOCK (OMP_CLAUSE_ITERATORS (c));
+ tree new_block = make_node (BLOCK);
+ BLOCK_VARS (new_block) = new_vars;
+ if (BLOCK_SUBBLOCKS (old_block))
+ {
+ BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (old_block);
+ BLOCK_SUBBLOCKS (old_block) = NULL_TREE;
+ }
+ OMP_ITERATORS_BLOCK (new_iters) = new_block;
+ new_iterators.safe_push (new_iters);
+ iter_vars.safe_push (vars.copy ());
+ OMP_CLAUSE_ITERATORS (c) = new_iters;
+ }
+
+ /* Remap clause to use the new variables. */
+ i = 0;
+ for (tree it = OMP_CLAUSE_ITERATORS (c); it; it = TREE_CHAIN (it))
+ {
+ tree old_var = vars[i++];
+ tree new_var = OMP_ITERATORS_VAR (it);
+ remap_omp_iterator_var (&OMP_CLAUSE_DECL (c), old_var, new_var);
+ remap_omp_iterator_var (&OMP_CLAUSE_SIZE (c), old_var, new_var);
+ }
+ }
+
+ for (unsigned i = 0; i < iter_vars.length (); i++)
+ iter_vars[i].release ();
+}
+
+struct iterator_loop_info_t
+{
+ tree bind;
+ tree count;
+ tree index;
+ tree body_label;
+ auto_vec<tree> clauses;
+};
+
+typedef hash_map<tree, iterator_loop_info_t> iterator_loop_info_map_t;
+
+tree
+omp_iterator_elems_length (tree count)
+{
+ tree count_2 = size_binop (MULT_EXPR, count, size_int (2));
+ return size_binop (PLUS_EXPR, count_2, size_int (1));
+}
+
+/* Builds a loop to expand any OpenMP iterators in the clauses in LIST_P,
+ reusing any previously built loops if they use the same set of iterators.
+ Generated Gimple statements are placed into LOOPS_SEQ_P. The clause
+ iterators are updated with information on how and where to insert code into
+ the loop body. */
+
+static void
+build_omp_iterators_loops (tree *list_p, gimple_seq *loops_seq_p)
+{
+ iterator_loop_info_map_t loops;
+
+ for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
+ {
+ if (!OMP_CLAUSE_HAS_ITERATORS (c))
+ continue;
+
+ bool built_p;
+ iterator_loop_info_t &loop
+ = loops.get_or_insert (OMP_CLAUSE_ITERATORS (c), &built_p);
+
+ if (!built_p)
+ {
+ loop.count = compute_omp_iterator_count (OMP_CLAUSE_ITERATORS (c),
+ loops_seq_p);
+ if (!loop.count)
+ continue;
+
+ loop.bind = NULL_TREE;
+ tree *body = build_omp_iterator_loop (OMP_CLAUSE_ITERATORS (c),
+ loops_seq_p, &loop.bind);
+
+ loop.index = create_tmp_var (sizetype);
+ SET_EXPR_LOCATION (loop.bind, OMP_CLAUSE_LOCATION (c));
+
+ /* BEFORE LOOP: */
+ /* idx = -1; */
+ /* This should be initialized to before the individual elements,
+ as idx is pre-incremented in the loop body. */
+ gimple *assign = gimple_build_assign (loop.index, size_int (-1));
+ gimple_seq_add_stmt (loops_seq_p, assign);
+
+ /* IN LOOP BODY: */
+ /* Create a label so we can find this point later. */
+ loop.body_label = create_artificial_label (OMP_CLAUSE_LOCATION (c));
+ tree tem = build1 (LABEL_EXPR, void_type_node, loop.body_label);
+ append_to_statement_list_force (tem, body);
+
+ /* idx += 2; */
+ tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
+ void_type_node, loop.index,
+ size_binop (PLUS_EXPR, loop.index, size_int (2)));
+ append_to_statement_list_force (tem, body);
+ }
+
+ /* Create array to hold expanded values. */
+ tree arr_length = omp_iterator_elems_length (loop.count);
+ tree elems_type = TREE_CONSTANT (arr_length)
+ ? build_array_type (ptr_type_node,
+ build_index_type (arr_length))
+ : build_pointer_type (ptr_type_node);
+ tree elems = create_tmp_var_raw (elems_type, "omp_iter_data");
+ TREE_ADDRESSABLE (elems) = 1;
+ gimple_add_tmp_var (elems);
+
+ /* BEFORE LOOP: */
+ /* elems[0] = count; */
+ tree lhs = TREE_CODE (TREE_TYPE (elems)) == ARRAY_TYPE
+ ? build4 (ARRAY_REF, ptr_type_node, elems, size_int (0), NULL_TREE,
+ NULL_TREE)
+ : build1 (INDIRECT_REF, ptr_type_node, elems);
+ tree tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
+ void_type_node, lhs, loop.count);
+ gimplify_and_add (tem, loops_seq_p);
+
+ /* Make a copy of the iterator with extra info at the end. */
+ int elem_count = TREE_VEC_LENGTH (OMP_CLAUSE_ITERATORS (c));
+ tree new_iterator = copy_omp_iterator (OMP_CLAUSE_ITERATORS (c),
+ elem_count + 4);
+ OMP_ITERATORS_LABEL (new_iterator) = loop.body_label;
+ OMP_ITERATORS_INDEX (new_iterator) = loop.index;
+ OMP_ITERATORS_ELEMS (new_iterator) = elems;
+ OMP_ITERATORS_COUNT (new_iterator) = loop.count;
+ TREE_CHAIN (new_iterator) = TREE_CHAIN (OMP_CLAUSE_ITERATORS (c));
+ OMP_CLAUSE_ITERATORS (c) = new_iterator;
+
+ loop.clauses.safe_push (c);
+ }
+
+ /* Now gimplify and add all the loops that were built. */
+ for (hash_map<tree, iterator_loop_info_t>::iterator it = loops.begin ();
+ it != loops.end (); ++it)
+ gimplify_and_add ((*it).second.bind, loops_seq_p);
+}
+
+/* Helper function for enter_omp_iterator_loop_context. */
+
+static gimple_seq *
+enter_omp_iterator_loop_context_1 (tree iterator, gimple_seq *loops_seq_p)
+{
+ /* Drill into the nested bind expressions to get to the loop body. */
+ for (gimple_stmt_iterator gsi = gsi_start (*loops_seq_p);
+ !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple *stmt = gsi_stmt (gsi);
+
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_BIND:
+ {
+ gbind *bind_stmt = as_a<gbind *> (stmt);
+ gimple_push_bind_expr (bind_stmt);
+ gimple_seq *bind_body_p = gimple_bind_body_ptr (bind_stmt);
+ gimple_seq *seq =
+ enter_omp_iterator_loop_context_1 (iterator, bind_body_p);
+ if (seq)
+ return seq;
+ gimple_pop_bind_expr ();
+ }
+ break;
+ case GIMPLE_TRY:
+ {
+ gimple_seq *try_eval_p = gimple_try_eval_ptr (stmt);
+ gimple_seq *seq =
+ enter_omp_iterator_loop_context_1 (iterator, try_eval_p);
+ if (seq)
+ return seq;
+ }
+ break;
+ case GIMPLE_LABEL:
+ {
+ glabel *label_stmt = as_a<glabel *> (stmt);
+ tree label = gimple_label_label (label_stmt);
+ if (label == OMP_ITERATORS_LABEL (iterator))
+ return loops_seq_p;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
+ return NULL;
+}
+
+gimple_seq *
+enter_omp_iterator_loop_context (tree iterator, gimple_seq *loops_seq_p)
+{
+ push_gimplify_context ();
+
+ gimple_seq *seq = enter_omp_iterator_loop_context_1 (iterator, loops_seq_p);
+ gcc_assert (seq);
+ return seq;
+}
+
+/* Enter the Gimplification context in LOOPS_SEQ_P for the iterator loop
+ associated with OpenMP clause C. Returns the gimple_seq for the loop body
+ if C has OpenMP iterators, or ALT_SEQ_P if not. */
+
+static gimple_seq *
+enter_omp_iterator_loop_context (tree c, gimple_seq *loops_seq_p,
+ gimple_seq *alt_seq_p)
+{
+ if (!OMP_CLAUSE_HAS_ITERATORS (c))
+ return alt_seq_p;
+
+ return enter_omp_iterator_loop_context (OMP_CLAUSE_ITERATORS (c),
+ loops_seq_p);
+}
+
+/* Enter the Gimplification context in STMT for the iterator loop associated
+ with OpenMP clause C. Returns the gimple_seq for the loop body if C has
+ OpenMP iterators, or ALT_SEQ_P if not. */
+
+gimple_seq *
+enter_omp_iterator_loop_context (tree c, gomp_target *stmt,
+ gimple_seq *alt_seq_p)
+{
+ gimple_seq *loops_seq_p = gimple_omp_target_iterator_loops_ptr (stmt);
+ return enter_omp_iterator_loop_context (c, loops_seq_p, alt_seq_p);
+}
+
+void
+exit_omp_iterator_loop_context (void)
+{
+ while (!gimplify_ctxp->bind_expr_stack.is_empty ())
+ gimple_pop_bind_expr ();
+ pop_gimplify_context (NULL);
+}
+
+/* Exit the Gimplification context for the OpenMP clause C. */
+
+void
+exit_omp_iterator_loop_context (tree c)
+{
+ if (!OMP_CLAUSE_HAS_ITERATORS (c))
+ return;
+ exit_omp_iterator_loop_context ();
+}
+
+void
+assign_to_iterator_elems_array (tree t, tree iterator, gomp_target *stmt,
+ int index_offset)
+{
+ tree index = OMP_ITERATORS_INDEX (iterator);
+ if (index_offset)
+ index = size_binop (PLUS_EXPR, index, size_int (index_offset));
+ tree elems = OMP_ITERATORS_ELEMS (iterator);
+ gimple_seq *loop_body_p = gimple_omp_target_iterator_loops_ptr (stmt);
+ loop_body_p = enter_omp_iterator_loop_context (iterator, loop_body_p);
+
+ /* IN LOOP BODY: */
+ /* elems[index+index_offset] = t; */
+ tree lhs;
+ if (TREE_CODE (TREE_TYPE (elems)) == ARRAY_TYPE)
+ lhs = build4 (ARRAY_REF, ptr_type_node, elems, index, NULL_TREE, NULL_TREE);
+ else
+ {
+ tree tmp = size_binop (MULT_EXPR, index, TYPE_SIZE_UNIT (ptr_type_node));
+ tmp = size_binop (POINTER_PLUS_EXPR, elems, tmp);
+ lhs = build1 (INDIRECT_REF, ptr_type_node, tmp);
+ }
+ gimplify_assign (lhs, t, loop_body_p);
+ exit_omp_iterator_loop_context ();
+}
+
+tree
+add_new_omp_iterators_entry (tree iters, gimple_seq *loops_seq_p)
+{
+ gimple_stmt_iterator gsi;
+ gcc_assert (OMP_ITERATORS_EXPANDED_P (iters));
+
+ /* Search for <index> = -1. */
+ tree index = OMP_ITERATORS_INDEX (iters);
+ for (gsi = gsi_start (*loops_seq_p); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple *stmt = gsi_stmt (gsi);
+ if (gimple_code (stmt) == GIMPLE_ASSIGN
+ && gimple_assign_lhs (stmt) == index
+ && gimple_assign_rhs1 (stmt) == size_int (-1))
+ break;
+ }
+ gcc_assert (!gsi_end_p (gsi));
+
+ /* Create array for this clause. */
+ tree arr_length = omp_iterator_elems_length (OMP_ITERATORS_COUNT (iters));
+ tree elems_type = TREE_CONSTANT (arr_length)
+ ? build_array_type (ptr_type_node,
+ build_index_type (arr_length))
+ : build_pointer_type (ptr_type_node);
+ tree elems = create_tmp_var_raw (elems_type, "omp_iter_data");
+ TREE_ADDRESSABLE (elems) = 1;
+ gimple_add_tmp_var (elems);
+
+ /* BEFORE LOOP: */
+ /* elems[0] = count; */
+ tree lhs = TREE_CODE (TREE_TYPE (elems)) == ARRAY_TYPE
+ ? build4 (ARRAY_REF, ptr_type_node, elems, size_int (0), NULL_TREE,
+ NULL_TREE)
+ : build1 (INDIRECT_REF, ptr_type_node, elems);
+
+ gimple_seq assign_seq = NULL;
+ gimplify_assign (lhs, OMP_ITERATORS_COUNT (iters), &assign_seq);
+ gsi_insert_seq_after (&gsi, assign_seq, GSI_SAME_STMT);
+
+ /* Update iterator information. */
+ tree new_iterator = copy_omp_iterator (iters);
+ OMP_ITERATORS_ELEMS (new_iterator) = elems;
+ TREE_CHAIN (new_iterator) = TREE_CHAIN (iters);
+
+ return new_iterator;
+}
+
+/* Insert new OpenMP clause C into pre-existing iterator loop LOOPS_SEQ_P.
+ If the clause has an iterator, then that iterator is assumed to be in
+ the expanded form (i.e. it has info regarding the loop, expanded elements
+ etc.). */
+
+void
+add_new_omp_iterators_clause (tree c, gimple_seq *loops_seq_p)
+{
+ tree iters = OMP_CLAUSE_ITERATORS (c);
+ if (!iters)
+ return;
+ OMP_CLAUSE_ITERATORS (c) = add_new_omp_iterators_entry (iters, loops_seq_p);
+}
+
/* If *LIST_P contains any OpenMP depend clauses with iterators,
lower all the depend clauses by populating corresponding depend
array. Returns 0 if there are no such depend clauses, or
@@ -9674,89 +10646,13 @@ gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
tree t = OMP_CLAUSE_DECL (c);
if (first_loc == UNKNOWN_LOCATION)
first_loc = OMP_CLAUSE_LOCATION (c);
- if (TREE_CODE (t) == TREE_LIST
- && TREE_PURPOSE (t)
- && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
+ if (OMP_ITERATOR_DECL_P (t))
{
if (TREE_PURPOSE (t) != last_iter)
{
- tree tcnt = size_one_node;
- for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
- {
- if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
- is_gimple_val, fb_rvalue) == GS_ERROR
- || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
- is_gimple_val, fb_rvalue) == GS_ERROR
- || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
- is_gimple_val, fb_rvalue) == GS_ERROR
- || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
- is_gimple_val, fb_rvalue)
- == GS_ERROR))
- return 2;
- tree var = TREE_VEC_ELT (it, 0);
- tree begin = TREE_VEC_ELT (it, 1);
- tree end = TREE_VEC_ELT (it, 2);
- tree step = TREE_VEC_ELT (it, 3);
- tree orig_step = TREE_VEC_ELT (it, 4);
- tree type = TREE_TYPE (var);
- tree stype = TREE_TYPE (step);
- location_t loc = DECL_SOURCE_LOCATION (var);
- tree endmbegin;
- /* Compute count for this iterator as
- orig_step > 0
- ? (begin < end ? (end - begin + (step - 1)) / step : 0)
- : (begin > end ? (end - begin + (step + 1)) / step : 0)
- and compute product of those for the entire depend
- clause. */
- if (POINTER_TYPE_P (type))
- endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
- stype, end, begin);
- else
- endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
- end, begin);
- tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
- step,
- build_int_cst (stype, 1));
- tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
- build_int_cst (stype, 1));
- tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
- unshare_expr (endmbegin),
- stepm1);
- pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
- pos, step);
- tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
- endmbegin, stepp1);
- if (TYPE_UNSIGNED (stype))
- {
- neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
- step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
- }
- neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
- neg, step);
- step = NULL_TREE;
- tree cond = fold_build2_loc (loc, LT_EXPR,
- boolean_type_node,
- begin, end);
- pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
- build_int_cst (stype, 0));
- cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
- end, begin);
- neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
- build_int_cst (stype, 0));
- tree osteptype = TREE_TYPE (orig_step);
- cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
- orig_step,
- build_int_cst (osteptype, 0));
- tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
- cond, pos, neg);
- cnt = fold_convert_loc (loc, sizetype, cnt);
- if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
- fb_rvalue) == GS_ERROR)
- return 2;
- tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
- }
- if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
- fb_rvalue) == GS_ERROR)
+ tree tcnt = compute_omp_iterator_count (TREE_PURPOSE (t),
+ pre_p);
+ if (!tcnt)
return 2;
last_iter = TREE_PURPOSE (t);
last_count = tcnt;
@@ -9910,91 +10806,13 @@ gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
gcc_unreachable ();
}
tree t = OMP_CLAUSE_DECL (c);
- if (TREE_CODE (t) == TREE_LIST
- && TREE_PURPOSE (t)
- && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
+ if (OMP_ITERATOR_DECL_P (t))
{
if (TREE_PURPOSE (t) != last_iter)
{
- if (last_bind)
- gimplify_and_add (last_bind, pre_p);
- tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
- last_bind = build3 (BIND_EXPR, void_type_node,
- BLOCK_VARS (block), NULL, block);
- TREE_SIDE_EFFECTS (last_bind) = 1;
+ last_body = build_omp_iterator_loop (TREE_PURPOSE (t), pre_p,
+ &last_bind);
SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
- tree *p = &BIND_EXPR_BODY (last_bind);
- for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
- {
- tree var = TREE_VEC_ELT (it, 0);
- tree begin = TREE_VEC_ELT (it, 1);
- tree end = TREE_VEC_ELT (it, 2);
- tree step = TREE_VEC_ELT (it, 3);
- tree orig_step = TREE_VEC_ELT (it, 4);
- tree type = TREE_TYPE (var);
- location_t loc = DECL_SOURCE_LOCATION (var);
- /* Emit:
- var = begin;
- goto cond_label;
- beg_label:
- ...
- var = var + step;
- cond_label:
- if (orig_step > 0) {
- if (var < end) goto beg_label;
- } else {
- if (var > end) goto beg_label;
- }
- for each iterator, with inner iterators added to
- the ... above. */
- tree beg_label = create_artificial_label (loc);
- tree cond_label = NULL_TREE;
- tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
- var, begin);
- append_to_statement_list_force (tem, p);
- tem = build_and_jump (&cond_label);
- append_to_statement_list_force (tem, p);
- tem = build1 (LABEL_EXPR, void_type_node, beg_label);
- append_to_statement_list (tem, p);
- tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
- NULL_TREE, NULL_TREE);
- TREE_SIDE_EFFECTS (bind) = 1;
- SET_EXPR_LOCATION (bind, loc);
- append_to_statement_list_force (bind, p);
- if (POINTER_TYPE_P (type))
- tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
- var, fold_convert_loc (loc, sizetype,
- step));
- else
- tem = build2_loc (loc, PLUS_EXPR, type, var, step);
- tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
- var, tem);
- append_to_statement_list_force (tem, p);
- tem = build1 (LABEL_EXPR, void_type_node, cond_label);
- append_to_statement_list (tem, p);
- tree cond = fold_build2_loc (loc, LT_EXPR,
- boolean_type_node,
- var, end);
- tree pos
- = fold_build3_loc (loc, COND_EXPR, void_type_node,
- cond, build_and_jump (&beg_label),
- void_node);
- cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
- var, end);
- tree neg
- = fold_build3_loc (loc, COND_EXPR, void_type_node,
- cond, build_and_jump (&beg_label),
- void_node);
- tree osteptype = TREE_TYPE (orig_step);
- cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
- orig_step,
- build_int_cst (osteptype, 0));
- tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
- cond, pos, neg);
- append_to_statement_list_force (tem, p);
- p = &BIND_EXPR_BODY (bind);
- }
- last_body = p;
}
last_iter = TREE_PURPOSE (t);
if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
@@ -10163,6 +10981,26 @@ omp_map_clause_descriptor_p (tree c)
return false;
}
+/* Try to find a (Fortran) array descriptor given a data pointer PTR, i.e.
+ return "foo.descr" from "foo.descr.data". */
+
+static tree
+omp_maybe_get_descriptor_from_ptr (tree ptr)
+{
+ struct array_descr_info info;
+
+ if (TREE_CODE (ptr) != COMPONENT_REF)
+ return NULL_TREE;
+
+ ptr = TREE_OPERAND (ptr, 0);
+
+ if (lang_hooks.types.get_array_descr_info
+ && lang_hooks.types.get_array_descr_info (TREE_TYPE (ptr), &info))
+ return ptr;
+
+ return NULL_TREE;
+}
+
/* For a set of mappings describing an array section pointed to by a struct
(or derived type, etc.) component, create an "alloc" or "release" node to
insert into a list following a GOMP_MAP_STRUCT node. For some types of
@@ -10182,16 +11020,26 @@ omp_map_clause_descriptor_p (tree c)
static tree
build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
- tree *extra_node)
+ tree *extra_node, gimple_seq *loops_seq_p)
{
+ tree descr = omp_maybe_get_descriptor_from_ptr (OMP_CLAUSE_DECL (grp_end));
enum gomp_map_kind mkind
= (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
- ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
+ ? GOMP_MAP_RELEASE : descr ? GOMP_MAP_ALWAYS_TO : GOMP_MAP_ALLOC;
gcc_assert (grp_start != grp_end);
tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
+ OMP_CLAUSE_ITERATORS (c2) = OMP_CLAUSE_ITERATORS (grp_end);
+ add_new_omp_iterators_clause (c2, loops_seq_p);
+ if (descr)
+ {
+ OMP_CLAUSE_DECL (c2) = unshare_expr (descr);
+ OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (TREE_TYPE (descr));
+ *extra_node = NULL_TREE;
+ return c2;
+ }
OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (grp_end));
OMP_CLAUSE_CHAIN (c2) = NULL_TREE;
tree grp_mid = NULL_TREE;
@@ -10210,6 +11058,8 @@ build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
tree c3
= build_omp_clause (OMP_CLAUSE_LOCATION (grp_end), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
+ OMP_CLAUSE_ITERATORS (c3) = OMP_CLAUSE_ITERATORS (grp_end);
+ add_new_omp_iterators_clause (c3, loops_seq_p);
OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (grp_mid));
OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
OMP_CLAUSE_CHAIN (c3) = NULL_TREE;
@@ -10222,6 +11072,27 @@ build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
return c2;
}
+/* Callback for walk_tree. Return any VAR_DECLS found. */
+
+static tree
+contains_vars_1 (tree* tp, int *, void *)
+{
+ tree t = *tp;
+
+ if (TREE_CODE (t) != VAR_DECL)
+ return NULL_TREE;
+
+ return t;
+}
+
+/* Return true if there are any variables present in EXPR. */
+
+static bool
+contains_vars (tree expr)
+{
+ return walk_tree (&expr, contains_vars_1, NULL, NULL);
+}
+
/* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
and set *BITPOSP and *POFFSETP to the bit offset of the access.
If BASE_REF is non-NULL and the containing object is a reference, set
@@ -10232,7 +11103,8 @@ build_omp_struct_comp_nodes (enum tree_code code, tree grp_start, tree grp_end,
static tree
extract_base_bit_offset (tree base, poly_int64 *bitposp,
poly_offset_int *poffsetp,
- bool *variable_offset)
+ bool *variable_offset,
+ tree iterator)
{
tree offset;
poly_int64 bitsize, bitpos;
@@ -10242,6 +11114,19 @@ extract_base_bit_offset (tree base, poly_int64 *bitposp,
STRIP_NOPS (base);
+ if (iterator)
+ {
+ /* Replace any iterator variables with constant zero. This will give us
+ the nominal offset and bit position of the first element, which is
+ all we should need to lay out the mappings. The actual locations
+ of the iterated mappings are elsewhere.
+ E.g. "array[i].field" gives "16" (say), not "i * 32 + 16". */
+ tree it;
+ for (it = iterator; it; it = TREE_CHAIN (it))
+ base = simplify_replace_tree (base, OMP_ITERATORS_VAR (it),
+ OMP_ITERATORS_BEGIN (it));
+ }
+
base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
@@ -10256,6 +11141,8 @@ extract_base_bit_offset (tree base, poly_int64 *bitposp,
{
poffset = 0;
*variable_offset = (offset != NULL_TREE);
+ if (iterator && *variable_offset)
+ *variable_offset = contains_vars (offset);
}
if (maybe_ne (bitpos, 0))
@@ -10267,18 +11154,6 @@ extract_base_bit_offset (tree base, poly_int64 *bitposp,
return base;
}
-/* Used for topological sorting of mapping groups. UNVISITED means we haven't
- started processing the group yet. The TEMPORARY mark is used when we first
- encounter a group on a depth-first traversal, and the PERMANENT mark is used
- when we have processed all the group's children (i.e. all the base pointers
- referred to by the group's mapping nodes, recursively). */
-
-enum omp_tsort_mark {
- UNVISITED,
- TEMPORARY,
- PERMANENT
-};
-
/* Hash for trees based on operand_equal_p. Like tree_operand_hash
but ignores side effects in the equality comparisons. */
@@ -10295,26 +11170,6 @@ tree_operand_hash_no_se::equal (const value_type &t1,
return operand_equal_p (t1, t2, OEP_MATCH_SIDE_EFFECTS);
}
-/* A group of OMP_CLAUSE_MAP nodes that correspond to a single "map"
- clause. */
-
-struct omp_mapping_group {
- tree *grp_start;
- tree grp_end;
- omp_tsort_mark mark;
- /* If we've removed the group but need to reindex, mark the group as
- deleted. */
- bool deleted;
- /* The group points to an already-created "GOMP_MAP_STRUCT
- GOMP_MAP_ATTACH_DETACH" pair. */
- bool reprocess_struct;
- /* The group should use "zero-length" allocations for pointers that are not
- mapped "to" on the same directive. */
- bool fragile;
- struct omp_mapping_group *sibling;
- struct omp_mapping_group *next;
-};
-
DEBUG_FUNCTION void
debug_mapping_group (omp_mapping_group *grp)
{
@@ -10541,6 +11396,19 @@ omp_group_last (tree *start_p)
grp_last_p = &OMP_CLAUSE_CHAIN (c);
break;
+ case GOMP_MAP_TO_GRID:
+ case GOMP_MAP_FROM_GRID:
+ while (nc
+ && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
+ && (OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_GRID_DIM
+ || OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_GRID_STRIDE))
+ {
+ grp_last_p = &OMP_CLAUSE_CHAIN (c);
+ c = nc;
+ nc = OMP_CLAUSE_CHAIN (c);
+ }
+ break;
+
case GOMP_MAP_STRUCT:
case GOMP_MAP_STRUCT_UNORD:
{
@@ -10576,16 +11444,7 @@ omp_gather_mapping_groups_1 (tree *list_p, vec<omp_mapping_group> *groups,
continue;
tree *grp_last_p = omp_group_last (cp);
- omp_mapping_group grp;
-
- grp.grp_start = cp;
- grp.grp_end = *grp_last_p;
- grp.mark = UNVISITED;
- grp.sibling = NULL;
- grp.deleted = false;
- grp.reprocess_struct = false;
- grp.fragile = false;
- grp.next = NULL;
+ omp_mapping_group grp (cp, *grp_last_p);
groups->safe_push (grp);
cp = grp_last_p;
@@ -10643,6 +11502,14 @@ omp_group_base (omp_mapping_group *grp, unsigned int *chained,
case GOMP_MAP_ALWAYS_PRESENT_FROM:
case GOMP_MAP_ALWAYS_PRESENT_TO:
case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
+ case GOMP_MAP_NONCONTIG_ARRAY_ALLOC:
+ case GOMP_MAP_NONCONTIG_ARRAY_FROM:
+ case GOMP_MAP_NONCONTIG_ARRAY_TO:
+ case GOMP_MAP_NONCONTIG_ARRAY_TOFROM:
+ case GOMP_MAP_NONCONTIG_ARRAY_FORCE_ALLOC:
+ case GOMP_MAP_NONCONTIG_ARRAY_FORCE_FROM:
+ case GOMP_MAP_NONCONTIG_ARRAY_FORCE_TO:
+ case GOMP_MAP_NONCONTIG_ARRAY_FORCE_TOFROM:
case GOMP_MAP_ALLOC:
case GOMP_MAP_RELEASE:
case GOMP_MAP_DELETE:
@@ -10690,6 +11557,10 @@ omp_group_base (omp_mapping_group *grp, unsigned int *chained,
internal_error ("unexpected mapping node");
return error_mark_node;
+ case GOMP_MAP_TO_GRID:
+ case GOMP_MAP_FROM_GRID:
+ return *grp->grp_start;
+
case GOMP_MAP_ATTACH:
case GOMP_MAP_DETACH:
node = OMP_CLAUSE_CHAIN (node);
@@ -10733,6 +11604,8 @@ omp_group_base (omp_mapping_group *grp, unsigned int *chained,
case GOMP_MAP_FIRSTPRIVATE_INT:
case GOMP_MAP_USE_DEVICE_PTR:
case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
+ case GOMP_MAP_DECLARE_ALLOCATE:
+ case GOMP_MAP_DECLARE_DEALLOCATE:
return NULL_TREE;
case GOMP_MAP_FIRSTPRIVATE_POINTER:
@@ -12017,7 +12890,8 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
tree *grp_start_p, tree grp_end,
vec<omp_addr_token *> &addr_tokens, tree **inner,
bool *fragile_p, bool reprocessing_struct,
- tree **added_tail)
+ tree **added_tail,
+ gimple_seq *loops_seq_p)
{
using namespace omp_addr_tokenizer;
poly_offset_int coffset;
@@ -12061,8 +12935,11 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
}
bool variable_offset;
+ tree iterators = OMP_CLAUSE_HAS_ITERATORS (grp_end)
+ ? OMP_CLAUSE_ITERATORS (grp_end) : NULL_TREE;
tree base
- = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset);
+ = extract_base_bit_offset (ocd, &cbitpos, &coffset, &variable_offset,
+ iterators);
int base_token;
for (base_token = addr_tokens.length () - 1; base_token >= 0; base_token--)
@@ -12137,7 +13014,7 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
tree extra_node;
tree alloc_node
= build_omp_struct_comp_nodes (code, *grp_start_p, grp_end,
- &extra_node);
+ &extra_node, loops_seq_p);
tree *tail;
OMP_CLAUSE_CHAIN (l) = alloc_node;
@@ -12320,6 +13197,8 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
OMP_CLAUSE_SIZE (c2)
= fold_build2_loc (OMP_CLAUSE_LOCATION (grp_end), MINUS_EXPR,
ptrdiff_type_node, baddr, decladdr);
+ OMP_CLAUSE_ITERATORS (c2) = iterators;
+ add_new_omp_iterators_clause (c2, loops_seq_p);
/* Insert after struct node. */
OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
OMP_CLAUSE_CHAIN (l) = c2;
@@ -12395,8 +13274,12 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
sc_decl = TREE_OPERAND (sc_decl, 0);
bool variable_offset2;
+ tree iterators2 = OMP_CLAUSE_HAS_ITERATORS (*sc)
+ ? OMP_CLAUSE_ITERATORS (*sc) : NULL_TREE;
+
tree base2 = extract_base_bit_offset (sc_decl, &bitpos, &offset,
- &variable_offset2);
+ &variable_offset2,
+ iterators2);
if (!base2 || !operand_equal_p (base2, base, 0))
break;
if (scp)
@@ -12461,7 +13344,8 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
gcc_unreachable ();
else if (attach_detach)
alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
- grp_end, &extra_node);
+ grp_end, &extra_node,
+ loops_seq_p);
else
{
/* If we don't have an attach/detach node, this is a
@@ -12506,7 +13390,8 @@ omp_accumulate_sibling_list (enum omp_region_type region_type,
{
tree cl = NULL_TREE, extra_node;
tree alloc_node = build_omp_struct_comp_nodes (code, *grp_start_p,
- grp_end, &extra_node);
+ grp_end, &extra_node,
+ loops_seq_p);
tree *tail_chain = NULL;
if (*fragile_p
@@ -12604,7 +13489,8 @@ omp_build_struct_sibling_lists (enum tree_code code,
vec<omp_mapping_group> *groups,
hash_map<tree_operand_hash_no_se,
omp_mapping_group *> **grpmap,
- tree *list_p)
+ tree *list_p,
+ gimple_seq *loops_seq_p = NULL)
{
using namespace omp_addr_tokenizer;
unsigned i;
@@ -12748,7 +13634,8 @@ omp_build_struct_sibling_lists (enum tree_code code,
struct_map_to_clause, *grpmap,
grp_start_p, grp_end, addr_tokens,
&inner, &fragile_p,
- grp->reprocess_struct, &added_tail);
+ grp->reprocess_struct, &added_tail,
+ loops_seq_p);
if (inner)
{
@@ -12892,13 +13779,489 @@ error_out:
return success;
}
+/* Context passed (as the opaque DATA pointer) to
+   omp_instantiate_implicit_mappers when walking a gimplify context's
+   variable splay tree.  */
+
+struct instantiate_mapper_info
+{
+ /* Tail of the clause chain that newly instantiated mapper clauses are
+    appended to.  */
+ tree *mapper_clauses_p;
+ /* Gimplification context whose implicit_mappers table is consulted.  */
+ struct gimplify_omp_ctx *omp_ctx;
+ /* Statement sequence that inlined "declare mapper" bodies are
+    gimplified into.  */
+ gimple_seq *pre_p;
+};
+
+/* Helper function for omp_instantiate_mapper: a walk_tree callback.  DATA
+   is a copy_body_data; each decl encountered is replaced by an unshared
+   copy of its remapping from DATA's decl map (remap_decl returns the decl
+   itself when there is no mapping).  Subtrees of a replaced decl are not
+   walked further.  */
+
+static tree
+remap_mapper_decl_1 (tree *tp, int *walk_subtrees, void *data)
+{
+ copy_body_data *id = (copy_body_data *) data;
+
+ if (DECL_P (*tp))
+ {
+ tree replacement = remap_decl (*tp, id);
+ if (*tp != replacement)
+ {
+ *tp = unshare_expr (replacement);
+ /* The replacement is used as-is; don't descend into it.  */
+ *walk_subtrees = 0;
+ }
+ }
+
+ return NULL_TREE;
+}
+
+/* A copy_decl implementation (for use with tree-inline.cc functions) that
+ only transforms decls or SSA names that are part of a map we already
+ prepared; any VAR not present in CB's decl map is returned unchanged. */
+
+static tree
+omp_mapper_copy_decl (tree var, copy_body_data *cb)
+{
+ tree *repl = cb->decl_map->get (var);
+
+ if (repl)
+ return *repl;
+
+ /* Not part of a prepared mapping: leave the decl alone.  */
+ return var;
+}
+
+/* If we have a TREE_LIST representing an unprocessed mapping group (e.g. from
+ a "declare mapper" definition emitted by the Fortran FE), return the node
+ for the data being mapped.  That is the TREE_PURPOSE of the list head,
+ i.e. the first clause node of the group. */
+
+static tree
+omp_mapping_group_data (tree group)
+{
+ gcc_assert (TREE_CODE (group) == TREE_LIST);
+ /* Use the first member of the group for substitution. */
+ return TREE_PURPOSE (group);
+}
+
+/* Return the final node of a mapping_group GROUP (represented as a tree list),
+ or NULL_TREE if it's not an attach_detach node.  Each list element's
+ TREE_PURPOSE holds an OMP_CLAUSE_MAP node; we walk to the last element and
+ check its map kind. */
+
+static tree
+omp_mapping_group_ptr (tree group)
+{
+ gcc_assert (TREE_CODE (group) == TREE_LIST);
+
+ /* Advance to the last element of the group list.  */
+ while (TREE_CHAIN (group))
+ group = TREE_CHAIN (group);
+
+ tree node = TREE_PURPOSE (group);
+
+ gcc_assert (OMP_CLAUSE_CODE (node) == OMP_CLAUSE_MAP);
+
+ if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_ATTACH_DETACH)
+ return node;
+
+ return NULL_TREE;
+}
+
+/* Return the pointer set (GOMP_MAP_TO_PSET) of a mapping_group node GROUP,
+ represented by a tree list, or NULL_TREE if there isn't one.  The pointer
+ set, when present, is expected as the second element of the group. */
+
+static tree
+omp_mapping_group_pset (tree group)
+{
+ gcc_assert (TREE_CODE (group) == TREE_LIST);
+
+ /* A single-element group cannot carry a pointer set.  */
+ if (!TREE_CHAIN (group))
+ return NULL_TREE;
+
+ group = TREE_CHAIN (group);
+
+ tree node = TREE_PURPOSE (group);
+
+ if (omp_map_clause_descriptor_p (node))
+ return node;
+
+ return NULL_TREE;
+}
+
+/* Instantiate the "declare mapper" definition reachable from MAPPERFN for
+   the object EXPR, appending the resulting OMP_CLAUSE_MAP nodes to
+   *MAPPER_CLAUSES_P.  OUTER_KIND is substituted for any clause whose map
+   kind is GOMP_MAP_UNSET.  Nested mappers (looked up in IMPLICIT_MAPPERS
+   by name and type) are expanded recursively; statements produced while
+   inlining the mapper function's body are emitted into PRE_P.  Returns the
+   new tail of the clause chain, for chaining further appends.  */
+
+static tree *
+omp_instantiate_mapper (gimple_seq *pre_p,
+ hash_map<omp_name_type<tree>, tree> *implicit_mappers,
+ tree mapperfn, tree expr, enum gomp_map_kind outer_kind,
+ tree *mapper_clauses_p)
+{
+ tree mapper_name = NULL_TREE;
+ tree mapper = lang_hooks.decls.omp_extract_mapper_directive (mapperfn);
+ gcc_assert (TREE_CODE (mapper) == OMP_DECLARE_MAPPER);
+
+ tree clause = OMP_DECLARE_MAPPER_CLAUSES (mapper);
+ tree dummy_var = OMP_DECLARE_MAPPER_DECL (mapper);
+
+ /* The "extraction map" is used to map the mapper variable in the "declare
+ mapper" directive, and also any temporary variables that have been created
+ as part of expanding the mapper function's body (which are expanded as a
+ "bind" expression in the pre_p sequence). */
+ hash_map<tree, tree> extraction_map;
+
+ if (TREE_CODE (mapperfn) == FUNCTION_DECL
+ && TREE_CODE (DECL_SAVED_TREE (mapperfn)) == BIND_EXPR)
+ {
+ tree body = NULL_TREE, bind = DECL_SAVED_TREE (mapperfn);
+ copy_body_data id;
+ hash_map<tree, tree> decl_map;
+
+ /* The "decl map" maps declarations in the definition of the mapper
+ function into new declarations in the current function. These are
+ local to the bind in which they are expanded, so we copy them out to
+ temporaries in the enclosing function scope, and use those temporaries
+ in the mapper expansion (see "extraction_map" above). (This also
+ allows a mapper to be invoked for multiple variables). */
+
+ memset (&id, 0, sizeof (id));
+ /* The source function isn't always mapperfn: e.g. for C++ mappers
+ defined within functions, the mapper decl is created in a scope
+ within that function, rather than in mapperfn. So, that containing
+ function is the one we need to copy from. */
+ id.src_fn = DECL_CONTEXT (dummy_var);
+ id.dst_fn = current_function_decl;
+ id.src_cfun = DECL_STRUCT_FUNCTION (mapperfn);
+ id.decl_map = &decl_map;
+ id.copy_decl = copy_decl_no_change;
+ id.transform_call_graph_edges = CB_CGE_DUPLICATE;
+ id.transform_new_cfg = true;
+
+ walk_tree (&bind, copy_tree_body_r, &id, NULL);
+
+ body = BIND_EXPR_BODY (bind);
+
+ extraction_map.put (dummy_var, expr);
+ extraction_map.put (expr, expr);
+
+ if (DECL_P (expr))
+ mark_addressable (expr);
+
+ tree dummy_var_remapped, *remapped_var_p = decl_map.get (dummy_var);
+ if (remapped_var_p)
+ dummy_var_remapped = *remapped_var_p;
+ else
+ internal_error ("failed to remap mapper variable");
+
+ hash_map<tree, tree> mapper_map;
+ mapper_map.put (dummy_var_remapped, expr);
+
+ /* Now we need to make two adjustments to the inlined bind: we have to
+ substitute the dummy variable for the expression in the clause
+ triggering this mapper instantiation, and we need to remove the
+ (remapped) decl from the bind's decl list. */
+
+ if (TREE_CODE (body) == STATEMENT_LIST)
+ {
+ copy_body_data id2;
+ memset (&id2, 0, sizeof (id2));
+ id2.src_fn = current_function_decl;
+ id2.dst_fn = current_function_decl;
+ id2.src_cfun = cfun;
+ id2.decl_map = &mapper_map;
+ id2.copy_decl = omp_mapper_copy_decl;
+ id2.transform_call_graph_edges = CB_CGE_DUPLICATE;
+ id2.transform_new_cfg = true;
+
+ /* Drop nested OMP_DECLARE_MAPPER statements and the DECL_EXPR of the
+ remapped dummy variable; rewrite all other statements in terms of
+ EXPR. */
+ tree_stmt_iterator tsi;
+ for (tsi = tsi_start (body); !tsi_end_p (tsi); tsi_next (&tsi))
+ {
+ tree* stmtp = tsi_stmt_ptr (tsi);
+ if (TREE_CODE (*stmtp) == OMP_DECLARE_MAPPER)
+ *stmtp = NULL_TREE;
+ else if (TREE_CODE (*stmtp) == DECL_EXPR
+ && DECL_EXPR_DECL (*stmtp) == dummy_var_remapped)
+ *stmtp = NULL_TREE;
+ else
+ walk_tree (stmtp, remap_mapper_decl_1, &id2, NULL);
+ }
+
+ tsi = tsi_last (body);
+
+ for (hash_map<tree, tree>::iterator ti = decl_map.begin ();
+ ti != decl_map.end ();
+ ++ti)
+ {
+ tree tmp, var = (*ti).first, inlined = (*ti).second;
+
+ if (var == dummy_var || var == inlined || !DECL_P (var))
+ continue;
+
+ if (!is_gimple_reg (var))
+ {
+ const char *decl_name
+ = IDENTIFIER_POINTER (DECL_NAME (var));
+ tmp = create_tmp_var (TREE_TYPE (var), decl_name);
+ }
+ else
+ tmp = create_tmp_var (TREE_TYPE (var));
+
+ /* We have three versions of the decl here. VAR is the version
+ as represented in the function defining the "declare mapper",
+ and in the clause list attached to the OMP_DECLARE_MAPPER
+ directive within that function. INLINED is the variable that
+ has been localised to a bind within the function where the
+ mapper is being instantiated (i.e. current_function_decl).
+ TMP is the variable that we copy the values created in that
+ block to. */
+
+ extraction_map.put (var, tmp);
+ extraction_map.put (tmp, tmp);
+
+ tree asgn = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, inlined);
+ tsi_link_after (&tsi, asgn, TSI_CONTINUE_LINKING);
+ }
+ }
+
+ /* We've replaced the "dummy variable" of the declare mapper definition
+ with a localised version in a bind expr in the current function. We
+ have just rewritten all references to that, so remove the decl. */
+
+ for (tree *decl = &BIND_EXPR_VARS (bind); *decl;)
+ {
+ if (*decl == dummy_var_remapped)
+ *decl = DECL_CHAIN (*decl);
+ else
+ decl = &DECL_CHAIN (*decl);
+ }
+
+ gimplify_bind_expr (&bind, pre_p);
+ }
+ else
+ {
+ extraction_map.put (dummy_var, expr);
+ extraction_map.put (expr, expr);
+ }
+
+ /* This copy_body_data is only used to remap the decls in the
+ OMP_DECLARE_MAPPER tree node expansion itself. All relevant decls should
+ already be in the current function. */
+ copy_body_data id;
+ memset (&id, 0, sizeof (id));
+ id.src_fn = current_function_decl;
+ id.dst_fn = current_function_decl;
+ id.src_cfun = cfun;
+ id.decl_map = &extraction_map;
+ id.copy_decl = omp_mapper_copy_decl;
+ id.transform_call_graph_edges = CB_CGE_DUPLICATE; // ???
+ id.transform_new_cfg = true; // ???
+
+ /* Walk the mapper's clause list, rewriting each clause in terms of EXPR
+ and recursing into nested mappers where one is registered for the
+ clause's type. */
+ for (; clause; clause = OMP_CLAUSE_CHAIN (clause))
+ {
+ enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (clause);
+ tree *nested_mapper_p = NULL;
+
+ /* PUSH/POP_MAPPER_NAME pseudo-clauses bracket a region of the list
+ where a named (rather than default) mapper should be looked up. */
+ if (map_kind == GOMP_MAP_PUSH_MAPPER_NAME)
+ {
+ mapper_name = OMP_CLAUSE_DECL (clause);
+ continue;
+ }
+ else if (map_kind == GOMP_MAP_POP_MAPPER_NAME)
+ {
+ mapper_name = NULL_TREE;
+ continue;
+ }
+
+ tree decl = OMP_CLAUSE_DECL (clause);
+
+ if (map_kind == GOMP_MAP_MAPPING_GROUP)
+ {
+ tree data = omp_mapping_group_data (decl);
+ tree group_type = TREE_TYPE (OMP_CLAUSE_DECL (data));
+
+ group_type = TYPE_MAIN_VARIANT (group_type);
+
+ nested_mapper_p = implicit_mappers->get ({ mapper_name, group_type });
+
+ if (nested_mapper_p && *nested_mapper_p != mapperfn)
+ {
+ tree unshared = unshare_expr (data);
+ map_kind = OMP_CLAUSE_MAP_KIND (data);
+ walk_tree (&unshared, remap_mapper_decl_1, &id, NULL);
+ tree ptr = omp_mapping_group_ptr (decl);
+
+ /* !!! When ptr is NULL, we're discarding the other nodes in the
+ mapping group. Is that always OK? */
+
+ if (ptr)
+ {
+ /* This behaviour is Fortran-specific. That's fine for now
+ because only Fortran is using GOMP_MAP_MAPPING_GROUP, but
+ may need revisiting if that ever changes. */
+ gcc_assert (lang_GNU_Fortran ());
+
+ /* We're invoking a (nested) mapper from CLAUSE, which was a
+ pointer to a derived type. The elements of the derived
+ type are handled by the mapper, but we need to map the
+ actual pointer as well. Create an ALLOC node to do
+ that.
+ If we have an array descriptor, we want to copy it to the
+ target, so instead use an ALWAYS_TO mapping and copy the
+ descriptor itself rather than the data pointer. */
+
+ tree pset = omp_mapping_group_pset (decl);
+ tree ptr_unshared = unshare_expr (pset ? pset : ptr);
+ walk_tree (&ptr_unshared, remap_mapper_decl_1, &id, NULL);
+
+ tree node = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
+ OMP_CLAUSE_MAP);
+ OMP_CLAUSE_SET_MAP_KIND (node, pset ? GOMP_MAP_ALWAYS_TO
+ : GOMP_MAP_ALLOC);
+ OMP_CLAUSE_DECL (node) = OMP_CLAUSE_DECL (ptr_unshared);
+ OMP_CLAUSE_SIZE (node)
+ = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (node)));
+
+ *mapper_clauses_p = node;
+ mapper_clauses_p = &OMP_CLAUSE_CHAIN (node);
+ }
+
+ if (map_kind == GOMP_MAP_UNSET)
+ map_kind = outer_kind;
+
+ mapper_clauses_p
+ = omp_instantiate_mapper (pre_p, implicit_mappers,
+ *nested_mapper_p,
+ OMP_CLAUSE_DECL (unshared), map_kind,
+ mapper_clauses_p);
+ }
+ else
+ /* No nested mapper, so process each element of the mapping
+ group. */
+ for (tree cp = OMP_CLAUSE_DECL (clause); cp; cp = TREE_CHAIN (cp))
+ {
+ tree node = unshare_expr (TREE_PURPOSE (cp));
+ walk_tree (&node, remap_mapper_decl_1, &id, NULL);
+
+ if (OMP_CLAUSE_MAP_KIND (node) == GOMP_MAP_UNSET)
+ OMP_CLAUSE_SET_MAP_KIND (node, outer_kind);
+
+ *mapper_clauses_p = node;
+ mapper_clauses_p = &OMP_CLAUSE_CHAIN (node);
+ }
+
+ continue;
+ }
+
+ tree unshared, type;
+ bool nonunit_array_with_mapper = false;
+
+ if (TREE_CODE (decl) == OMP_ARRAY_SECTION)
+ {
+ location_t loc = OMP_CLAUSE_LOCATION (clause);
+ tree tmp = lang_hooks.decls.omp_map_array_section (loc, decl);
+ if (tmp == decl)
+ {
+ /* The FE couldn't simplify the section; keep the clause as-is
+ and remember in case a nested mapper is found below. */
+ unshared = unshare_expr (clause);
+ nonunit_array_with_mapper = true;
+ type = TREE_TYPE (TREE_TYPE (decl));
+ }
+ else
+ {
+ unshared = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
+ OMP_CLAUSE_CODE (clause));
+ OMP_CLAUSE_DECL (unshared) = tmp;
+ OMP_CLAUSE_SIZE (unshared)
+ = DECL_P (tmp) ? DECL_SIZE_UNIT (tmp)
+ : TYPE_SIZE_UNIT (TREE_TYPE (tmp));
+ type = TREE_TYPE (tmp);
+ }
+ }
+ else
+ {
+ unshared = unshare_expr (clause);
+ type = TREE_TYPE (decl);
+ }
+
+ walk_tree (&unshared, remap_mapper_decl_1, &id, NULL);
+
+ if (OMP_CLAUSE_MAP_KIND (unshared) == GOMP_MAP_UNSET)
+ OMP_CLAUSE_SET_MAP_KIND (unshared, outer_kind);
+
+ decl = OMP_CLAUSE_DECL (unshared);
+ type = TYPE_MAIN_VARIANT (type);
+
+ nested_mapper_p = implicit_mappers->get ({ mapper_name, type });
+
+ if (nested_mapper_p && *nested_mapper_p != mapperfn)
+ {
+ if (nonunit_array_with_mapper)
+ {
+ sorry ("user-defined mapper with non-unit length array section");
+ continue;
+ }
+
+ if (map_kind == GOMP_MAP_UNSET)
+ map_kind = outer_kind;
+
+ mapper_clauses_p
+ = omp_instantiate_mapper (pre_p, implicit_mappers,
+ *nested_mapper_p, decl, map_kind,
+ mapper_clauses_p);
+ continue;
+ }
+
+ *mapper_clauses_p = unshared;
+ mapper_clauses_p = &OMP_CLAUSE_CHAIN (unshared);
+ }
+
+ return mapper_clauses_p;
+}
+
+/* Callback for splay_tree_foreach over a gimplify context's variables.
+   N's key is a decl and its value is that decl's GOVD_* flags; DATA is an
+   instantiate_mapper_info.  For a variable that was actually used
+   (GOVD_SEEN) but not explicitly mapped, whose type is an aggregate (or
+   reference to one) with a registered unnamed implicit mapper,
+   instantiate that mapper and mark the variable GOVD_EXPLICIT so the
+   default implicit mapping is suppressed.  Always returns 0 so the
+   traversal continues.  */
+
+static int
+omp_instantiate_implicit_mappers (splay_tree_node n, void *data)
+{
+ tree decl = (tree) n->key;
+ instantiate_mapper_info *im_info = (instantiate_mapper_info *) data;
+ gimplify_omp_ctx *ctx = im_info->omp_ctx;
+ tree *mapper_p = NULL;
+ tree type = TREE_TYPE (decl);
+ bool ref_p = false;
+ unsigned flags = n->value;
+
+ if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
+ return 0;
+ if ((flags & GOVD_SEEN) == 0)
+ return 0;
+ /* If we already have clauses pertaining to a struct variable, then we don't
+ want to implicitly invoke a user-defined mapper. */
+ /* NOTE(review): this GOVD_EXPLICIT test can never be true -- the first
+ check above already returned when GOVD_EXPLICIT was set.  Confirm the
+ intended flag (dead code as written). */
+ if ((flags & GOVD_EXPLICIT) != 0 && AGGREGATE_TYPE_P (TREE_TYPE (decl)))
+ return 0;
+
+ /* For references, the mapper is selected by the referenced type.  */
+ if (TREE_CODE (type) == REFERENCE_TYPE)
+ {
+ ref_p = true;
+ type = TREE_TYPE (type);
+ }
+
+ type = TYPE_MAIN_VARIANT (type);
+
+ if (DECL_P (decl) && type && AGGREGATE_TYPE_P (type))
+ {
+ gcc_assert (ctx);
+ /* Only unnamed ("default") mappers apply implicitly.  */
+ mapper_p = ctx->implicit_mappers->get ({ NULL_TREE, type });
+ }
+
+ if (mapper_p)
+ {
+ /* If we have a reference, map the pointed-to object rather than the
+ reference itself. */
+ if (ref_p)
+ decl = build_fold_indirect_ref (decl);
+
+ im_info->mapper_clauses_p
+ = omp_instantiate_mapper (im_info->pre_p, ctx->implicit_mappers,
+ *mapper_p, decl, GOMP_MAP_TOFROM,
+ im_info->mapper_clauses_p);
+ /* Make sure we don't map the same variable implicitly in
+ gimplify_adjust_omp_clauses_1 also. */
+ n->value |= GOVD_EXPLICIT;
+ }
+
+ return 0;
+}
+
/* Scan the OMP clauses in *LIST_P, installing mappings into a new
and previous omp contexts. */
static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
enum omp_region_type region_type,
- enum tree_code code)
+ enum tree_code code,
+ gimple_seq *loops_seq_p = NULL)
{
using namespace omp_addr_tokenizer;
struct gimplify_omp_ctx *ctx, *outer_ctx;
@@ -12943,6 +14306,7 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
|| code == OMP_TARGET_DATA
|| code == OMP_TARGET_ENTER_DATA
|| code == OMP_TARGET_EXIT_DATA
+ || code == OMP_TARGET_UPDATE
|| code == OACC_DATA
|| code == OACC_KERNELS
|| code == OACC_PARALLEL
@@ -13161,6 +14525,20 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
omp_firstprivatize_variable (ctx, v);
omp_notice_variable (ctx, v, true);
}
+ if (TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
+ || CONVERT_EXPR_CODE_P (TREE_CODE (TREE_OPERAND (decl, 0))))
+ {
+ gimplify_ctxp->into_ssa = false;
+ if (gimplify_expr (&TREE_OPERAND (decl, 0), pre_p,
+ NULL, is_gimple_val, fb_rvalue, false)
+ == GS_ERROR)
+ {
+ gimplify_ctxp->into_ssa = saved_into_ssa;
+ remove = true;
+ break;
+ }
+ gimplify_ctxp->into_ssa = saved_into_ssa;
+ }
decl = TREE_OPERAND (decl, 0);
if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
{
@@ -13592,6 +14970,18 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
&& TREE_CODE (TREE_TYPE (basetype)) == POINTER_TYPE)
break;
}
+ if (code == OACC_DATA && *grp_start_p != grp_end)
+ {
+ if (!ctx->decl_data_clause)
+ ctx->decl_data_clause = new hash_map<tree, omp_mapping_group *>;
+
+ omp_mapping_group *grp
+ = new omp_mapping_group (grp_start_p, grp_end);
+
+ gcc_assert (DECL_P (decl));
+
+ ctx->decl_data_clause->put (decl, grp);
+ }
flags = GOVD_MAP | GOVD_EXPLICIT;
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
@@ -13599,6 +14989,8 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TO
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_TOFROM)
flags |= GOVD_MAP_ALWAYS_TO;
+ else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR)
+ flags |= GOVD_DEVICEPTR;
goto do_add;
@@ -13669,25 +15061,37 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
: TYPE_SIZE_UNIT (TREE_TYPE (decl));
- if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
- NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
+ gimple_seq *seq_p;
+ seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
+ if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR)
{
remove = true;
+ exit_omp_iterator_loop_context (c);
break;
}
if (!DECL_P (decl))
{
- if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
- NULL, is_gimple_lvalue, fb_lvalue)
- == GS_ERROR)
- {
- remove = true;
- break;
- }
+ if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
+ is_gimple_lvalue, fb_lvalue) == GS_ERROR)
+ remove = true;
+ exit_omp_iterator_loop_context (c);
break;
}
+ exit_omp_iterator_loop_context (c);
goto do_notice;
+ case OMP_CLAUSE__MAPPER_BINDING_:
+ {
+ tree name = OMP_CLAUSE__MAPPER_BINDING__ID (c);
+ tree var = OMP_CLAUSE__MAPPER_BINDING__DECL (c);
+ tree type = TYPE_MAIN_VARIANT (TREE_TYPE (var));
+ tree fndecl = OMP_CLAUSE__MAPPER_BINDING__MAPPER (c);
+ ctx->implicit_mappers->put ({ name, type }, fndecl);
+ remove = true;
+ break;
+ }
+
case OMP_CLAUSE_USE_DEVICE_PTR:
case OMP_CLAUSE_USE_DEVICE_ADDR:
flags = GOVD_EXPLICIT;
@@ -13713,7 +15117,8 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
remove = true;
break;
}
- if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
+ if (DECL_P (decl) && DECL_NAME (decl) == NULL_TREE
+ && (flags & GOVD_SHARED) == 0)
{
tree t = omp_member_access_dummy_var (decl);
if (t)
@@ -14053,6 +15458,21 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
nowait = 1;
break;
+ case OMP_CLAUSE_USES_ALLOCATORS:
+ if (TREE_CODE (OMP_CLAUSE_USES_ALLOCATORS_ALLOCATOR (c))
+ != INTEGER_CST)
+ {
+ decl = OMP_CLAUSE_USES_ALLOCATORS_ALLOCATOR (c);
+ omp_add_variable (ctx, decl, GOVD_SEEN | GOVD_PRIVATE);
+
+ decl = OMP_CLAUSE_USES_ALLOCATORS_TRAITS (c);
+ if (decl && !DECL_INITIAL (decl))
+ omp_add_variable (ctx, decl, GOVD_SEEN | GOVD_FIRSTPRIVATE);
+ }
+ else
+ remove = true;
+ break;
+
case OMP_CLAUSE_ORDERED:
case OMP_CLAUSE_UNTIED:
case OMP_CLAUSE_COLLAPSE:
@@ -14076,6 +15496,10 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
case OMP_CLAUSE_DESTROY:
break;
+ case OMP_CLAUSE__OMPACC_:
+ ctx->ompacc = true;
+ break;
+
case OMP_CLAUSE_ORDER:
ctx->order_concurrent = true;
break;
@@ -14203,6 +15627,49 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
remove = true;
break;
}
+ if ((omp_requires_mask & OMP_REQUIRES_DYNAMIC_ALLOCATORS) == 0
+ && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
+ && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST)
+ {
+ tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
+ tree clauses = NULL_TREE;
+
+ /* Get clause list of the nearest enclosing target construct. */
+ if (ctx->code == OMP_TARGET)
+ clauses = *orig_list_p;
+ else
+ {
+ struct gimplify_omp_ctx *tctx = ctx->outer_context;
+ while (tctx && tctx->code != OMP_TARGET)
+ tctx = tctx->outer_context;
+ if (tctx)
+ clauses = tctx->clauses;
+ }
+
+ if (clauses)
+ {
+ tree uc;
+ if (TREE_CODE (allocator) == MEM_REF
+ || TREE_CODE (allocator) == INDIRECT_REF)
+ allocator = TREE_OPERAND (allocator, 0);
+ for (uc = clauses; uc; uc = OMP_CLAUSE_CHAIN (uc))
+ if (OMP_CLAUSE_CODE (uc) == OMP_CLAUSE_USES_ALLOCATORS)
+ {
+ tree uc_allocator
+ = OMP_CLAUSE_USES_ALLOCATORS_ALLOCATOR (uc);
+ if (operand_equal_p (allocator, uc_allocator))
+ break;
+ }
+ if (uc == NULL_TREE)
+ {
+ error_at (OMP_CLAUSE_LOCATION (c), "allocator %qE "
+ "requires %<uses_allocators(%E)%> clause in "
+ "target region", allocator, allocator);
+ remove = true;
+ break;
+ }
+ }
+ }
if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
is_gimple_val, fb_rvalue) == GS_ERROR)
{
@@ -14267,11 +15734,6 @@ gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
gcc_unreachable ();
}
- if (code == OACC_DATA
- && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
- && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
- || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
- remove = true;
if (remove)
*list_p = OMP_CLAUSE_CHAIN (c);
else
@@ -14412,6 +15874,52 @@ struct gimplify_adjust_omp_clauses_data
gimple_seq *pre_p;
};
+/* For OpenACC offload regions, the implicit data mappings for arrays must
+ respect explicit data clauses set by a containing acc data region.
+ Specifically, an array section on the data clause must be transformed into
+ an equivalent PRESENT mapping on the inner offload region.
+ This function returns a pointer to a mapping group if an array slice of DECL
+ is specified on a lexically-enclosing data construct, or returns NULL
+ otherwise. */
+
+static omp_mapping_group *
+gomp_oacc_needs_data_present (tree decl)
+{
+ gimplify_omp_ctx *ctx = NULL;
+
+ /* Only applies inside an OpenACC offload region.  */
+ if (gimplify_omp_ctxp->region_type != ORT_ACC_PARALLEL
+ && gimplify_omp_ctxp->region_type != ORT_ACC_SERIAL
+ && gimplify_omp_ctxp->region_type != ORT_ACC_KERNELS)
+ return NULL;
+
+ /* NOTE(review): the final (POINTER && pointee-ARRAY) clause is redundant
+ as written -- it is only reached when the second condition has already
+ established the type is not POINTER_TYPE.  Confirm intent.  */
+ if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE
+ && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE
+ && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE
+ && (TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE
+ || TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) != ARRAY_TYPE))
+ return NULL;
+
+ decl = get_base_address (decl);
+
+ /* Walk outward through enclosing contexts looking for an "acc data"
+ region that explicitly mapped DECL via a data clause.  */
+ for (ctx = gimplify_omp_ctxp->outer_context; ctx; ctx = ctx->outer_context)
+ {
+ splay_tree_node on
+ = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
+
+ if (ctx->region_type == ORT_ACC_DATA
+ && on
+ && (((int) on->value) & GOVD_EXPLICIT)
+ && ctx->decl_data_clause != NULL)
+ {
+ omp_mapping_group **pgrp = ctx->decl_data_clause->get (decl);
+ if (pgrp)
+ return *pgrp;
+ }
+ }
+
+ return NULL;
+}
+
/* For all variables that were not actually used within the context,
remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
@@ -14472,6 +15980,8 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
g->have_offload = true;
}
}
+ if (lookup_attribute ("oacc declare create", DECL_ATTRIBUTES (decl)))
+ flags |= GOVD_MAP_FORCE_PRESENT;
}
else if (flags & GOVD_SHARED)
{
@@ -14511,6 +16021,12 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
"%<target%> construct", decl);
return 0;
}
+ if (lookup_attribute ("oacc declare create", DECL_ATTRIBUTES (decl)))
+ {
+ code = OMP_CLAUSE_MAP;
+ flags &= ~GOVD_FIRSTPRIVATE;
+ flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
+ }
}
else if (flags & GOVD_LASTPRIVATE)
code = OMP_CLAUSE_LASTPRIVATE;
@@ -14533,6 +16049,7 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
clause = build_omp_clause (input_location, code);
OMP_CLAUSE_DECL (clause) = decl;
OMP_CLAUSE_CHAIN (clause) = chain;
+ omp_mapping_group *outer_grp;
if (private_debug)
OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
@@ -14541,6 +16058,58 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
&& (flags & GOVD_WRITTEN) == 0
&& omp_shared_to_firstprivate_optimizable_decl_p (decl))
OMP_CLAUSE_SHARED_READONLY (clause) = 1;
+ else if ((gimplify_omp_ctxp->region_type & ORT_ACC) != 0
+ && (code == OMP_CLAUSE_MAP || code == OMP_CLAUSE_FIRSTPRIVATE)
+ && (outer_grp = gomp_oacc_needs_data_present (decl)))
+ {
+ if (code == OMP_CLAUSE_FIRSTPRIVATE)
+ /* Oops, we have the wrong type of clause. Rebuild it. */
+ clause = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
+ OMP_CLAUSE_MAP);
+
+ tree mapping = *outer_grp->grp_start;
+
+ OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_PRESENT);
+ OMP_CLAUSE_DECL (clause) = unshare_expr (OMP_CLAUSE_DECL (mapping));
+ OMP_CLAUSE_SIZE (clause) = unshare_expr (OMP_CLAUSE_SIZE (mapping));
+
+ /* Copy subsequent nodes (that are part of the mapping group) after the
+ initial one from the outer "acc data" directive -- "pointer" nodes,
+ including firstprivate_reference, pointer sets, etc. */
+
+ tree ptr = OMP_CLAUSE_CHAIN (mapping);
+ tree *ins = &OMP_CLAUSE_CHAIN (clause);
+ tree sentinel = OMP_CLAUSE_CHAIN (outer_grp->grp_end);
+ for (; ptr && ptr != sentinel; ptr = OMP_CLAUSE_CHAIN (ptr))
+ {
+ tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
+ OMP_CLAUSE_MAP);
+ OMP_CLAUSE_SET_MAP_KIND (nc, OMP_CLAUSE_MAP_KIND (ptr));
+ OMP_CLAUSE_DECL (nc) = unshare_expr (OMP_CLAUSE_DECL (ptr));
+ OMP_CLAUSE_SIZE (nc) = unshare_expr (OMP_CLAUSE_SIZE (ptr));
+ *ins = nc;
+ ins = &OMP_CLAUSE_CHAIN (nc);
+ }
+
+ *ins = chain;
+
+ gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
+ gimplify_omp_ctxp = ctx->outer_context;
+ for (ptr = clause; ptr != chain; ptr = OMP_CLAUSE_CHAIN (ptr))
+ {
+ /* The condition is specifically to not gimplify here if we have a
+ DECL_P with a DECL_VALUE_EXPR -- i.e. a VLA, or variable-sized
+ array section. If we do, omp-low.cc does not see the DECL_P it
+ expects here for e.g. firstprivate_pointer or
+ firstprivate_reference. */
+ if (!DECL_P (OMP_CLAUSE_DECL (ptr)))
+ gimplify_expr (&OMP_CLAUSE_DECL (ptr), pre_p, NULL,
+ is_gimple_lvalue, fb_lvalue);
+ gimplify_expr (&OMP_CLAUSE_SIZE (ptr), pre_p, NULL,
+ is_gimple_val, fb_rvalue);
+ }
+ gimplify_omp_ctxp = ctx;
+ }
else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
@@ -14588,7 +16157,8 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
| GOVD_MAP_FORCE
| GOVD_MAP_FORCE_PRESENT
| GOVD_MAP_ALLOC_ONLY
- | GOVD_MAP_FROM_ONLY))
+ | GOVD_MAP_FROM_ONLY
+ | GOVD_DEVICEPTR))
{
case 0:
kind = GOMP_MAP_TOFROM;
@@ -14614,14 +16184,14 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
case GOVD_MAP_FORCE_PRESENT | GOVD_MAP_ALLOC_ONLY:
kind = GOMP_MAP_FORCE_PRESENT;
break;
+ case GOVD_DEVICEPTR:
+ kind = GOMP_MAP_FORCE_DEVICEPTR;
+ break;
default:
gcc_unreachable ();
}
OMP_CLAUSE_SET_MAP_KIND (clause, kind);
- /* Setting of the implicit flag for the runtime is currently disabled for
- OpenACC. */
- if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
- OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
+ OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
if (DECL_SIZE (decl)
&& !poly_int_tree_p (DECL_SIZE (decl)))
{
@@ -14705,10 +16275,11 @@ gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
- enum tree_code code)
+ enum tree_code code,
+ gimple_seq *loops_seq_p = NULL)
{
struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
- tree *orig_list_p = list_p;
+ tree *prev_list_p = NULL, *orig_list_p = list_p;
tree c, decl;
bool has_inscan_reductions = false;
@@ -14772,6 +16343,30 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
|| code == OMP_TARGET_ENTER_DATA
|| code == OMP_TARGET_EXIT_DATA)
{
+ tree mapper_clauses = NULL_TREE;
+ instantiate_mapper_info im_info;
+
+ im_info.mapper_clauses_p = &mapper_clauses;
+ im_info.omp_ctx = ctx;
+ im_info.pre_p = pre_p;
+
+ splay_tree_foreach (ctx->variables,
+ omp_instantiate_implicit_mappers,
+ (void *) &im_info);
+
+ if (mapper_clauses)
+ {
+ mapper_clauses
+ = lang_hooks.decls.omp_finish_mapper_clauses (mapper_clauses);
+
+ /* Stick the implicitly-expanded mapper clauses at the end of the
+ clause list. */
+ tree *tail = list_p;
+ while (*tail)
+ tail = &OMP_CLAUSE_CHAIN (*tail);
+ *tail = mapper_clauses;
+ }
+
vec<omp_mapping_group> *groups;
groups = omp_gather_mapping_groups (list_p);
hash_map<tree_operand_hash_no_se, omp_mapping_group *> *grpmap = NULL;
@@ -14782,7 +16377,7 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
omp_resolve_clause_dependencies (code, groups, grpmap);
omp_build_struct_sibling_lists (code, ctx->region_type, groups,
- &grpmap, list_p);
+ &grpmap, list_p, loops_seq_p);
omp_mapping_group *outlist = NULL;
@@ -15010,18 +16605,21 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
switch (code)
{
case OACC_DATA:
- if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
- break;
- /* Fallthrough. */
case OACC_HOST_DATA:
case OACC_ENTER_DATA:
case OACC_EXIT_DATA:
case OMP_TARGET_DATA:
case OMP_TARGET_ENTER_DATA:
case OMP_TARGET_EXIT_DATA:
- if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
- || (OMP_CLAUSE_MAP_KIND (c)
- == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
+ if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
+ || (OMP_CLAUSE_MAP_KIND (c)
+ == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
+ && !(prev_list_p
+ && OMP_CLAUSE_CODE (*prev_list_p) == OMP_CLAUSE_MAP
+ && ((OMP_CLAUSE_MAP_KIND (*prev_list_p)
+ == GOMP_MAP_DECLARE_ALLOCATE)
+ || (OMP_CLAUSE_MAP_KIND (*prev_list_p)
+ == GOMP_MAP_DECLARE_DEALLOCATE))))
/* For target {,enter ,exit }data only the array slice is
mapped, but not the pointer to it. */
remove = true;
@@ -15037,7 +16635,9 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
}
if (remove)
break;
- if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
+ if (OMP_CLAUSE_SIZE (c) == NULL_TREE
+ && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_GRID_DIM
+ && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_GRID_STRIDE)
{
/* Sanity check: attach/detach map kinds use the size as a bias,
and it's never right to use the decl size for such
@@ -15052,21 +16652,33 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
: TYPE_SIZE_UNIT (TREE_TYPE (decl));
}
gimplify_omp_ctxp = ctx->outer_context;
- if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, NULL,
+ gimple_seq *seq_p;
+ seq_p = enter_omp_iterator_loop_context (c, loops_seq_p, pre_p);
+ if (GOMP_MAP_NONCONTIG_ARRAY_P (OMP_CLAUSE_MAP_KIND (c)))
+ {
+ gcc_assert (OMP_CLAUSE_SIZE (c)
+ && TREE_CODE (OMP_CLAUSE_SIZE (c)) == TREE_LIST);
+ /* For non-contiguous array maps, OMP_CLAUSE_SIZE is a TREE_LIST
+ of the individual array dimensions, which gimplify_expr doesn't
+ handle, so skip the call to gimplify_expr here. */
+ }
+ else if (gimplify_expr (&OMP_CLAUSE_SIZE (c), seq_p, NULL,
is_gimple_val, fb_rvalue) == GS_ERROR)
{
gimplify_omp_ctxp = ctx;
remove = true;
- break;
+ goto end_adjust_omp_map_clause;
}
else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
|| (OMP_CLAUSE_MAP_KIND (c)
== GOMP_MAP_FIRSTPRIVATE_REFERENCE)
+ || (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
+ && ctx->region_type != ORT_ACC_KERNELS)
|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
&& TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
{
OMP_CLAUSE_SIZE (c)
- = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
+ = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), seq_p, NULL,
false);
if ((ctx->region_type & ORT_TARGET) != 0)
omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
@@ -15107,7 +16719,7 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
&& (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
{
remove = true;
- break;
+ goto end_adjust_omp_map_clause;
}
/* If we have a DECL_VALUE_EXPR (e.g. this is a class member and/or
a variable captured in a lambda closure), look through that now
@@ -15123,10 +16735,24 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
decl = OMP_CLAUSE_DECL (c) = DECL_VALUE_EXPR (decl);
if (TREE_CODE (decl) == TARGET_EXPR)
{
- if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
+ if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
is_gimple_lvalue, fb_lvalue) == GS_ERROR)
remove = true;
}
+ else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_GRID_DIM
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_GRID_STRIDE)
+ {
+ /* The OMP_CLAUSE_DECL for GRID_DIM/GRID_STRIDE isn't necessarily
+ an lvalue -- e.g. it might be a constant. So handle it
+ specially here. */
+ if (gimplify_expr (&OMP_CLAUSE_DECL (c), seq_p, NULL,
+ is_gimple_val, fb_rvalue) == GS_ERROR)
+ {
+ gimplify_omp_ctxp = ctx;
+ remove = true;
+ }
+ break;
+ }
else if (!DECL_P (decl))
{
if ((ctx->region_type & ORT_TARGET) != 0
@@ -15210,19 +16836,43 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
/* If we have e.g. map(struct: *var), don't gimplify the
argument since omp-low.cc wants to see the decl itself. */
if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
- break;
+ goto end_adjust_omp_map_clause;
+
+ /* If we have a non-contiguous (strided/rectangular) update
+ operation with a VIEW_CONVERT_EXPR, we need to be careful not
+ to gimplify the conversion away, because we need it during
+ omp-low.cc in order to retrieve the array's dimensions. Just
+ gimplify partially instead. */
+ if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_GRID
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM_GRID)
+ && TREE_CODE (*pd) == VIEW_CONVERT_EXPR)
+ pd = &TREE_OPERAND (*pd, 0);
/* We've already partly gimplified this in
gimplify_scan_omp_clauses. Don't do any more. */
if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
- break;
+ goto end_adjust_omp_map_clause;
gimplify_omp_ctxp = ctx->outer_context;
- if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
- fb_lvalue) == GS_ERROR)
- remove = true;
+ if (gimplify_expr (pd, seq_p, NULL, is_gimple_lvalue,
+ fb_lvalue | fb_mayfail) == GS_ERROR)
+ {
+ sorry_at (OMP_CLAUSE_LOCATION (c),
+ "unsupported map expression %qE",
+ OMP_CLAUSE_DECL (c));
+ remove = true;
+ }
+
+ if (TREE_CODE (*pd) == ARRAY_REF
+ && DECL_P (TREE_OPERAND (*pd, 1))
+ && (ctx->region_type & ORT_TARGET) != 0
+ && (ctx->region_type & ORT_ACC) != 0
+ && ctx->region_type != ORT_ACC_KERNELS)
+ omp_add_variable (ctx, TREE_OPERAND (*pd, 1),
+ GOVD_FIRSTPRIVATE | GOVD_SEEN);
+
gimplify_omp_ctxp = ctx;
- break;
+ goto end_adjust_omp_map_clause;
}
if ((code == OMP_TARGET
@@ -15355,6 +17005,21 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
== GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION)))
move_attach = true;
+ if (!remove && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
+ && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
+ && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
+ && OMP_CLAUSE_CHAIN (c)
+ && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
+ && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
+ == GOMP_MAP_ALWAYS_POINTER)
+ || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
+ == GOMP_MAP_ATTACH_DETACH)
+ || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
+ == GOMP_MAP_TO_PSET)))
+ prev_list_p = list_p;
+
+end_adjust_omp_map_clause:
+ exit_omp_iterator_loop_context (c);
break;
case OMP_CLAUSE_TO:
@@ -15404,10 +17069,22 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
case OMP_CLAUSE_TASK_REDUCTION:
decl = OMP_CLAUSE_DECL (c);
/* OpenACC reductions need a present_or_copy data clause.
- Add one if necessary. Emit error when the reduction is private. */
+ Add one if necessary. Emit error when the reduction is
+ private. */
if (ctx->region_type == ORT_ACC_PARALLEL
|| ctx->region_type == ORT_ACC_SERIAL)
{
+ if (TREE_CODE (decl) == MEM_REF
+ && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
+ {
+ tree addr = TREE_OPERAND (decl, 0);
+ if (TREE_CODE (addr) == POINTER_PLUS_EXPR)
+ addr = TREE_OPERAND (addr, 0);
+ if (TREE_CODE (addr) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (addr, 0)))
+ decl = TREE_OPERAND (addr, 0);
+ }
+
n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
{
@@ -15527,6 +17204,8 @@ gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
case OMP_CLAUSE_FINALIZE:
case OMP_CLAUSE_INCLUSIVE:
case OMP_CLAUSE_EXCLUSIVE:
+ case OMP_CLAUSE_USES_ALLOCATORS:
+ case OMP_CLAUSE__OMPACC_:
break;
case OMP_CLAUSE_NOHOST:
@@ -16133,6 +17812,111 @@ gimplify_omp_loop_xform (tree *expr_p, gimple_seq *pre_p)
return GS_ALL_DONE;
}
+/* Helper function for localize_reductions. Replace all uses of REF_VAR with
+ LOCAL_VAR. */
+
+static tree
+localize_reductions_r (tree *tp, int *walk_subtrees, void *data)
+{
+ enum tree_code tc = TREE_CODE (*tp);
+ struct privatize_reduction *pr = (struct privatize_reduction *) data;
+
+ if (TYPE_P (*tp))
+ *walk_subtrees = 0;
+
+ switch (tc)
+ {
+ case INDIRECT_REF:
+ case MEM_REF:
+ if (TREE_OPERAND (*tp, 0) == pr->ref_var)
+ *tp = pr->local_var;
+
+ *walk_subtrees = 0;
+ break;
+
+ case VAR_DECL:
+ case PARM_DECL:
+ case RESULT_DECL:
+ if (*tp == pr->ref_var)
+ *tp = pr->local_var;
+
+ *walk_subtrees = 0;
+ break;
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
+/* OpenACC worker and vector loop state propagation requires reductions
+ to be inside local variables. This function replaces all reference-type
+ reduction variables associated with the loop with a local copy. It is
+ also used to create private copies of reduction variables for those
+ which are not associated with acc loops. */
+
+static void
+localize_reductions (tree clauses, tree body)
+{
+ tree c, var, type, new_var;
+ struct privatize_reduction pr;
+
+ for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
+ if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
+ {
+ var = OMP_CLAUSE_DECL (c);
+
+ if (!lang_hooks.decls.omp_privatize_by_reference (var))
+ {
+ OMP_CLAUSE_REDUCTION_PRIVATE_DECL (c) = NULL;
+ continue;
+ }
+
+ type = TREE_TYPE (TREE_TYPE (var));
+ new_var = create_tmp_var (type, IDENTIFIER_POINTER (DECL_NAME (var)));
+
+ pr.ref_var = var;
+ pr.local_var = new_var;
+
+ walk_tree (&body, localize_reductions_r, &pr, NULL);
+
+ OMP_CLAUSE_REDUCTION_PRIVATE_DECL (c) = new_var;
+ }
+ else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
+ {
+ var = OMP_CLAUSE_DECL (c);
+
+ if (!lang_hooks.decls.omp_privatize_by_reference (var))
+ continue;
+ type = TREE_TYPE (TREE_TYPE (var));
+ if (TREE_CODE (type) == ARRAY_TYPE)
+ continue;
+ new_var = create_tmp_var (type, IDENTIFIER_POINTER (DECL_NAME (var)));
+
+ pr.ref_var = var;
+ pr.local_var = new_var;
+
+ walk_tree (&body, localize_reductions_r, &pr, NULL);
+ }
+}
+
+
+/* Return true if in an omp_context in OMPACC mode. */
+static bool
+gimplify_omp_ctx_ompacc_p (void)
+{
+ if (cgraph_node::get (current_function_decl)->offloadable
+ && lookup_attribute ("ompacc",
+ DECL_ATTRIBUTES (current_function_decl)))
+ return true;
+ struct gimplify_omp_ctx *ctx;
+ for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
+ if (ctx->ompacc)
+ return true;
+ return false;
+}
+
/* Gimplify the gross structure of an OMP_FOR statement. */
static enum gimplify_status
@@ -16164,6 +17948,18 @@ gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
*expr_p = NULL_TREE;
return GS_ERROR;
}
+
+ if (flag_openmp_target == OMP_TARGET_MODE_OMPACC
+ && gimplify_omp_ctx_ompacc_p ())
+ {
+ gcc_assert (inner_for_stmt && TREE_CODE (for_stmt) == OMP_DISTRIBUTE);
+ *expr_p = OMP_FOR_BODY (for_stmt);
+ tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_GANG);
+ OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (inner_for_stmt);
+ OMP_FOR_CLAUSES (inner_for_stmt) = c;
+ return GS_OK;
+ }
+
gcc_assert (inner_for_stmt == *data[3]);
omp_maybe_apply_loop_xforms (data[3],
data[2]
@@ -16392,6 +18188,24 @@ gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
gcc_unreachable ();
}
+ if (ort == ORT_ACC)
+ {
+ gimplify_omp_ctx *outer = gimplify_omp_ctxp;
+
+ while (outer
+ && outer->region_type != ORT_ACC_PARALLEL
+ && outer->region_type != ORT_ACC_KERNELS)
+ outer = outer->outer_context;
+
+ /* FIXME: Reductions only work in parallel regions at present. We avoid
+ doing the reduction localization transformation in kernels regions
+ here, because the code to remove reductions in kernels regions cannot
+ handle that. */
+ if (outer && outer->region_type == ORT_ACC_PARALLEL)
+ localize_reductions (OMP_FOR_CLAUSES (for_stmt),
+ OMP_FOR_BODY (for_stmt));
+ }
+
/* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
clause for the IV. */
if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
@@ -17993,17 +19807,29 @@ gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
gcc_unreachable ();
}
+ gimple_seq iterator_loops_seq = NULL;
+ if (TREE_CODE (expr) == OMP_TARGET)
+ {
+ remove_unused_omp_iterator_vars (&OMP_CLAUSES (expr));
+ build_omp_iterators_loops (&OMP_CLAUSES (expr), &iterator_loops_seq);
+ }
+
bool save_in_omp_construct = in_omp_construct;
if ((ort & ORT_ACC) == 0)
in_omp_construct = false;
gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
- TREE_CODE (expr));
+ TREE_CODE (expr), &iterator_loops_seq);
if (TREE_CODE (expr) == OMP_TARGET)
optimize_target_teams (expr, pre_p);
if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
|| (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
{
push_gimplify_context ();
+
+ /* FIXME: Reductions are not supported in kernels regions yet. */
+ if (/*ort == ORT_ACC_KERNELS ||*/ ort == ORT_ACC_PARALLEL)
+ localize_reductions (OMP_CLAUSES (expr), OMP_BODY (expr));
+
gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
if (gimple_code (g) == GIMPLE_BIND)
pop_gimplify_context (g);
@@ -18032,11 +19858,97 @@ gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
body = NULL;
gimple_seq_add_stmt (&body, g);
}
+ else if ((ort & ORT_TARGET) != 0 && (ort & ORT_ACC) == 0)
+ {
+ gimple_seq init_seq = NULL;
+ gimple_seq fini_seq = NULL;
+
+ tree omp_init_allocator_fn = NULL_TREE;
+ tree omp_destroy_allocator_fn = NULL_TREE;
+
+ for (tree *cp = &OMP_CLAUSES (expr); *cp != NULL;
+ cp = &OMP_CLAUSE_CHAIN (*cp))
+ if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_USES_ALLOCATORS)
+ {
+ tree c = *cp;
+ tree allocator = OMP_CLAUSE_USES_ALLOCATORS_ALLOCATOR (c);
+ tree memspace = OMP_CLAUSE_USES_ALLOCATORS_MEMSPACE (c);
+ tree traits = OMP_CLAUSE_USES_ALLOCATORS_TRAITS (c);
+
+ if (omp_init_allocator_fn == NULL_TREE)
+ {
+ omp_init_allocator_fn
+ = builtin_decl_explicit (BUILT_IN_OMP_INIT_ALLOCATOR);
+ omp_destroy_allocator_fn
+ = builtin_decl_explicit (BUILT_IN_OMP_DESTROY_ALLOCATOR);
+ }
+ tree ntraits, traits_var;
+ if (traits == NULL_TREE)
+ {
+ ntraits = integer_zero_node;
+ traits_var = null_pointer_node;
+ }
+ else if (DECL_INITIAL (traits))
+ {
+ location_t loc = OMP_CLAUSE_LOCATION (c);
+ tree t = DECL_INITIAL (traits);
+ gcc_assert (TREE_CODE (t) == CONSTRUCTOR);
+ ntraits = build_int_cst (integer_type_node,
+ CONSTRUCTOR_NELTS (t));
+ t = get_initialized_tmp_var (t, &init_seq, NULL);
+ traits_var = build_fold_addr_expr_loc (loc, t);
+ }
+ else
+ {
+ location_t loc = OMP_CLAUSE_LOCATION (c);
+ gcc_assert (TREE_CODE (TREE_TYPE (traits)) == ARRAY_TYPE);
+ tree t = TYPE_DOMAIN (TREE_TYPE (traits));
+ tree min = TYPE_MIN_VALUE (t);
+ tree max = TYPE_MAX_VALUE (t);
+ gcc_assert (TREE_CODE (min) == INTEGER_CST
+ && TREE_CODE (max) == INTEGER_CST);
+ t = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (min),
+ max, min);
+ t = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (min),
+ t, build_int_cst (TREE_TYPE (min), 1));
+ ntraits = t;
+ traits_var = build_fold_addr_expr_loc (loc, traits);
+ }
+
+ if (memspace == NULL_TREE)
+ memspace = build_int_cst (pointer_sized_int_node, 0);
+ else
+ memspace = fold_convert (pointer_sized_int_node,
+ memspace);
+
+ tree call = build_call_expr_loc (OMP_CLAUSE_LOCATION (c),
+ omp_init_allocator_fn, 3,
+ memspace, ntraits,
+ traits_var);
+ call = fold_convert (TREE_TYPE (allocator), call);
+ gimplify_assign (allocator, call, &init_seq);
+
+ call = build_call_expr_loc (OMP_CLAUSE_LOCATION (c),
+ omp_destroy_allocator_fn, 1,
+ allocator);
+ gimplify_and_add (call, &fini_seq);
+ }
+
+ if (fini_seq)
+ {
+ gbind *bind = as_a<gbind *> (gimple_seq_first_stmt (body));
+ g = gimple_build_try (gimple_bind_body (bind),
+ fini_seq, GIMPLE_TRY_FINALLY);
+ gimple_seq_add_stmt (&init_seq, g);
+ gimple_bind_set_body (bind, init_seq);
+ body = bind;
+ }
+ }
}
else
gimplify_and_add (OMP_BODY (expr), &body);
gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
- TREE_CODE (expr));
+ TREE_CODE (expr), &iterator_loops_seq);
in_omp_construct = save_in_omp_construct;
switch (TREE_CODE (expr))
@@ -18079,7 +19991,7 @@ gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
break;
case OMP_TARGET:
stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
- OMP_CLAUSES (expr));
+ OMP_CLAUSES (expr), iterator_loops_seq);
break;
case OMP_TARGET_DATA:
/* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
@@ -18154,10 +20066,16 @@ gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
default:
gcc_unreachable ();
}
+
+ gimple_seq iterator_loops_seq = NULL;
+ remove_unused_omp_iterator_vars (&OMP_STANDALONE_CLAUSES (expr));
+ build_omp_iterators_loops (&OMP_STANDALONE_CLAUSES (expr),
+ &iterator_loops_seq);
+
gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
- ort, TREE_CODE (expr));
+ ort, TREE_CODE (expr), &iterator_loops_seq);
gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
- TREE_CODE (expr));
+ TREE_CODE (expr), &iterator_loops_seq);
if (TREE_CODE (expr) == OACC_UPDATE
&& omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
OMP_CLAUSE_IF_PRESENT))
@@ -18221,7 +20139,8 @@ gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
gcc_unreachable ();
}
}
- stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
+ stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr),
+ iterator_loops_seq);
gimplify_seq_add_stmt (pre_p, stmt);
*expr_p = NULL_TREE;
@@ -19257,6 +21176,15 @@ gimplify_omp_metadirective (tree *expr_p, gimple_seq *pre_p, gimple_seq *,
return GS_OK;
}
+/* Gimplify an OMP_DECLARE_MAPPER node (by just removing it). */
+
+static enum gimplify_status
+gimplify_omp_declare_mapper (tree *expr_p)
+{
+ *expr_p = NULL_TREE;
+ return GS_ALL_DONE;
+}
+
/* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
expression produces a value to be used as an operand inside a GIMPLE
statement, the value will be stored back in *EXPR_P. This value will
@@ -20218,6 +22146,10 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
ret = GS_ALL_DONE;
break;
+ case OMP_DECLARE_MAPPER:
+ ret = gimplify_omp_declare_mapper (expr_p);
+ break;
+
case TRANSACTION_EXPR:
ret = gimplify_transaction (expr_p, pre_p);
break;