Diffstat (limited to 'gcc/fortran/trans-openmp.cc')
-rw-r--r--  gcc/fortran/trans-openmp.cc | 1007
1 file changed, 942 insertions(+), 65 deletions(-)
diff --git a/gcc/fortran/trans-openmp.cc b/gcc/fortran/trans-openmp.cc
index 03d9432..0b8150f 100644
--- a/gcc/fortran/trans-openmp.cc
+++ b/gcc/fortran/trans-openmp.cc
@@ -25,6 +25,10 @@ along with GCC; see the file COPYING3. If not see
#include "options.h"
#include "tree.h"
#include "gfortran.h"
+#include "basic-block.h"
+#include "tree-ssa.h"
+#include "function.h"
+#include "gimple.h"
#include "gimple-expr.h"
#include "trans.h"
#include "stringpool.h"
@@ -41,6 +45,8 @@ along with GCC; see the file COPYING3. If not see
#include "omp-low.h"
#include "memmodel.h" /* For MEMMODEL_ enums. */
#include "dependency.h"
+#include "gimple-iterator.h" /* For gsi_iterator_update. */
+#include "gimplify-me.h" /* For force_gimple_operand. */
#undef GCC_DIAG_STYLE
#define GCC_DIAG_STYLE __gcc_tdiag__
@@ -375,22 +381,28 @@ gfc_omp_report_decl (tree decl)
return decl;
}
-/* Return true if TYPE has any allocatable components. */
+/* Return true if TYPE has any allocatable components;
+ if ptr_ok, the decl itself is permitted to have the POINTER attribute.
+ If shallow_alloc_only, return true only if any of the fields is an
+ allocatable; called with true by gfc_omp_replace_alloc_by_to_mapping. */
static bool
-gfc_has_alloc_comps (tree type, tree decl)
+gfc_has_alloc_comps (tree type, tree decl, bool ptr_ok,
+ bool shallow_alloc_only=false)
{
tree field, ftype;
if (POINTER_TYPE_P (type))
{
- if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
+ if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl)
+ || (ptr_ok && GFC_DECL_GET_SCALAR_POINTER (decl)))
type = TREE_TYPE (type);
else if (GFC_DECL_GET_SCALAR_POINTER (decl))
return false;
}
- if (GFC_DESCRIPTOR_TYPE_P (type)
+ if (!ptr_ok
+ && GFC_DESCRIPTOR_TYPE_P (type)
&& (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
|| GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
return false;
@@ -409,12 +421,51 @@ gfc_has_alloc_comps (tree type, tree decl)
if (GFC_DESCRIPTOR_TYPE_P (ftype)
&& GFC_TYPE_ARRAY_AKIND (ftype) == GFC_ARRAY_ALLOCATABLE)
return true;
- if (gfc_has_alloc_comps (ftype, field))
+ if (!shallow_alloc_only
+ && gfc_has_alloc_comps (ftype, field, false))
return true;
}
return false;
}
+/* gfc_omp_replace_alloc_by_to_mapping is used with gfc_omp_deep_mapping... to
+ handle the following:
+
+ For map(alloc: dt), the array descriptors of allocatable components should
+ be mapped as 'to'; this could be done by (A) adding 'map(to: dt%alloc_comp)'
+ for each component (and avoiding to increment the reference count).
+ Or (B) by just mapping all of 'dt' as 'to'.
+
+ If 'dt' contains several allocatable components and not much other data,
+ (B) is more efficient. If 'dt' contains a large const-size array, (B) will
+ copy it to the device instead of only 'alloc'ating it.
+
+ IMPLEMENTATION CHOICE: We do (B). It avoids the ref-count issue and it is
+ expected that, for real-world code, derived types with allocatable
+ components have few other components and no large const-size arrays. This
+ copying is done irrespective of whether the allocatables are allocated.
+
+ If users want to save memory, they have to use 'map(alloc: dt%comp)', as
+ even with 'map(alloc: dt)' all components get copied.
+
+ For the copy to the device, only allocatable arrays are relevant as their
+ bounds are required; the pointer is set separately (GOMP_MAP_ATTACH) and
+ is the only setting required for scalars. However, when later copying out
+ of the device, an unallocated allocatable must remain unallocated/NULL on
+ the host; to achieve this we also must have it set to NULL on the device
+ to avoid issues with uninitialized memory being copied back for the pointer
+ address. If we could set the pointer to NULL, gfc_has_alloc_comps's
+ shallow_alloc_only could be restricted to return true only for arrays.
+
+ We only need to return true if there are allocatable-array components. */
+
+static bool
+gfc_omp_replace_alloc_by_to_mapping (tree type, tree decl, bool ptr_ok)
+{
+ return gfc_has_alloc_comps (type, decl, ptr_ok, true);
+}
+
+
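A user-level sketch of the trade-off above (illustrative Fortran, not taken from this patch); with choice (B), the 'alloc' map of the derived type is emitted as 'to' so that the descriptors of its allocatable components arrive on the device with valid bounds:

  subroutine demo
    type t
       integer :: n = 0
       integer, allocatable :: a(:)
    end type t
    type(t) :: dt
    allocate (dt%a(10))
    ! The descriptor of dt%a must reach the device with valid bounds, so the
    ! 'alloc' map of dt is emitted as 'to' (choice (B) above).
    !$omp target enter data map(alloc: dt)
    !$omp target exit data map(delete: dt)
  end subroutine demo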
/* Return true if TYPE is polymorphic but not with pointer attribute. */
static bool
@@ -487,7 +538,7 @@ gfc_omp_private_outer_ref (tree decl)
if (GFC_DECL_GET_SCALAR_ALLOCATABLE (decl))
return true;
- if (gfc_has_alloc_comps (type, decl))
+ if (gfc_has_alloc_comps (type, decl, false))
return true;
return false;
@@ -627,7 +678,7 @@ gfc_walk_alloc_comps (tree decl, tree dest, tree var,
{
tree ftype = TREE_TYPE (field);
tree declf, destf = NULL_TREE;
- bool has_alloc_comps = gfc_has_alloc_comps (ftype, field);
+ bool has_alloc_comps = gfc_has_alloc_comps (ftype, field, false);
if ((!GFC_DESCRIPTOR_TYPE_P (ftype)
|| GFC_TYPE_ARRAY_AKIND (ftype) != GFC_ARRAY_ALLOCATABLE)
&& !GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
@@ -751,7 +802,7 @@ gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
&& (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
|| !POINTER_TYPE_P (type)))
{
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
gcc_assert (outer);
gfc_start_block (&block);
@@ -804,7 +855,7 @@ gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
else
gfc_add_modify (&cond_block, unshare_expr (decl),
fold_convert (TREE_TYPE (decl), ptr));
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
tree tem = gfc_walk_alloc_comps (outer, decl,
OMP_CLAUSE_DECL (clause),
@@ -945,7 +996,7 @@ gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
&& (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
|| !POINTER_TYPE_P (type)))
{
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
gfc_start_block (&block);
gfc_add_modify (&block, dest, src);
@@ -1004,7 +1055,7 @@ gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
srcptr, size);
gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
tree tem = gfc_walk_alloc_comps (src, dest,
OMP_CLAUSE_DECL (clause),
@@ -1049,7 +1100,7 @@ gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
&& (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
|| !POINTER_TYPE_P (type)))
{
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
gfc_start_block (&block);
/* First dealloc any allocatable components in DEST. */
@@ -1071,7 +1122,7 @@ gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
gfc_start_block (&block);
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
then_b = gfc_walk_alloc_comps (dest, NULL_TREE, OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_DTOR);
@@ -1186,7 +1237,7 @@ gfc_omp_clause_assign_op (tree clause, tree dest, tree src)
builtin_decl_explicit (BUILT_IN_MEMCPY), 3, ptr,
srcptr, size);
gfc_add_expr_to_block (&cond_block, fold_convert (void_type_node, call));
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
tree tem = gfc_walk_alloc_comps (src, dest,
OMP_CLAUSE_DECL (clause),
@@ -1438,7 +1489,7 @@ gfc_omp_clause_dtor (tree clause, tree decl)
&& (!GFC_DECL_GET_SCALAR_ALLOCATABLE (OMP_CLAUSE_DECL (clause))
|| !POINTER_TYPE_P (type)))
{
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
return gfc_walk_alloc_comps (decl, NULL_TREE,
OMP_CLAUSE_DECL (clause),
WALK_ALLOC_COMPS_DTOR);
@@ -1458,7 +1509,7 @@ gfc_omp_clause_dtor (tree clause, tree decl)
tem = gfc_call_free (decl);
tem = gfc_omp_unshare_expr (tem);
- if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause)))
+ if (gfc_has_alloc_comps (type, OMP_CLAUSE_DECL (clause), false))
{
stmtblock_t block;
tree then_b;
@@ -1538,6 +1589,7 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
return;
tree decl = OMP_CLAUSE_DECL (c);
+ location_t loc = OMP_CLAUSE_LOCATION (c);
/* Assumed-size arrays can't be mapped implicitly, they have to be
mapped explicitly using array sections. */
@@ -1553,13 +1605,9 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
return;
}
- if (!openacc && GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
- warning_at (OMP_CLAUSE_LOCATION (c), OPT_Wopenmp,
- "Implicit mapping of polymorphic variable %qD is "
- "unspecified behavior", decl);
-
tree c2 = NULL_TREE, c3 = NULL_TREE, c4 = NULL_TREE;
tree present = gfc_omp_check_optional_argument (decl, true);
+ tree orig_decl = NULL_TREE;
if (POINTER_TYPE_P (TREE_TYPE (decl)))
{
if (!gfc_omp_privatize_by_reference (decl)
@@ -1568,7 +1616,7 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
&& !GFC_DECL_CRAY_POINTEE (decl)
&& !GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
return;
- tree orig_decl = decl;
+ orig_decl = decl;
c4 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c4, GOMP_MAP_POINTER);
@@ -1579,16 +1627,16 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
&& (GFC_DECL_GET_SCALAR_POINTER (orig_decl)
|| GFC_DECL_GET_SCALAR_ALLOCATABLE (orig_decl)))
{
- c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_POINTER);
- OMP_CLAUSE_DECL (c2) = decl;
+ OMP_CLAUSE_DECL (c2) = unshare_expr (decl);
OMP_CLAUSE_SIZE (c2) = size_int (0);
stmtblock_t block;
gfc_start_block (&block);
- tree ptr = decl;
- ptr = gfc_build_cond_assign_expr (&block, present, decl,
- null_pointer_node);
+ tree ptr = gfc_build_cond_assign_expr (&block, present,
+ unshare_expr (decl),
+ null_pointer_node);
gimplify_and_add (gfc_finish_block (&block), pre_p);
ptr = build_fold_indirect_ref (ptr);
OMP_CLAUSE_DECL (c) = ptr;
@@ -1605,10 +1653,10 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
{
c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_POINTER);
- OMP_CLAUSE_DECL (c3) = unshare_expr (decl);
+ OMP_CLAUSE_DECL (c3) = decl;
OMP_CLAUSE_SIZE (c3) = size_int (0);
decl = build_fold_indirect_ref (decl);
- OMP_CLAUSE_DECL (c) = decl;
+ OMP_CLAUSE_DECL (c) = unshare_expr (decl);
}
}
if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
@@ -1634,7 +1682,7 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
ptr = build_fold_indirect_ref (ptr);
OMP_CLAUSE_DECL (c) = ptr;
- c2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_TO_PSET);
if (present)
{
@@ -1651,7 +1699,7 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
: GOMP_MAP_POINTER);
if (present)
{
- ptr = gfc_conv_descriptor_data_get (decl);
+ ptr = gfc_conv_descriptor_data_get (unshare_expr (decl));
ptr = gfc_build_addr_expr (NULL, ptr);
ptr = gfc_build_cond_assign_expr (&block, present,
ptr, null_pointer_node);
@@ -1664,6 +1712,17 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
tree size = create_tmp_var (gfc_array_index_type);
tree elemsz = TYPE_SIZE_UNIT (gfc_get_element_type (type));
elemsz = fold_convert (gfc_array_index_type, elemsz);
+
+ if (orig_decl == NULL_TREE)
+ orig_decl = decl;
+ if (!openacc
+ && gfc_has_alloc_comps (type, orig_decl, true))
+ {
+ /* Save array descriptor for use in gfc_omp_deep_mapping{,_p,_cnt};
+ force evaluate to ensure that it is not gimplified + is a decl. */
+ gfc_allocate_lang_decl (size);
+ GFC_DECL_SAVED_DESCRIPTOR (size) = orig_decl;
+ }
enum gfc_array_kind akind = GFC_TYPE_ARRAY_AKIND (type);
if (akind == GFC_ARRAY_ALLOCATABLE
|| akind == GFC_ARRAY_POINTER
@@ -1692,14 +1751,14 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
else_b = gfc_finish_block (&cond_block);
tem = gfc_conv_descriptor_data_get (unshare_expr (decl));
tem = fold_convert (pvoid_type_node, tem);
- cond = fold_build2_loc (input_location, NE_EXPR,
+ cond = fold_build2_loc (loc, NE_EXPR,
boolean_type_node, tem, null_pointer_node);
if (present)
{
- cond = fold_build2_loc (input_location, TRUTH_ANDIF_EXPR,
+ cond = fold_build2_loc (loc, TRUTH_ANDIF_EXPR,
boolean_type_node, present, cond);
}
- gfc_add_expr_to_block (&block, build3_loc (input_location, COND_EXPR,
+ gfc_add_expr_to_block (&block, build3_loc (loc, COND_EXPR,
void_type_node, cond,
then_b, else_b));
}
@@ -1739,11 +1798,30 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
tree stmt = gfc_finish_block (&block);
gimplify_and_add (stmt, pre_p);
}
+ else
+ {
+ if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
+ OMP_CLAUSE_SIZE (c)
+ = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
+ : TYPE_SIZE_UNIT (TREE_TYPE (decl));
+
+ tree type = TREE_TYPE (decl);
+ if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
+ type = TREE_TYPE (type);
+ if (!openacc
+ && orig_decl != NULL_TREE
+ && gfc_has_alloc_comps (type, orig_decl, true))
+ {
+ /* Save array descriptor for use in gfc_omp_deep_mapping{,_p,_cnt};
+ force evaluate to ensure that it is not gimplified + is a decl. */
+ tree size = create_tmp_var (TREE_TYPE (OMP_CLAUSE_SIZE (c)));
+ gfc_allocate_lang_decl (size);
+ GFC_DECL_SAVED_DESCRIPTOR (size) = orig_decl;
+ gimplify_assign (size, OMP_CLAUSE_SIZE (c), pre_p);
+ OMP_CLAUSE_SIZE (c) = size;
+ }
+ }
tree last = c;
- if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
- OMP_CLAUSE_SIZE (c)
- = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
- : TYPE_SIZE_UNIT (TREE_TYPE (decl));
if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
OMP_CLAUSE_SIZE (c) = size_int (0);
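A minimal sketch of what this branch handles (illustrative code): a derived-type variable with allocatable components mapped implicitly, for which the hook materializes the clause size in a temporary and records the variable in its GFC_DECL_SAVED_DESCRIPTOR for the deep-mapping hooks below:

  subroutine demo_implicit
    type t
       integer, allocatable :: a(:)
    end type t
    type(t) :: dt
    allocate (dt%a(5))
    !$omp target      ! no map clause: dt is mapped implicitly (tofrom)
    dt%a(1) = 2
    !$omp end target
  end subroutine demo_implicit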
@@ -1767,6 +1845,715 @@ gfc_omp_finish_clause (tree c, gimple_seq *pre_p, bool openacc)
}
+/* map(<flag>: data [len: <size>])
+ map(attach: &data [bias: <bias>])
+ offset += 2; offset_data += 2 */
+static void
+gfc_omp_deep_mapping_map (tree data, tree size, unsigned HOST_WIDE_INT tkind,
+ location_t loc, tree data_array, tree sizes_array,
+ tree kinds_array, tree offset_data, tree offset,
+ gimple_seq *seq, const gimple *ctx)
+{
+ tree one = build_int_cst (size_type_node, 1);
+
+ STRIP_NOPS (data);
+ if (!POINTER_TYPE_P (TREE_TYPE (data)))
+ {
+ gcc_assert (TREE_CODE (data) == INDIRECT_REF);
+ data = TREE_OPERAND (data, 0);
+ }
+
+ /* data_array[offset_data] = data; */
+ tree tmp = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (data_array)),
+ unshare_expr (data_array), offset_data,
+ NULL_TREE, NULL_TREE);
+ gimplify_assign (tmp, data, seq);
+
+ /* offset_data++ */
+ tmp = build2_loc (loc, PLUS_EXPR, size_type_node, offset_data, one);
+ gimplify_assign (offset_data, tmp, seq);
+
+ /* data_array[offset_data] = &data; */
+ tmp = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (data_array)),
+ unshare_expr (data_array),
+ offset_data, NULL_TREE, NULL_TREE);
+ gimplify_assign (tmp, build_fold_addr_expr (data), seq);
+
+ /* offset_data++ */
+ tmp = build2_loc (loc, PLUS_EXPR, size_type_node, offset_data, one);
+ gimplify_assign (offset_data, tmp, seq);
+
+ /* sizes_array[offset] = size */
+ tmp = build2_loc (loc, MULT_EXPR, size_type_node,
+ TYPE_SIZE_UNIT (size_type_node), offset);
+ tmp = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (sizes_array),
+ sizes_array, tmp);
+ gimple_seq seq2 = NULL;
+ tmp = force_gimple_operand (tmp, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+ tmp = build_fold_indirect_ref_loc (loc, tmp);
+ gimplify_assign (tmp, size, seq);
+
+ /* FIXME: tkind |= talign << talign_shift; */
+ /* kinds_array[offset] = tkind. */
+ tmp = build2_loc (loc, MULT_EXPR, size_type_node,
+ TYPE_SIZE_UNIT (short_unsigned_type_node), offset);
+ tmp = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (kinds_array),
+ kinds_array, tmp);
+ seq2 = NULL;
+ tmp = force_gimple_operand (tmp, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+ tmp = build_fold_indirect_ref_loc (loc, tmp);
+ gimplify_assign (tmp, build_int_cst (short_unsigned_type_node, tkind), seq);
+
+ /* offset++ */
+ tmp = build2_loc (loc, PLUS_EXPR, size_type_node, offset, one);
+ gimplify_assign (offset, tmp, seq);
+
+ /* sizes_array[offset] = bias (= 0). */
+ tmp = build2_loc (loc, MULT_EXPR, size_type_node,
+ TYPE_SIZE_UNIT (size_type_node), offset);
+ tmp = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (sizes_array),
+ sizes_array, tmp);
+ seq2 = NULL;
+ tmp = force_gimple_operand (tmp, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+ tmp = build_fold_indirect_ref_loc (loc, tmp);
+ gimplify_assign (tmp, build_zero_cst (size_type_node), seq);
+
+ gcc_assert (gimple_code (ctx) == GIMPLE_OMP_TARGET);
+ tkind = (gimple_omp_target_kind (ctx) == GF_OMP_TARGET_KIND_EXIT_DATA
+ ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
+
+ /* kinds_array[offset] = tkind. */
+ tmp = build2_loc (loc, MULT_EXPR, size_type_node,
+ TYPE_SIZE_UNIT (short_unsigned_type_node), offset);
+ tmp = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (kinds_array),
+ kinds_array, tmp);
+ seq2 = NULL;
+ tmp = force_gimple_operand (tmp, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+ tmp = build_fold_indirect_ref_loc (loc, tmp);
+ gimplify_assign (tmp, build_int_cst (short_unsigned_type_node, tkind), seq);
+
+ /* offset++ */
+ tmp = build2_loc (loc, PLUS_EXPR, size_type_node, offset, one);
+ gimplify_assign (offset, tmp, seq);
+}
+
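A sketch of what this bookkeeping amounts to at the user level (illustrative Fortran, assuming the usual OpenMP 5.x allocatable-component semantics); each allocated component adds one data entry plus one attach/detach entry, hence both offsets advance by two:

  subroutine demo_entries
    type t
       real, allocatable :: x(:), y(:)
    end type t
    type(t) :: dt
    allocate (dt%x(100), dt%y(200))
    ! At run time, besides the entry for dt itself, two entries are appended
    ! per allocated component: its data (len = elements * element size) and a
    ! GOMP_MAP_ATTACH (on exit data: GOMP_MAP_DETACH) entry for the data pointer.
    !$omp target map(tofrom: dt)
    dt%x(1) = dt%y(1)
    !$omp end target
  end subroutine demo_entries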
+static void gfc_omp_deep_mapping_item (bool, bool, bool, location_t, tree,
+ tree *, unsigned HOST_WIDE_INT, tree,
+ tree, tree, tree, tree, tree,
+ gimple_seq *, const gimple *, bool *);
+
+/* Map allocatable components. */
+static void
+gfc_omp_deep_mapping_comps (bool is_cnt, location_t loc, tree decl,
+ tree *token, unsigned HOST_WIDE_INT tkind,
+ tree data_array, tree sizes_array, tree kinds_array,
+ tree offset_data, tree offset, tree num,
+ gimple_seq *seq, const gimple *ctx,
+ bool *poly_warned)
+{
+ tree type = TREE_TYPE (decl);
+ if (TREE_CODE (type) != RECORD_TYPE)
+ return;
+ for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
+ {
+ type = TREE_TYPE (field);
+ if (gfc_is_polymorphic_nonptr (type)
+ || GFC_DECL_GET_SCALAR_ALLOCATABLE (field)
+ || (GFC_DESCRIPTOR_TYPE_P (type)
+ && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE))
+ {
+ tree tmp = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field),
+ decl, field, NULL_TREE);
+ gfc_omp_deep_mapping_item (is_cnt, true, true, loc, tmp, token,
+ tkind, data_array, sizes_array,
+ kinds_array, offset_data, offset, num,
+ seq, ctx, poly_warned);
+ }
+ else if (GFC_DECL_GET_SCALAR_POINTER (field)
+ || GFC_DESCRIPTOR_TYPE_P (type))
+ continue;
+ else if (gfc_has_alloc_comps (TREE_TYPE (field), field, false))
+ {
+ tree tmp = fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (field),
+ decl, field, NULL_TREE);
+ if (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE)
+ gfc_omp_deep_mapping_item (is_cnt, false, false, loc, tmp,
+ token, tkind, data_array, sizes_array,
+ kinds_array, offset_data, offset, num,
+ seq, ctx, poly_warned);
+ else
+ gfc_omp_deep_mapping_comps (is_cnt, loc, tmp, token, tkind,
+ data_array, sizes_array, kinds_array,
+ offset_data, offset, num, seq, ctx,
+ poly_warned);
+ }
+ }
+}
+
+static void
+gfc_omp_gen_simple_loop (tree var, tree begin, tree end, enum tree_code cond,
+ tree step, location_t loc, gimple_seq *seq1,
+ gimple_seq *seq2)
+{
+ tree tmp;
+
+ /* var = begin. */
+ gimplify_assign (var, begin, seq1);
+
+ /* Loop: for (var = begin; var <cond> end; var += step). */
+ tree label_loop = create_artificial_label (loc);
+ tree label_cond = create_artificial_label (loc);
+
+ gimplify_and_add (fold_build1_loc (loc, GOTO_EXPR, void_type_node,
+ label_cond), seq1);
+ gimple_seq_add_stmt (seq1, gimple_build_label (label_loop));
+
+ /* Everything above is seq1; place loop body here. */
+
+ /* End of loop body -> put into seq2. */
+ tmp = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (var), var, step);
+ gimplify_assign (var, tmp, seq2);
+ gimple_seq_add_stmt (seq2, gimple_build_label (label_cond));
+ tmp = fold_build2_loc (loc, cond, boolean_type_node, var, end);
+ tmp = build3_v (COND_EXPR, tmp, build1_v (GOTO_EXPR, label_loop),
+ build_empty_stmt (loc));
+ gimplify_and_add (tmp, seq2);
+}
+
+/* Return a size variable holding the number of elements of an array. */
+static tree
+gfc_omp_get_array_size (location_t loc, tree desc, gimple_seq *seq)
+{
+ tree tmp;
+ gimple_seq seq1 = NULL, seq2 = NULL;
+ tree size = build_decl (loc, VAR_DECL, create_tmp_var_name ("size"),
+ size_type_node);
+ tree extent = build_decl (loc, VAR_DECL, create_tmp_var_name ("extent"),
+ gfc_array_index_type);
+ tree idx = build_decl (loc, VAR_DECL, create_tmp_var_name ("idx"),
+ signed_char_type_node);
+
+ tree begin = build_zero_cst (signed_char_type_node);
+ tree end;
+ if (GFC_TYPE_ARRAY_AKIND (TREE_TYPE (desc)) == GFC_ARRAY_ASSUMED_SHAPE_CONT
+ || GFC_TYPE_ARRAY_AKIND (TREE_TYPE (desc)) == GFC_ARRAY_ASSUMED_SHAPE)
+ end = gfc_conv_descriptor_rank (desc);
+ else
+ end = build_int_cst (signed_char_type_node,
+ GFC_TYPE_ARRAY_RANK (TREE_TYPE (desc)));
+ tree step = build_int_cst (signed_char_type_node, 1);
+
+ /* size = 1
+ for (idx = 0; idx < rank; idx++)
+ extent = gfc->dim[i].ubound - gfc->dim[i].lbound + 1
+ if (extent < 0) extent = 0
+ size *= extent. */
+ gimplify_assign (size, build_int_cst (size_type_node, 1), seq);
+
+ gfc_omp_gen_simple_loop (idx, begin, end, LT_EXPR, step, loc, &seq1, &seq2);
+ gimple_seq_add_seq (seq, seq1);
+
+ tmp = fold_build2_loc (loc, MINUS_EXPR, gfc_array_index_type,
+ gfc_conv_descriptor_ubound_get (desc, idx),
+ gfc_conv_descriptor_lbound_get (desc, idx));
+ tmp = fold_build2_loc (loc, PLUS_EXPR, gfc_array_index_type,
+ tmp, gfc_index_one_node);
+ gimplify_assign (extent, tmp, seq);
+ tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
+ extent, gfc_index_zero_node);
+ tmp = build3_v (COND_EXPR, tmp,
+ fold_build2_loc (loc, MODIFY_EXPR,
+ gfc_array_index_type,
+ extent, gfc_index_zero_node),
+ build_empty_stmt (loc));
+ gimplify_and_add (tmp, seq);
+ /* size *= extent. */
+ gimplify_assign (size, fold_build2_loc (loc, MULT_EXPR, size_type_node, size,
+ fold_convert (size_type_node,
+ extent)), seq);
+ gimple_seq_add_seq (seq, seq2);
+ return size;
+}
+
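In Fortran source terms, the loop generated here computes the element count of the mapped array; a sketch with illustrative names:

  subroutine demo_size
    real, allocatable :: a(:,:)
    integer :: idx, sz
    allocate (a(3,0:4))
    sz = 1
    do idx = 1, rank (a)
       ! per-dimension extent, clamped at zero as in the generated code
       sz = sz * max (ubound (a, idx) - lbound (a, idx) + 1, 0)
    end do
    ! sz now equals size (a), i.e. 15
  end subroutine demo_size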
+/* Generate loop to access every array element; takes addr of first element
+ (decl's data comp); returns loop code in seq1 + seq2
+ and the pointer to the element as return value. */
+static tree
+gfc_omp_elmental_loop (location_t loc, tree decl, tree size, tree elem_len,
+ gimple_seq *seq1, gimple_seq *seq2)
+{
+ tree idx = build_decl (loc, VAR_DECL, create_tmp_var_name ("idx"),
+ size_type_node);
+ tree begin = build_zero_cst (size_type_node);
+ tree end = size;
+ tree step = build_int_cst (size_type_node, 1);
+ tree ptr;
+
+ gfc_omp_gen_simple_loop (idx, begin, end, LT_EXPR, step, loc, seq1, seq2);
+
+ tree type = TREE_TYPE (decl);
+ if (POINTER_TYPE_P (type))
+ {
+ type = TREE_TYPE (type);
+ gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
+ decl = fold_convert (build_pointer_type (TREE_TYPE (type)), decl);
+ }
+ else
+ {
+ gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
+ decl = build_fold_addr_expr_loc (loc, decl);
+ }
+ decl = fold_convert (build_pointer_type (TREE_TYPE (type)), decl);
+ tree tmp = build2_loc (loc, MULT_EXPR, size_type_node, idx,
+ fold_convert (size_type_node, elem_len));
+ ptr = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (decl), decl, tmp);
+ gimple_seq seq3 = NULL;
+ ptr = force_gimple_operand (ptr, &seq3, true, NULL_TREE);
+ gimple_seq_add_seq (seq1, seq3);
+
+ return ptr;
+}
+
+
+/* If do_copy, copy data pointer and vptr (if applicable) as well.
+ Otherwise, only handle allocatable components.
+ do_copy == false can happen only with nonpolymorphic arguments
+ to a copy clause.
+ If is_cnt, token ... offset are ignored and num is used; otherwise,
+ num is NULL_TREE and unused. */
+
+static void
+gfc_omp_deep_mapping_item (bool is_cnt, bool do_copy, bool do_alloc_check,
+ location_t loc, tree decl, tree *token,
+ unsigned HOST_WIDE_INT tkind, tree data_array,
+ tree sizes_array, tree kinds_array, tree offset_data,
+ tree offset, tree num, gimple_seq *seq,
+ const gimple *ctx, bool *poly_warned)
+{
+ tree tmp;
+ tree type = TREE_TYPE (decl);
+ if (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+ tree end_label = NULL_TREE;
+ tree size = NULL_TREE, elem_len = NULL_TREE;
+
+ bool poly = gfc_is_polymorphic_nonptr (type);
+ if (poly && is_cnt && !*poly_warned)
+ {
+ if (gfc_is_unlimited_polymorphic_nonptr (type))
+ error_at (loc,
+ "Mapping of unlimited polymorphic list item %qD is "
+ "unspecified behavior and unsupported", decl);
+
+ else
+ warning_at (loc, OPT_Wopenmp,
+ "Mapping of polymorphic list item %qD is "
+ "unspecified behavior", decl);
+ *poly_warned = true;
+ }
+ if (do_alloc_check)
+ {
+ tree then_label = create_artificial_label (loc);
+ end_label = create_artificial_label (loc);
+ tmp = decl;
+ if (TREE_CODE (TREE_TYPE (tmp)) == REFERENCE_TYPE
+ || (POINTER_TYPE_P (TREE_TYPE (tmp))
+ && (POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (tmp)))
+ || GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (tmp))))))
+ tmp = build_fold_indirect_ref_loc (loc, tmp);
+ if (poly)
+ tmp = gfc_class_data_get (tmp);
+ if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (tmp)))
+ tmp = gfc_conv_descriptor_data_get (tmp);
+ gimple_seq seq2 = NULL;
+ tmp = force_gimple_operand (tmp, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+
+ gimple_seq_add_stmt (seq,
+ gimple_build_cond (NE_EXPR, tmp, null_pointer_node,
+ then_label, end_label));
+ gimple_seq_add_stmt (seq, gimple_build_label (then_label));
+ }
+ tree class_decl = decl;
+ if (poly)
+ {
+ decl = gfc_class_data_get (decl);
+ type = TREE_TYPE (decl);
+ }
+ if (POINTER_TYPE_P (TREE_TYPE (decl)))
+ {
+ decl = build_fold_indirect_ref (decl);
+ type = TREE_TYPE (decl);
+ }
+
+ if (is_cnt && do_copy)
+ {
+ tree tmp = fold_build2_loc (loc, PLUS_EXPR, size_type_node,
+ num, build_int_cst (size_type_node, 1));
+ gimplify_assign (num, tmp, seq);
+ }
+ else if (do_copy)
+ {
+ /* copy data pointer */
+ tree bytesize;
+ if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
+ {
+ /* TODO: Optimization: Shouldn't this be an expr. const, except for
+ deferred-length strings. (Cf. also below). */
+ elem_len = (poly ? gfc_class_vtab_size_get (class_decl)
+ : gfc_conv_descriptor_elem_len (decl));
+ tmp = (POINTER_TYPE_P (TREE_TYPE (decl))
+ ? build_fold_indirect_ref (decl) : decl);
+ size = gfc_omp_get_array_size (loc, tmp, seq);
+ bytesize = fold_build2_loc (loc, MULT_EXPR, size_type_node,
+ fold_convert (size_type_node, size),
+ fold_convert (size_type_node, elem_len));
+ tmp = gfc_conv_descriptor_data_get (decl);
+ }
+ else if (poly)
+ {
+ tmp = decl;
+ bytesize = fold_convert (size_type_node,
+ gfc_class_vtab_size_get (class_decl));
+ }
+ else
+ {
+ tmp = decl;
+ bytesize = TYPE_SIZE_UNIT (TREE_TYPE (decl));
+ }
+ unsigned HOST_WIDE_INT tkind2 = tkind;
+ if (!is_cnt
+ && (tkind == GOMP_MAP_ALLOC
+ || (tkind == GOMP_MAP_FROM
+ && (gimple_omp_target_kind (ctx)
+ != GF_OMP_TARGET_KIND_EXIT_DATA)))
+ && gfc_omp_replace_alloc_by_to_mapping (TREE_TYPE (decl), decl, true))
+ tkind2 = tkind == GOMP_MAP_ALLOC ? GOMP_MAP_TO : GOMP_MAP_TOFROM;
+
+ gfc_omp_deep_mapping_map (tmp, bytesize, tkind2, loc, data_array,
+ sizes_array, kinds_array, offset_data,
+ offset, seq, ctx);
+ }
+
+ tmp = decl;
+ if (POINTER_TYPE_P (TREE_TYPE (decl)))
+ while (TREE_CODE (tmp) == COMPONENT_REF || TREE_CODE (tmp) == ARRAY_REF)
+ tmp = TREE_OPERAND (tmp, TREE_CODE (tmp) == COMPONENT_REF ? 1 : 0);
+ if (poly || gfc_has_alloc_comps (type, tmp, true))
+ {
+ gimple_seq seq2 = NULL;
+ if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl)))
+ {
+ if (elem_len == NULL_TREE)
+ {
+ elem_len = gfc_conv_descriptor_elem_len (decl);
+ size = fold_convert (size_type_node,
+ gfc_omp_get_array_size (loc, decl, seq));
+ }
+ decl = gfc_conv_descriptor_data_get (decl);
+ decl = gfc_omp_elmental_loop (loc, decl, size, elem_len, seq, &seq2);
+ decl = build_fold_indirect_ref_loc (loc, decl);
+ }
+ else if (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE)
+ {
+ type = TREE_TYPE (tmp);
+ /* FIXME: PR95868 - for var%str of deferred length, elem_len == 0;
+ len is stored as var%_str_length, but not in GFC_DECL_STRING_LEN
+ nor in TYPE_SIZE_UNIT as expression. */
+ elem_len = TYPE_SIZE_UNIT (TREE_TYPE (type));
+ size = fold_convert (size_type_node, GFC_TYPE_ARRAY_SIZE (type));
+ decl = gfc_omp_elmental_loop (loc, decl, size, elem_len, seq, &seq2);
+ decl = build_fold_indirect_ref_loc (loc, decl);
+ }
+ else if (POINTER_TYPE_P (TREE_TYPE (decl)))
+ decl = build_fold_indirect_ref (decl);
+
+ gfc_omp_deep_mapping_comps (is_cnt, loc, decl, token, tkind,
+ data_array, sizes_array, kinds_array,
+ offset_data, offset, num, seq, ctx,
+ poly_warned);
+ gimple_seq_add_seq (seq, seq2);
+ }
+ if (end_label)
+ gimple_seq_add_stmt (seq, gimple_build_label (end_label));
+}
+
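A sketch of code reaching the diagnostics emitted above (illustrative types): a polymorphic component makes the deep mapping warn under -Wopenmp, an unlimited polymorphic component is rejected:

  subroutine demo_poly
    type s
    end type s
    type t
       class(s), allocatable :: obj   ! mapping dt: warning (-Wopenmp)
       class(*), allocatable :: any   ! mapping dt: error (unsupported)
    end type t
    type(t) :: dt
    !$omp target map(tofrom: dt)
    !$omp end target
  end subroutine demo_poly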
+
+/* Which map types to check/handle for deep mapping. */
+static bool
+gfc_omp_deep_map_kind_p (tree clause)
+{
+ switch (OMP_CLAUSE_CODE (clause))
+ {
+ case OMP_CLAUSE_MAP:
+ break;
+ case OMP_CLAUSE_FIRSTPRIVATE:
+ case OMP_CLAUSE_TO:
+ case OMP_CLAUSE_FROM:
+ return true;
+ default:
+ gcc_unreachable ();
+ }
+
+ switch (OMP_CLAUSE_MAP_KIND (clause))
+ {
+ case GOMP_MAP_TO:
+ case GOMP_MAP_FROM:
+ case GOMP_MAP_TOFROM:
+ case GOMP_MAP_ALWAYS_TO:
+ case GOMP_MAP_ALWAYS_FROM:
+ case GOMP_MAP_ALWAYS_TOFROM:
+ case GOMP_MAP_ALWAYS_PRESENT_FROM:
+ case GOMP_MAP_ALWAYS_PRESENT_TO:
+ case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
+ case GOMP_MAP_FIRSTPRIVATE:
+ case GOMP_MAP_ALLOC:
+ return true;
+ case GOMP_MAP_POINTER:
+ case GOMP_MAP_TO_PSET:
+ case GOMP_MAP_FORCE_PRESENT:
+ case GOMP_MAP_DELETE:
+ case GOMP_MAP_FORCE_DEVICEPTR:
+ case GOMP_MAP_DEVICE_RESIDENT:
+ case GOMP_MAP_LINK:
+ case GOMP_MAP_IF_PRESENT:
+ case GOMP_MAP_PRESENT_ALLOC:
+ case GOMP_MAP_PRESENT_FROM:
+ case GOMP_MAP_PRESENT_TO:
+ case GOMP_MAP_PRESENT_TOFROM:
+ case GOMP_MAP_FIRSTPRIVATE_INT:
+ case GOMP_MAP_USE_DEVICE_PTR:
+ case GOMP_MAP_ZERO_LEN_ARRAY_SECTION:
+ case GOMP_MAP_FORCE_ALLOC:
+ case GOMP_MAP_FORCE_TO:
+ case GOMP_MAP_FORCE_FROM:
+ case GOMP_MAP_FORCE_TOFROM:
+ case GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT:
+ case GOMP_MAP_STRUCT:
+ case GOMP_MAP_STRUCT_UNORD:
+ case GOMP_MAP_ALWAYS_POINTER:
+ case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
+ case GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION:
+ case GOMP_MAP_RELEASE:
+ case GOMP_MAP_ATTACH:
+ case GOMP_MAP_DETACH:
+ case GOMP_MAP_FORCE_DETACH:
+ case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
+ case GOMP_MAP_FIRSTPRIVATE_POINTER:
+ case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
+ case GOMP_MAP_ATTACH_DETACH:
+ break;
+ default:
+ gcc_unreachable ();
+ }
+ return false;
+}
+
+/* Three OpenMP deep-mapping lang hooks: gfc_omp_deep_mapping{_p,_cnt,}. */
+
+/* Common check for gfc_omp_deep_mapping_p and gfc_omp_deep_mapping_do. */
+
+static tree
+gfc_omp_deep_mapping_int_p (const gimple *ctx, tree clause)
+{
+ if (is_gimple_omp_oacc (ctx) || !gfc_omp_deep_map_kind_p (clause))
+ return NULL_TREE;
+ tree decl = OMP_CLAUSE_DECL (clause);
+ if (OMP_CLAUSE_SIZE (clause) != NULL_TREE
+ && DECL_P (OMP_CLAUSE_SIZE (clause))
+ && DECL_LANG_SPECIFIC (OMP_CLAUSE_SIZE (clause))
+ && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_SIZE (clause)))
+ /* Saved decl. */
+ decl = GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_SIZE (clause));
+ else if (TREE_CODE (decl) == MEM_REF || TREE_CODE (decl) == INDIRECT_REF)
+ /* The following can happen for, e.g., class(t) :: var(..) */
+ decl = TREE_OPERAND (decl, 0);
+ if (TREE_CODE (decl) == INDIRECT_REF)
+ /* The following can happen for, e.g., class(t) :: var(..) */
+ decl = TREE_OPERAND (decl, 0);
+ if (DECL_P (decl)
+ && DECL_LANG_SPECIFIC (decl)
+ && GFC_DECL_SAVED_DESCRIPTOR (decl))
+ decl = GFC_DECL_SAVED_DESCRIPTOR (decl);
+ /* Handle map(to: var.desc) map([to/from/tofrom:] var.desc.data)
+ to get proper map kind by skipping to the next item. */
+ tree tmp = OMP_CLAUSE_CHAIN (clause);
+ if (tmp != NULL_TREE
+ && OMP_CLAUSE_CODE (tmp) == OMP_CLAUSE_CODE (clause)
+ && OMP_CLAUSE_SIZE (tmp) != NULL_TREE
+ && DECL_P (OMP_CLAUSE_SIZE (tmp))
+ && DECL_LANG_SPECIFIC (OMP_CLAUSE_SIZE (tmp))
+ && GFC_DECL_SAVED_DESCRIPTOR (OMP_CLAUSE_SIZE (tmp)) == decl)
+ return NULL_TREE;
+ if (DECL_P (decl)
+ && DECL_LANG_SPECIFIC (decl)
+ && GFC_DECL_SAVED_DESCRIPTOR (decl))
+ decl = GFC_DECL_SAVED_DESCRIPTOR (decl);
+ tree type = TREE_TYPE (decl);
+ if (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+ if (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+ tmp = decl;
+ while (TREE_CODE (tmp) == COMPONENT_REF || TREE_CODE (tmp) == ARRAY_REF)
+ tmp = TREE_OPERAND (tmp, TREE_CODE (tmp) == COMPONENT_REF ? 1 : 0);
+ if (!gfc_is_polymorphic_nonptr (type)
+ && !gfc_has_alloc_comps (type, tmp, true))
+ return NULL_TREE;
+ return decl;
+}
+
+/* Return true if there is deep mapping, even if the number of mappings is
+ known at compile time. */
+bool
+gfc_omp_deep_mapping_p (const gimple *ctx, tree clause)
+{
+ tree decl = gfc_omp_deep_mapping_int_p (ctx, clause);
+ if (decl == NULL_TREE)
+ return false;
+ return true;
+}
+
+/* Handle gfc_omp_deep_mapping{,_cnt}. */
+static tree
+gfc_omp_deep_mapping_do (bool is_cnt, const gimple *ctx, tree clause,
+ unsigned HOST_WIDE_INT tkind, tree data, tree sizes,
+ tree kinds, tree offset_data, tree offset,
+ gimple_seq *seq)
+{
+ tree num = NULL_TREE;
+ location_t loc = OMP_CLAUSE_LOCATION (clause);
+ tree decl = gfc_omp_deep_mapping_int_p (ctx, clause);
+ bool poly_warned = false;
+ if (decl == NULL_TREE)
+ return NULL_TREE;
+ /* Handle: map(alloc:dt%cmp [len: ptr_size]) map(tofrom: D.0123...),
+ where GFC_DECL_SAVED_DESCRIPTOR(D.0123) is the same (here: dt%cmp). */
+ if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
+ && (OMP_CLAUSE_MAP_KIND (clause) == GOMP_MAP_ALLOC
+ || OMP_CLAUSE_MAP_KIND (clause) == GOMP_MAP_PRESENT_ALLOC))
+ {
+ tree c = clause;
+ while ((c = OMP_CLAUSE_CHAIN (c)) != NULL_TREE)
+ {
+ if (!gfc_omp_deep_map_kind_p (c))
+ continue;
+ tree d = gfc_omp_deep_mapping_int_p (ctx, c);
+ if (d != NULL_TREE && operand_equal_p (decl, d, 0))
+ return NULL_TREE;
+ }
+ }
+ tree type = TREE_TYPE (decl);
+ if (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+ if (POINTER_TYPE_P (type))
+ type = TREE_TYPE (type);
+ bool poly = gfc_is_polymorphic_nonptr (type);
+
+ if (is_cnt)
+ {
+ num = build_decl (loc, VAR_DECL,
+ create_tmp_var_name ("n_deepmap"), size_type_node);
+ tree tmp = fold_build2_loc (loc, MODIFY_EXPR, size_type_node, num,
+ build_int_cst (size_type_node, 0));
+ gimple_add_tmp_var (num);
+ gimplify_and_add (tmp, seq);
+ }
+ else
+ gcc_assert (short_unsigned_type_node == TREE_TYPE (TREE_TYPE (kinds)));
+
+ bool do_copy = poly;
+ bool do_alloc_check = false;
+ tree token = NULL_TREE;
+ tree tmp = decl;
+ if (poly)
+ {
+ tmp = TYPE_FIELDS (type);
+ type = TREE_TYPE (tmp);
+ }
+ else
+ while (TREE_CODE (tmp) == COMPONENT_REF || TREE_CODE (tmp) == ARRAY_REF)
+ tmp = TREE_OPERAND (tmp, TREE_CODE (tmp) == COMPONENT_REF ? 1 : 0);
+ /* If the clause argument is nonallocatable, skip the is-allocated check. */
+ if (GFC_DECL_GET_SCALAR_ALLOCATABLE (tmp)
+ || GFC_DECL_GET_SCALAR_POINTER (tmp)
+ || (GFC_DESCRIPTOR_TYPE_P (type)
+ && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
+ || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER
+ || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT)))
+ do_alloc_check = true;
+
+ if (!is_cnt
+ && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
+ && (tkind == GOMP_MAP_ALLOC
+ || (tkind == GOMP_MAP_FROM
+ && (gimple_omp_target_kind (ctx)
+ != GF_OMP_TARGET_KIND_EXIT_DATA)))
+ && (poly || gfc_omp_replace_alloc_by_to_mapping (type, tmp, true)))
+ OMP_CLAUSE_SET_MAP_KIND (clause, tkind == GOMP_MAP_ALLOC ? GOMP_MAP_TO
+ : GOMP_MAP_TOFROM);
+
+ /* TODO: For map(a(:)), we know it is present & allocated. */
+
+ tree present = (DECL_P (decl) ? gfc_omp_check_optional_argument (decl, true)
+ : NULL_TREE);
+ if (POINTER_TYPE_P (TREE_TYPE (decl))
+ && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
+ decl = build_fold_indirect_ref (decl);
+ if (present)
+ {
+ tree then_label = create_artificial_label (loc);
+ tree end_label = create_artificial_label (loc);
+ gimple_seq seq2 = NULL;
+ tmp = force_gimple_operand (present, &seq2, true, NULL_TREE);
+ gimple_seq_add_seq (seq, seq2);
+ gimple_seq_add_stmt (seq,
+ gimple_build_cond_from_tree (present,
+ then_label, end_label));
+ gimple_seq_add_stmt (seq, gimple_build_label (then_label));
+ gfc_omp_deep_mapping_item (is_cnt, do_copy, do_alloc_check, loc, decl,
+ &token, tkind, data, sizes, kinds,
+ offset_data, offset, num, seq, ctx,
+ &poly_warned);
+ gimple_seq_add_stmt (seq, gimple_build_label (end_label));
+ }
+ else
+ gfc_omp_deep_mapping_item (is_cnt, do_copy, do_alloc_check, loc, decl,
+ &token, tkind, data, sizes, kinds, offset_data,
+ offset, num, seq, ctx, &poly_warned);
+ /* Multiply by 2 as there are two mappings: data + pointer assign. */
+ if (is_cnt)
+ gimplify_assign (num,
+ fold_build2_loc (loc, MULT_EXPR,
+ size_type_node, num,
+ build_int_cst (size_type_node, 2)), seq);
+ return num;
+}
+
+/* Return a tree with a variable which contains the count of deep mappings
+ (the value depends, e.g., on the allocation status). */
+tree
+gfc_omp_deep_mapping_cnt (const gimple *ctx, tree clause, gimple_seq *seq)
+{
+ return gfc_omp_deep_mapping_do (true, ctx, clause, 0, NULL_TREE, NULL_TREE,
+ NULL_TREE, NULL_TREE, NULL_TREE, seq);
+}
+
+/* Does the actual deep mapping. */
+void
+gfc_omp_deep_mapping (const gimple *ctx, tree clause,
+ unsigned HOST_WIDE_INT tkind, tree data,
+ tree sizes, tree kinds, tree offset_data, tree offset,
+ gimple_seq *seq)
+{
+ (void) gfc_omp_deep_mapping_do (false, ctx, clause, tkind, data, sizes, kinds,
+ offset_data, offset, seq);
+}
+
/* Return true if DECL is a scalar variable (for the purpose of
implicit firstprivatization/mapping). Only if 'ptr_alloc_ok.'
is true, allocatables and pointers are permitted. */
@@ -2478,6 +3265,18 @@ gfc_trans_omp_array_section (stmtblock_t *block, gfc_exec_op op,
elemsz = fold_convert (gfc_array_index_type, elemsz);
OMP_CLAUSE_SIZE (node) = fold_build2 (MULT_EXPR, gfc_array_index_type,
OMP_CLAUSE_SIZE (node), elemsz);
+ if (n->expr->ts.type == BT_DERIVED
+ && n->expr->ts.u.derived->attr.alloc_comp)
+ {
+ /* Save array descriptor for use in gfc_omp_deep_mapping{,_p,_cnt};
+ force evaluate to ensure that it is not gimplified + is a decl. */
+ tree tmp = OMP_CLAUSE_SIZE (node);
+ tree var = gfc_create_var (TREE_TYPE (tmp), NULL);
+ gfc_add_modify_loc (input_location, block, var, tmp);
+ OMP_CLAUSE_SIZE (node) = var;
+ gfc_allocate_lang_decl (var);
+ GFC_DECL_SAVED_DESCRIPTOR (var) = se.expr;
+ }
}
gcc_assert (se.post.head == NULL_TREE);
gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
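A sketch of the case handled here (illustrative code): an array section of derived-type elements with allocatable components, whose byte size is forced into a temporary so GFC_DECL_SAVED_DESCRIPTOR can carry the section descriptor to the deep-mapping hooks:

  subroutine demo_section
    type t
       integer, allocatable :: a(:)
    end type t
    type(t) :: v(10)
    allocate (v(2)%a(3))
    !$omp target map(tofrom: v(2:5))
    v(2)%a(1) = 1
    !$omp end target
  end subroutine demo_section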
@@ -3213,8 +4012,9 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
if (!n->sym->attr.referenced)
continue;
+ location_t map_loc = gfc_get_location (&n->where);
bool always_modifier = false;
- tree node = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ tree node = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
tree node2 = NULL_TREE;
tree node3 = NULL_TREE;
tree node4 = NULL_TREE;
@@ -3361,7 +4161,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
&& n->u.map.op != OMP_MAP_RELEASE)
{
gcc_assert (n->sym->ts.u.cl->backend_decl);
- node5 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ node5 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node5, GOMP_MAP_ALWAYS_TO);
OMP_CLAUSE_DECL (node5) = n->sym->ts.u.cl->backend_decl;
OMP_CLAUSE_SIZE (node5)
@@ -3378,7 +4178,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
ptr = build_fold_indirect_ref (ptr);
OMP_CLAUSE_DECL (node) = ptr;
OMP_CLAUSE_SIZE (node) = gfc_class_vtab_size_get (decl);
- node2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ node2 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node2, GOMP_MAP_ATTACH_DETACH);
OMP_CLAUSE_DECL (node2) = gfc_class_data_get (decl);
OMP_CLAUSE_SIZE (node2) = size_int (0);
@@ -3434,8 +4234,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
else
size = size_int (0);
- node4 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node4 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node4, gmk);
OMP_CLAUSE_DECL (node4) = decl;
OMP_CLAUSE_SIZE (node4) = size;
@@ -3459,8 +4258,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
else
size = size_int (0);
- node3 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node3 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node3, gmk);
OMP_CLAUSE_DECL (node3) = decl;
OMP_CLAUSE_SIZE (node3) = size;
@@ -3477,7 +4275,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
ptr = build_fold_indirect_ref (ptr);
OMP_CLAUSE_DECL (node) = ptr;
- node2 = build_omp_clause (input_location, OMP_CLAUSE_MAP);
+ node2 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_DECL (node2) = decl;
OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
if (n->u.map.op == OMP_MAP_DELETE)
@@ -3493,8 +4291,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
&& n->u.map.op != OMP_MAP_DELETE
&& n->u.map.op != OMP_MAP_RELEASE)
{
- node3 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node3 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
if (present)
{
ptr = gfc_conv_descriptor_data_get (decl);
@@ -3634,10 +4431,10 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
{
/* A single indirectref is handled by the middle end. */
gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
- decl = TREE_OPERAND (decl, 0);
- decl = gfc_build_cond_assign_expr (block, present, decl,
+ tree tmp = TREE_OPERAND (decl, 0);
+ tmp = gfc_build_cond_assign_expr (block, present, tmp,
null_pointer_node);
- OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (decl);
+ OMP_CLAUSE_DECL (node) = build_fold_indirect_ref (tmp);
}
else
OMP_CLAUSE_DECL (node) = decl;
@@ -3672,6 +4469,33 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
size = gfc_evaluate_now (size, block);
OMP_CLAUSE_SIZE (node) = size;
}
+ if ((TREE_CODE (decl) != PARM_DECL
+ || DECL_ARTIFICIAL (OMP_CLAUSE_DECL (node)))
+ && n->sym->ts.type == BT_DERIVED
+ && n->sym->ts.u.derived->attr.alloc_comp)
+ {
+ /* Save array descriptor for use in
+ gfc_omp_deep_mapping{,_p,_cnt}; force evaluate
+ to ensure that it is not gimplified + is a decl. */
+ tree tmp = OMP_CLAUSE_SIZE (node);
+ if (tmp == NULL_TREE)
+ tmp = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
+ : TYPE_SIZE_UNIT (TREE_TYPE (decl));
+ tree var = gfc_create_var (TREE_TYPE (tmp), NULL);
+ gfc_add_modify_loc (input_location, block, var, tmp);
+ OMP_CLAUSE_SIZE (node) = var;
+ gfc_allocate_lang_decl (var);
+ if (TREE_CODE (decl) == INDIRECT_REF)
+ decl = TREE_OPERAND (decl, 0);
+ if (TREE_CODE (decl) == INDIRECT_REF)
+ decl = TREE_OPERAND (decl, 0);
+ if (DECL_LANG_SPECIFIC (decl)
+ && GFC_DECL_SAVED_DESCRIPTOR (decl))
+ GFC_DECL_SAVED_DESCRIPTOR (var)
+ = GFC_DECL_SAVED_DESCRIPTOR (decl);
+ else
+ GFC_DECL_SAVED_DESCRIPTOR (var) = decl;
+ }
}
else if (n->expr
&& n->expr->expr_type == EXPR_VARIABLE
@@ -3727,8 +4551,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
goto finalize_map_clause;
}
- node2 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node2 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node2, GOMP_MAP_ATTACH_DETACH);
OMP_CLAUSE_DECL (node2)
= POINTER_TYPE_P (TREE_TYPE (se.expr))
@@ -3754,13 +4577,37 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
kind = GOMP_MAP_RELEASE;
else
kind = GOMP_MAP_TO;
- node3 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node3 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node3, kind);
OMP_CLAUSE_DECL (node3) = se.string_length;
OMP_CLAUSE_SIZE (node3)
= TYPE_SIZE_UNIT (gfc_charlen_type_node);
}
+ if (!openacc
+ && n->expr->ts.type == BT_DERIVED
+ && n->expr->ts.u.derived->attr.alloc_comp)
+ {
+ /* Save array descriptor for use in
+ gfc_omp_deep_mapping{,_p,_cnt}; force evaluate
+ to ensure that it is not gimplified + is a decl. */
+ tree tmp = OMP_CLAUSE_SIZE (node);
+ if (tmp == NULL_TREE)
+ tmp = (DECL_P (se.expr)
+ ? DECL_SIZE_UNIT (se.expr)
+ : TYPE_SIZE_UNIT (TREE_TYPE (se.expr)));
+ tree var = gfc_create_var (TREE_TYPE (tmp), NULL);
+ gfc_add_modify_loc (input_location, block, var, tmp);
+ OMP_CLAUSE_SIZE (node) = var;
+ gfc_allocate_lang_decl (var);
+ if (TREE_CODE (se.expr) == INDIRECT_REF)
+ se.expr = TREE_OPERAND (se.expr, 0);
+ if (DECL_LANG_SPECIFIC (se.expr)
+ && GFC_DECL_SAVED_DESCRIPTOR (se.expr))
+ GFC_DECL_SAVED_DESCRIPTOR (var)
+ = GFC_DECL_SAVED_DESCRIPTOR (se.expr);
+ else
+ GFC_DECL_SAVED_DESCRIPTOR (var) = se.expr;
+ }
}
}
else if (n->expr
@@ -3800,7 +4647,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
&& (lastref->u.c.component->ts.type == BT_DERIVED
|| lastref->u.c.component->ts.type == BT_CLASS))
{
- if (pointer || (openacc && allocatable))
+ if (pointer || allocatable)
{
/* If it's a bare attach/detach clause, we just want
to perform a single attach/detach operation, of the
@@ -3880,8 +4727,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
OMP_CLAUSE_DECL (node) = data;
OMP_CLAUSE_SIZE (node) = size;
- node2 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node2 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node2,
GOMP_MAP_ATTACH_DETACH);
OMP_CLAUSE_DECL (node2) = build_fold_addr_expr (data);
@@ -3893,6 +4739,22 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
OMP_CLAUSE_SIZE (node)
= TYPE_SIZE_UNIT (TREE_TYPE (inner));
}
+ if (!openacc
+ && n->expr->ts.type == BT_DERIVED
+ && n->expr->ts.u.derived->attr.alloc_comp)
+ {
+ /* Save array descriptor for use in
+ gfc_omp_deep_mapping{,_p,_cnt}; force evaluate
+ to ensure that it is not gimplified + is a decl. */
+ tree tmp = OMP_CLAUSE_SIZE (node);
+ tree var = gfc_create_var (TREE_TYPE (tmp), NULL);
+ gfc_add_modify_loc (input_location, block, var, tmp);
+ OMP_CLAUSE_SIZE (node) = var;
+ gfc_allocate_lang_decl (var);
+ if (TREE_CODE (inner) == INDIRECT_REF)
+ inner = TREE_OPERAND (inner, 0);
+ GFC_DECL_SAVED_DESCRIPTOR (var) = inner;
+ }
}
else if (lastref->type == REF_ARRAY
&& lastref->u.ar.type == AR_FULL)
@@ -3952,8 +4814,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
elemsz = TYPE_SIZE_UNIT (elemsz);
elemsz = fold_build2 (MULT_EXPR, size_type_node,
len, elemsz);
- node4 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node4 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node4, map_kind);
OMP_CLAUSE_DECL (node4) = se.string_length;
OMP_CLAUSE_SIZE (node4)
@@ -3963,8 +4824,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
OMP_CLAUSE_SIZE (node)
= fold_build2 (MULT_EXPR, gfc_array_index_type,
OMP_CLAUSE_SIZE (node), elemsz);
- node2 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node2 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
if (map_kind == GOMP_MAP_RELEASE
|| map_kind == GOMP_MAP_DELETE)
{
@@ -3978,6 +4838,23 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
OMP_CLAUSE_SIZE (node2) = TYPE_SIZE_UNIT (type);
if (!openacc)
{
+ if (n->expr->ts.type == BT_DERIVED
+ && n->expr->ts.u.derived->attr.alloc_comp)
+ {
+ /* Save array descriptor for use
+ in gfc_omp_deep_mapping{,_p,_cnt}; force
+ evaluate to ensure that it is
+ not gimplified + is a decl. */
+ tree tmp = OMP_CLAUSE_SIZE (node);
+ tree var = gfc_create_var (TREE_TYPE (tmp),
+ NULL);
+ gfc_add_modify_loc (map_loc, block,
+ var, tmp);
+ OMP_CLAUSE_SIZE (node) = var;
+ gfc_allocate_lang_decl (var);
+ GFC_DECL_SAVED_DESCRIPTOR (var) = inner;
+ }
+
gfc_omp_namelist *n2
= clauses->lists[OMP_LIST_MAP];
@@ -4035,8 +4912,7 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
if (drop_mapping)
continue;
}
- node3 = build_omp_clause (input_location,
- OMP_CLAUSE_MAP);
+ node3 = build_omp_clause (map_loc, OMP_CLAUSE_MAP);
OMP_CLAUSE_SET_MAP_KIND (node3,
GOMP_MAP_ATTACH_DETACH);
OMP_CLAUSE_DECL (node3)
@@ -4107,7 +4983,8 @@ gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
default:
gcc_unreachable ();
}
- tree node = build_omp_clause (input_location, clause_code);
+ tree node = build_omp_clause (gfc_get_location (&n->where),
+ clause_code);
if (n->expr == NULL
|| (n->expr->ref->type == REF_ARRAY
&& n->expr->ref->u.ar.type == AR_FULL