Diffstat (limited to 'gcc/cp/semantics.cc')
-rw-r--r--  gcc/cp/semantics.cc  558
1 file changed, 464 insertions(+), 94 deletions(-)
diff --git a/gcc/cp/semantics.cc b/gcc/cp/semantics.cc
index b0a5f9a..1937ace 100644
--- a/gcc/cp/semantics.cc
+++ b/gcc/cp/semantics.cc
@@ -45,13 +45,14 @@ along with GCC; see the file COPYING3. If not see
#include "gomp-constants.h"
#include "predict.h"
#include "memmodel.h"
+#include "gimplify.h"
+#include "contracts.h"
/* These routines provide a modular interface to perform many parsing
operations. They may therefore be used during actual parsing, or
during template instantiation, which may be regarded as a
degenerate form of parsing. */
-static tree maybe_convert_cond (tree);
static tree finalize_nrv_r (tree *, int *, void *);
/* Used for OpenMP non-static data member privatization. */
@@ -677,7 +678,7 @@ do_poplevel (tree stmt_list)
/* Begin a new scope. */
-static tree
+tree
do_pushlevel (scope_kind sk)
{
tree ret = push_stmt_list ();
@@ -1116,7 +1117,7 @@ annotate_saver::restore (tree new_inner)
statement. Convert it to a boolean value, if appropriate.
In addition, verify sequence points if -Wsequence-point is enabled. */
-static tree
+tree
maybe_convert_cond (tree cond)
{
/* Empty conditions remain empty. */
@@ -1180,10 +1181,13 @@ finish_expr_stmt (tree expr)
expr = error_mark_node;
/* Simplification of inner statement expressions, compound exprs,
- etc can result in us already having an EXPR_STMT. */
+ etc can result in us already having an EXPR_STMT or other statement
+ tree. Don't wrap them in EXPR_STMT. */
if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
{
- if (TREE_CODE (expr) != EXPR_STMT)
+ if (TREE_CODE (expr) != EXPR_STMT
+ && !STATEMENT_CLASS_P (expr)
+ && TREE_CODE (expr) != STATEMENT_LIST)
expr = build_stmt (loc, EXPR_STMT, expr);
expr = maybe_cleanup_point_expr_void (expr);
}
@@ -1851,6 +1855,21 @@ finish_range_for_decl (tree range_for_stmt, tree decl, tree expr)
RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk_block);
}
+/* Begin the scope of an expansion-statement. */
+
+tree
+begin_template_for_scope (tree *init)
+{
+ tree scope = do_pushlevel (sk_template_for);
+
+ if (processing_template_decl)
+ *init = push_stmt_list ();
+ else
+ *init = NULL_TREE;
+
+ return scope;
+}
+
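Illustrative sketch (not part of the patch): the kind of C++26 expansion statement the new sk_template_for scope presumably services; the syntax follows the expansion-statements proposal (P1306) and the names below are made up.

    #include <tuple>

    constexpr auto cfg = std::tuple{1, 2u, 3.5};

    constexpr double sum_cfg()
    {
      double s = 0;
      // Each iteration is expanded separately and gets its own block scope,
      // which is what begin_template_for_scope opens.
      template for (auto e : cfg)
        s += e;
      return s;
    }
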
/* Finish a break-statement. */
tree
@@ -2335,7 +2354,8 @@ finish_asm_stmt (location_t loc, int volatile_p, tree string,
oconstraints[i] = constraint;
if (parse_output_constraint (&constraint, i, ninputs, noutputs,
- &allows_mem, &allows_reg, &is_inout))
+ &allows_mem, &allows_reg, &is_inout,
+ nullptr))
{
/* If the operand is going to end up in memory,
mark it addressable. */
@@ -2394,7 +2414,8 @@ finish_asm_stmt (location_t loc, int volatile_p, tree string,
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
bool constraint_parsed
= parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
+ oconstraints, &allows_mem, &allows_reg,
+ nullptr);
/* If the operand is going to end up in memory, don't call
decay_conversion. */
if (constraint_parsed && !allows_reg && allows_mem)
@@ -2767,7 +2788,7 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope,
else if (PACK_EXPANSION_P (type))
/* Don't bother trying to represent this. */
type = NULL_TREE;
- else if (WILDCARD_TYPE_P (TREE_TYPE (object)))
+ else if (!TREE_TYPE (object) || WILDCARD_TYPE_P (TREE_TYPE (object)))
/* We don't know what the eventual quals will be, so punt until
instantiation time.
@@ -3082,6 +3103,7 @@ finish_stmt_expr_expr (tree expr, tree stmt_expr)
}
else if (processing_template_decl)
{
+ /* Not finish_expr_stmt because we don't want convert_to_void. */
expr = build_stmt (input_location, EXPR_STMT, expr);
expr = add_stmt (expr);
/* Mark the last statement so that we can recognize it as such at
@@ -3321,6 +3343,14 @@ finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
if (type_dependent_expression_p (fn)
|| any_type_dependent_arguments_p (*args))
{
+ if (koenig_p
+ && TREE_CODE (orig_fn) == FUNCTION_DECL
+ && !fndecl_built_in_p (orig_fn))
+ /* For an ADL-enabled call where unqualified lookup found a
+ single non-template function, wrap it in an OVERLOAD so that
+ later substitution doesn't overeagerly mark the function as
+ used. */
+ orig_fn = ovl_make (orig_fn, NULL_TREE);
result = build_min_nt_call_vec (orig_fn, *args);
SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn));
KOENIG_LOOKUP_P (result) = koenig_p;
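Illustrative sketch (not part of the patch) of the situation the OVERLOAD wrapping above addresses, as described in its comment: unqualified lookup in the template sees a single non-template f, but the call is type-dependent, so ADL at instantiation time must still be able to pick a better candidate without the early one being marked as used.

    void f(long);                 // the single function unqualified lookup finds

    template <typename T>
    void call(T t)
    {
      f(t);                       // dependent call: candidates completed at instantiation
    }

    namespace N {
      struct S { };
      void f(S);                  // intended target, found by ADL
    }

    int main()
    {
      call(N::S{});               // resolves to N::f(S)
    }
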
@@ -3593,16 +3623,13 @@ finish_this_expr (void)
{
tree result = NULL_TREE;
- if (current_class_ptr)
- {
- tree type = TREE_TYPE (current_class_ref);
-
- /* In a lambda expression, 'this' refers to the captured 'this'. */
- if (LAMBDA_TYPE_P (type))
- result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type), true);
- else
- result = current_class_ptr;
- }
+ if (current_class_ref && !LAMBDA_TYPE_P (TREE_TYPE (current_class_ref)))
+ result = current_class_ptr;
+ else if (current_class_type && LAMBDA_TYPE_P (current_class_type))
+ result = (lambda_expr_this_capture
+ (CLASSTYPE_LAMBDA_EXPR (current_class_type), /*add*/true));
+ else
+ gcc_checking_assert (!current_class_ptr);
if (result)
/* The keyword 'this' is a prvalue expression. */
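Illustrative sketch (not part of the patch): with the rewrite above, 'this' inside a lambda resolves through the closure's captured copy of the enclosing object pointer, and the assert only fires when there is genuinely no object context.

    struct Widget {
      int n = 0;
      int get() const
      {
        auto l = [this] { return this->n; };   // 'this' here is the captured Widget*
        return l();
      }
    };
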
@@ -3732,6 +3759,11 @@ finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr,
if (!(complain & tf_warning))
return result;
+ /* These will never fold into a constant, so no need to check for
+ overflow for them. */
+ if (code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR)
+ return result;
+
tree result_ovl = result;
tree expr_ovl = expr;
@@ -3976,9 +4008,15 @@ finish_compound_literal (tree type, tree compound_literal,
tree
finish_fname (tree id)
{
- tree decl;
-
- decl = fname_decl (input_location, C_RID_CODE (id), id);
+ tree decl = fname_decl (input_location, C_RID_CODE (id), id);
+ /* [expr.prim.lambda.closure]/16 "Unless the compound-statement is that
+ of a consteval-block-declaration, a variable __func__ is implicitly
+ defined...". We could be in a consteval block in a function, though,
+ and then we shouldn't warn. */
+ if (current_function_decl
+ && !current_nonlambda_function (/*only_skip_consteval_block_p=*/true))
+ pedwarn (input_location, 0, "%qD is not defined outside of function scope",
+ decl);
if (processing_template_decl && current_function_decl
&& decl != error_mark_node)
decl = DECL_NAME (decl);
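Illustrative sketch (not part of the patch) of the new pedwarn: __func__ is only implicitly defined inside a function body, so a use at namespace scope is diagnosed, while a use inside a function (even from a consteval block nested in it, per the comment above) is not.

    const char *outside = __func__;   // pedwarn: not defined outside of function scope

    void g()
    {
      const char *inside = __func__;  // OK: names the enclosing function "g"
    }
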
@@ -4474,6 +4512,17 @@ baselink_for_fns (tree fns)
return build_baselink (conv_path, access_path, fns, /*optype=*/NULL_TREE);
}
+/* Returns true iff we are currently parsing a lambda-declarator. */
+
+static bool
+parsing_lambda_declarator ()
+{
+ cp_binding_level *b = current_binding_level;
+ while (b->kind == sk_template_parms || b->kind == sk_function_parms)
+ b = b->level_chain;
+ return b->kind == sk_lambda;
+}
+
/* Returns true iff DECL is a variable from a function outside
the current one. */
@@ -4488,7 +4537,15 @@ outer_var_p (tree decl)
/* Don't get confused by temporaries. */
&& DECL_NAME (decl)
&& (DECL_CONTEXT (decl) != current_function_decl
- || parsing_nsdmi ()));
+ || parsing_nsdmi ()
+ /* Also consider captures as outer vars if we are in
+ decltype in a lambda declarator as in:
+ auto l = [j=0]() -> decltype((j)) { ... }
+ for the sake of finish_decltype_type.
+
+ (Similar issue also affects non-lambdas, but vexing parse
+ makes it more difficult to handle than lambdas.) */
+ || parsing_lambda_declarator ()));
}
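Slightly expanded form of the example from the comment above (illustrative only): the init-capture j is named inside decltype in the lambda-declarator, before the body is open, so it has to count as an outer variable for finish_decltype_type to handle it.

    auto l = [j = 0]() -> decltype((j)) {   // j referenced in the declarator
      return j;
    };
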
/* As above, but also checks that DECL is automatic. */
@@ -4534,7 +4591,7 @@ process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use)
if (!mark_used (decl, complain))
return error_mark_node;
- if (parsing_nsdmi ())
+ if (parsing_nsdmi () || parsing_lambda_declarator ())
containing_function = NULL_TREE;
if (containing_function && LAMBDA_FUNCTION_P (containing_function))
@@ -4755,6 +4812,7 @@ finish_id_expression_1 (tree id_expression,
body, except inside an unevaluated context (i.e. decltype). */
if (TREE_CODE (decl) == PARM_DECL
&& DECL_CONTEXT (decl) == NULL_TREE
+ && !CONSTRAINT_VAR_P (decl)
&& !cp_unevaluated_operand
&& !processing_contract_condition
&& !processing_omp_trait_property_expr)
@@ -5086,23 +5144,33 @@ finish_underlying_type (tree type)
static tree
finish_type_pack_element (tree idx, tree types, tsubst_flags_t complain)
{
- idx = maybe_constant_value (idx);
- if (TREE_CODE (idx) != INTEGER_CST || !INTEGRAL_TYPE_P (TREE_TYPE (idx)))
+ idx = maybe_constant_value (idx, NULL_TREE, mce_true);
+ if (!INTEGRAL_TYPE_P (TREE_TYPE (idx)))
{
if (complain & tf_error)
- error ("pack index is not an integral constant");
+ error ("pack index has non-integral type %qT", TREE_TYPE (idx));
+ return error_mark_node;
+ }
+ if (TREE_CODE (idx) != INTEGER_CST)
+ {
+ if (complain & tf_error)
+ {
+ error ("pack index is not an integral constant");
+ cxx_constant_value (idx);
+ }
return error_mark_node;
}
if (tree_int_cst_sgn (idx) < 0)
{
if (complain & tf_error)
- error ("pack index is negative");
+ error ("pack index %qE is negative", idx);
return error_mark_node;
}
if (wi::to_widest (idx) >= TREE_VEC_LENGTH (types))
{
if (complain & tf_error)
- error ("pack index is out of range");
+ error ("pack index %qE is out of range for pack of length %qd",
+ idx, TREE_VEC_LENGTH (types));
return error_mark_node;
}
return TREE_VEC_ELT (types, tree_to_shwi (idx));
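Illustrative sketch (not part of the patch), assuming finish_type_pack_element backs C++26 pack indexing; the sharper diagnostics above fire when the index is non-integral, non-constant, negative, or out of range.

    template <typename... Ts>
    using first_t = Ts...[0];                 // pack indexing

    static_assert(__is_same(first_t<int, char>, int));
    // first_t<> would now be rejected with
    // "pack index 0 is out of range for pack of length 0"
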
@@ -6277,9 +6345,7 @@ handle_omp_array_sections (tree &c, enum c_omp_region_type ort)
tree *tp = &OMP_CLAUSE_DECL (c);
if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
|| OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
- && TREE_CODE (*tp) == TREE_LIST
- && TREE_PURPOSE (*tp)
- && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC)
+ && OMP_ITERATOR_DECL_P (*tp))
tp = &TREE_VALUE (*tp);
tree first = handle_omp_array_sections_1 (c, *tp, types,
maybe_zero_len, first_non_one,
@@ -6722,6 +6788,97 @@ omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp,
return id;
}
+/* Return identifier to look up for omp declare mapper. */
+
+tree
+omp_mapper_id (tree mapper_id, tree type)
+{
+ const char *p = NULL;
+ const char *m = NULL;
+
+ if (mapper_id == NULL_TREE)
+ p = "";
+ else if (TREE_CODE (mapper_id) == IDENTIFIER_NODE)
+ p = IDENTIFIER_POINTER (mapper_id);
+ else
+ return error_mark_node;
+
+ if (type != NULL_TREE)
+ m = mangle_type_string (TYPE_MAIN_VARIANT (type));
+
+ const char prefix[] = "omp declare mapper ";
+ size_t lenp = sizeof (prefix);
+ if (strncmp (p, prefix, lenp - 1) == 0)
+ lenp = 1;
+ size_t len = strlen (p);
+ size_t lenm = m ? strlen (m) + 1 : 0;
+ char *name = XALLOCAVEC (char, lenp + len + lenm);
+ memcpy (name, prefix, lenp - 1);
+ memcpy (name + lenp - 1, p, len + 1);
+ if (m)
+ {
+ name[lenp + len - 1] = '~';
+ memcpy (name + lenp + len, m, lenm);
+ }
+ return get_identifier (name);
+}
+
+tree
+cxx_omp_mapper_lookup (tree id, tree type)
+{
+ if (!RECORD_OR_UNION_TYPE_P (type))
+ return NULL_TREE;
+ id = omp_mapper_id (id, type);
+ return lookup_name (id);
+}
+
+tree
+cxx_omp_extract_mapper_directive (tree vardecl)
+{
+ gcc_assert (TREE_CODE (vardecl) == VAR_DECL);
+
+ /* Instantiate the decl if we haven't already. */
+ mark_used (vardecl);
+ tree body = DECL_INITIAL (vardecl);
+
+ if (TREE_CODE (body) == STATEMENT_LIST)
+ {
+ tree_stmt_iterator tsi = tsi_start (body);
+ gcc_assert (TREE_CODE (tsi_stmt (tsi)) == DECL_EXPR);
+ tsi_next (&tsi);
+ body = tsi_stmt (tsi);
+ }
+
+ gcc_assert (TREE_CODE (body) == OMP_DECLARE_MAPPER);
+
+ return body;
+}
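Illustrative sketch (not part of the patch) of the OpenMP declare mapper usage these helpers serve; the mapper is registered under a mangled identifier built by omp_mapper_id and found by type via cxx_omp_mapper_lookup.

    struct vec { int len; double *data; };

    // Default mapper for vec: map the struct and the array it points to.
    #pragma omp declare mapper(vec v) map(v, v.data[0:v.len])

    void scale(vec &v, double k)
    {
    #pragma omp target map(tofrom: v)   // 'v' is mapped through the mapper above
      for (int i = 0; i < v.len; i++)
        v.data[i] *= k;
    }
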
+
+/* For now we can handle singleton OMP_ARRAY_SECTIONs with custom mappers, but
+ nothing more complicated. */
+
+tree
+cxx_omp_map_array_section (location_t loc, tree t)
+{
+ tree low = TREE_OPERAND (t, 1);
+ tree len = TREE_OPERAND (t, 2);
+
+ if (len && integer_onep (len))
+ {
+ t = TREE_OPERAND (t, 0);
+
+ if (!low)
+ low = integer_zero_node;
+
+ if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
+ t = convert_from_reference (t);
+
+ t = build_array_ref (loc, t, low);
+ }
+
+ return t;
+}
+
/* Helper function for cp_parser_omp_declare_reduction_exprs
and tsubst_omp_udr.
Remove CLEANUP_STMT for data (omp_priv variable).
@@ -7203,6 +7360,33 @@ finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor)
return false;
}
+/* Check an instance of an "omp declare mapper" function. */
+
+bool
+cp_check_omp_declare_mapper (tree udm)
+{
+ tree type = TREE_TYPE (udm);
+ location_t loc = DECL_SOURCE_LOCATION (udm);
+
+ if (type == error_mark_node)
+ return false;
+
+ if (!processing_template_decl && !RECORD_OR_UNION_TYPE_P (type))
+ {
+ error_at (loc, "%qT is not a struct, union or class type in "
+ "%<#pragma omp declare mapper%>", type);
+ return false;
+ }
+ if (!processing_template_decl && CLASSTYPE_VBASECLASSES (type))
+ {
+ error_at (loc, "%qT must not be a virtual base class in "
+ "%<#pragma omp declare mapper%>", type);
+ return false;
+ }
+
+ return true;
+}
+
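Illustrative sketch (not part of the patch) of the kind of declarations the two checks above reject:

    #pragma omp declare mapper(int i) map(i)
        // error: "int" is not a struct, union or class type in
        // "#pragma omp declare mapper"

    struct B { int b; };
    struct D : virtual B { int d; };
    #pragma omp declare mapper(D x) map(x.d)
        // error: rejected because D has a virtual base class
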
/* Called from finish_struct_1. linear(this) or linear(this:step)
clauses might not be finalized yet because the class has been incomplete
when parsing #pragma omp declare simd methods. Fix those up now. */
@@ -7466,17 +7650,17 @@ cp_oacc_check_attachments (tree c)
/* Update OMP_CLAUSE_INIT_PREFER_TYPE in case template substitution
happened. */
-static void
-cp_omp_init_prefer_type_update (tree c)
+tree
+cp_finish_omp_init_prefer_type (tree pref_type)
{
if (processing_template_decl
- || OMP_CLAUSE_INIT_PREFER_TYPE (c) == NULL_TREE
- || TREE_CODE (OMP_CLAUSE_INIT_PREFER_TYPE (c)) != TREE_LIST)
- return;
+ || pref_type == NULL_TREE
+ || TREE_CODE (pref_type) != TREE_LIST)
+ return pref_type;
- tree t = TREE_PURPOSE (OMP_CLAUSE_INIT_PREFER_TYPE (c));
+ tree t = TREE_PURPOSE (pref_type);
char *str = const_cast<char *> (TREE_STRING_POINTER (t));
- tree fr_list = TREE_VALUE (OMP_CLAUSE_INIT_PREFER_TYPE (c));
+ tree fr_list = TREE_VALUE (pref_type);
int len = TREE_VEC_LENGTH (fr_list);
int cnt = 0;
@@ -7502,7 +7686,7 @@ cp_omp_init_prefer_type_update (tree c)
|| !tree_fits_shwi_p (value))
error_at (loc,
"expected string literal or "
- "constant integer expression instead of %qE", value); // FIXME of 'qE' and no 'loc'?
+ "constant integer expression instead of %qE", value);
else
{
HOST_WIDE_INT n = tree_to_shwi (value);
@@ -7531,7 +7715,7 @@ cp_omp_init_prefer_type_update (tree c)
if (cnt >= len)
break;
}
- OMP_CLAUSE_INIT_PREFER_TYPE (c) = t;
+ return t;
}
/* For all elements of CLAUSES, validate them vs OpenMP constraints.
@@ -7602,7 +7786,14 @@ finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
/* We've reached the end of a list of expanded nodes. Reset the group
start pointer. */
if (c == grp_sentinel)
- grp_start_p = NULL;
+ {
+ if (grp_start_p
+ && OMP_CLAUSE_HAS_ITERATORS (*grp_start_p))
+ for (tree gc = *grp_start_p; gc != grp_sentinel;
+ gc = OMP_CLAUSE_CHAIN (gc))
+ OMP_CLAUSE_ITERATORS (gc) = OMP_CLAUSE_ITERATORS (*grp_start_p);
+ grp_start_p = NULL;
+ }
switch (OMP_CLAUSE_CODE (c))
{
@@ -8681,9 +8872,7 @@ finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
/* FALLTHRU */
case OMP_CLAUSE_AFFINITY:
t = OMP_CLAUSE_DECL (c);
- if (TREE_CODE (t) == TREE_LIST
- && TREE_PURPOSE (t)
- && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
+ if (OMP_ITERATOR_DECL_P (t))
{
if (TREE_PURPOSE (t) != last_iterators)
last_iterators_remove
@@ -8847,9 +9036,22 @@ finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
case OMP_CLAUSE_MAP:
if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved)
goto move_implicit;
+ if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_PUSH_MAPPER_NAME
+ || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POP_MAPPER_NAME)
+ {
+ remove = true;
+ break;
+ }
/* FALLTHRU */
case OMP_CLAUSE_TO:
case OMP_CLAUSE_FROM:
+ if (OMP_CLAUSE_ITERATORS (c)
+ && cp_omp_finish_iterators (OMP_CLAUSE_ITERATORS (c)))
+ {
+ t = error_mark_node;
+ break;
+ }
+ /* FALLTHRU */
case OMP_CLAUSE__CACHE_:
{
using namespace omp_addr_tokenizer;
@@ -9690,7 +9892,8 @@ finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
break;
case OMP_CLAUSE_INIT:
init_seen = true;
- cp_omp_init_prefer_type_update (c);
+ OMP_CLAUSE_INIT_PREFER_TYPE (c)
+ = cp_finish_omp_init_prefer_type (OMP_CLAUSE_INIT_PREFER_TYPE (c));
if (!OMP_CLAUSE_INIT_TARGETSYNC (c))
init_no_targetsync_clause = c;
/* FALLTHRU */
@@ -9752,6 +9955,11 @@ finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
pc = &OMP_CLAUSE_CHAIN (c);
}
+ if (grp_start_p
+ && OMP_CLAUSE_HAS_ITERATORS (*grp_start_p))
+ for (tree gc = *grp_start_p; gc; gc = OMP_CLAUSE_CHAIN (gc))
+ OMP_CLAUSE_ITERATORS (gc) = OMP_CLAUSE_ITERATORS (*grp_start_p);
+
if (reduction_seen < 0 && (ordered_seen || schedule_seen))
reduction_seen = -2;
@@ -10465,6 +10673,8 @@ struct omp_target_walk_data
/* Local variables declared inside a BIND_EXPR, used to filter out such
variables when recording lambda_objects_accessed. */
hash_set<tree> local_decls;
+
+ omp_mapper_list<tree> *mappers;
};
/* Helper function of finish_omp_target_clauses, called via
@@ -10478,6 +10688,7 @@ finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr)
struct omp_target_walk_data *data = (struct omp_target_walk_data *) ptr;
tree current_object = data->current_object;
tree current_closure = data->current_closure;
+ omp_mapper_list<tree> *mlist = data->mappers;
/* References inside of these expression codes shouldn't incur any
form of mapping, so return early. */
@@ -10491,6 +10702,27 @@ finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr)
if (TREE_CODE (t) == OMP_CLAUSE)
return NULL_TREE;
+ if (!processing_template_decl)
+ {
+ tree aggr_type = NULL_TREE;
+
+ if (TREE_CODE (t) == COMPONENT_REF
+ && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
+ aggr_type = TREE_TYPE (TREE_OPERAND (t, 0));
+ else if ((TREE_CODE (t) == VAR_DECL
+ || TREE_CODE (t) == PARM_DECL
+ || TREE_CODE (t) == RESULT_DECL)
+ && RECORD_OR_UNION_TYPE_P (TREE_TYPE (t)))
+ aggr_type = TREE_TYPE (t);
+
+ if (aggr_type)
+ {
+ tree mapper_fn = cxx_omp_mapper_lookup (NULL_TREE, aggr_type);
+ if (mapper_fn)
+ mlist->add_mapper (NULL_TREE, aggr_type, mapper_fn);
+ }
+ }
+
if (current_object)
{
tree this_expr = TREE_OPERAND (current_object, 0);
@@ -10541,10 +10773,10 @@ finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr)
if (TREE_CODE (t) == BIND_EXPR)
{
- tree block = BIND_EXPR_BLOCK (t);
- for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
- if (!data->local_decls.contains (var))
- data->local_decls.add (var);
+ if (tree block = BIND_EXPR_BLOCK (t))
+ for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
+ if (!data->local_decls.contains (var))
+ data->local_decls.add (var);
return NULL_TREE;
}
@@ -10593,10 +10825,48 @@ finish_omp_target_clauses (location_t loc, tree body, tree *clauses_ptr)
else
data.current_closure = NULL_TREE;
- cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r, &data);
-
auto_vec<tree, 16> new_clauses;
+ if (!processing_template_decl)
+ {
+ hash_set<omp_name_type<tree> > seen_types;
+ auto_vec<tree> mapper_fns;
+ omp_mapper_list<tree> mlist (&seen_types, &mapper_fns);
+ data.mappers = &mlist;
+
+ cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r,
+ &data);
+
+ unsigned int i;
+ tree mapper_fn;
+ FOR_EACH_VEC_ELT (mapper_fns, i, mapper_fn)
+ c_omp_find_nested_mappers (&mlist, mapper_fn);
+
+ FOR_EACH_VEC_ELT (mapper_fns, i, mapper_fn)
+ {
+ tree mapper = cxx_omp_extract_mapper_directive (mapper_fn);
+ if (mapper == error_mark_node)
+ continue;
+ tree mapper_name = OMP_DECLARE_MAPPER_ID (mapper);
+ tree decl = OMP_DECLARE_MAPPER_DECL (mapper);
+ if (BASELINK_P (mapper_fn))
+ mapper_fn = BASELINK_FUNCTIONS (mapper_fn);
+
+ tree c = build_omp_clause (loc, OMP_CLAUSE__MAPPER_BINDING_);
+ OMP_CLAUSE__MAPPER_BINDING__ID (c) = mapper_name;
+ OMP_CLAUSE__MAPPER_BINDING__DECL (c) = decl;
+ OMP_CLAUSE__MAPPER_BINDING__MAPPER (c) = mapper_fn;
+
+ new_clauses.safe_push (c);
+ }
+ }
+ else
+ {
+ data.mappers = NULL;
+ cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r,
+ &data);
+ }
+
tree omp_target_this_expr = NULL_TREE;
tree *explicit_this_deref_map = NULL;
if (data.this_expr_accessed)
@@ -12388,11 +12658,14 @@ cexpr_str::extract (location_t location, const char * & msg, int &len)
CONDITION and the message text MESSAGE. LOCATION is the location
of the static assertion in the source code. When MEMBER_P, this
static assertion is a member of a class. If SHOW_EXPR_P is true,
- print the condition (because it was instantiation-dependent). */
+ print the condition (because it was instantiation-dependent).
+ If CONSTEVAL_BLOCK_P is true, this static assertion represents
+ a consteval block. */
void
finish_static_assert (tree condition, tree message, location_t location,
- bool member_p, bool show_expr_p)
+ bool member_p, bool show_expr_p,
+ bool consteval_block_p/*=false*/)
{
tsubst_flags_t complain = tf_warning_or_error;
@@ -12420,6 +12693,7 @@ finish_static_assert (tree condition, tree message, location_t location,
STATIC_ASSERT_CONDITION (assertion) = orig_condition;
STATIC_ASSERT_MESSAGE (assertion) = cstr.message;
STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;
+ CONSTEVAL_BLOCK_P (assertion) = consteval_block_p;
if (member_p)
maybe_add_class_template_decl_list (current_class_type,
@@ -12431,6 +12705,13 @@ finish_static_assert (tree condition, tree message, location_t location,
return;
}
+ /* Evaluate the consteval { }. This must be done only once. */
+ if (consteval_block_p)
+ {
+ cxx_constant_value (condition);
+ return;
+ }
+
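Illustrative sketch (not part of the patch), assuming the C++26 consteval-block syntax: the block is represented as a static assertion node with CONSTEVAL_BLOCK_P set, and the code path above evaluates its compound statement exactly once.

    consteval int table_size() { return 64; }

    consteval {
      // evaluated once, at translation time, in an immediate-function context
      static_assert(table_size() > 0);
    }
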
/* Fold the expression and convert it to a boolean value. */
condition = contextual_conv_bool (condition, complain);
condition = fold_non_dependent_expr (condition, complain,
@@ -12680,9 +12961,9 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
}
else
{
- if (outer_automatic_var_p (STRIP_REFERENCE_REF (expr))
- && current_function_decl
- && LAMBDA_FUNCTION_P (current_function_decl))
+ tree decl = STRIP_REFERENCE_REF (expr);
+ tree lam = current_lambda_expr ();
+ if (lam && outer_automatic_var_p (decl))
{
/* [expr.prim.id.unqual]/3: If naming the entity from outside of an
unevaluated operand within S would refer to an entity captured by
@@ -12699,8 +12980,6 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
local variable inside decltype, not just decltype((x)) (PR83167).
And we don't handle nested lambdas properly, where we need to
consider the outer lambdas as well (PR112926). */
- tree decl = STRIP_REFERENCE_REF (expr);
- tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
tree cap = lookup_name (DECL_NAME (decl), LOOK_where::BLOCK,
LOOK_want::HIDDEN_LAMBDA);
@@ -12716,17 +12995,28 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
if (type && !TYPE_REF_P (type))
{
- tree obtype = TREE_TYPE (DECL_ARGUMENTS (current_function_decl));
- if (WILDCARD_TYPE_P (non_reference (obtype)))
- /* We don't know what the eventual obtype quals will be. */
- goto dependent;
- auto direct_type = [](tree t){
- if (INDIRECT_TYPE_P (t))
- return TREE_TYPE (t);
- return t;
- };
- int const quals = cp_type_quals (type)
- | cp_type_quals (direct_type (obtype));
+ int quals;
+ if (current_function_decl
+ && LAMBDA_FUNCTION_P (current_function_decl)
+ && DECL_XOBJ_MEMBER_FUNCTION_P (current_function_decl))
+ {
+ tree obtype = TREE_TYPE (DECL_ARGUMENTS (current_function_decl));
+ if (WILDCARD_TYPE_P (non_reference (obtype)))
+ /* We don't know what the eventual obtype quals will be. */
+ goto dependent;
+ auto direct_type = [](tree t){
+ if (INDIRECT_TYPE_P (t))
+ return TREE_TYPE (t);
+ return t;
+ };
+ quals = (cp_type_quals (type)
+ | cp_type_quals (direct_type (obtype)));
+ }
+ else
+ /* We are in the parameter clause, trailing return type, or
+ the requires clause and have no relevant c_f_decl yet. */
+ quals = (LAMBDA_EXPR_CONST_QUAL_P (lam)
+ ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED);
type = cp_build_qualified_type (type, quals);
type = build_reference_type (type);
}
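Illustrative sketch (not part of the patch) of the second path above: in the lambda-declarator there is no operator() yet, so the closure's own constness is what qualifies a by-copy capture named inside decltype.

    auto c1 = [x = 0]() -> decltype((x)) { return x; };          // expected: const int&
    auto c2 = [x = 0]() mutable -> decltype((x)) { return x; };  // expected: int&
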
@@ -13154,6 +13444,18 @@ object_type_p (const_tree type)
&& !VOID_TYPE_P (type));
}
+/* [defns.referenceable] True iff TYPE is a referenceable type. */
+
+static bool
+referenceable_type_p (const_tree type)
+{
+ return (TYPE_REF_P (type)
+ || object_type_p (type)
+ || (FUNC_OR_METHOD_TYPE_P (type)
+ && type_memfn_quals (type) == TYPE_UNQUALIFIED
+ && type_memfn_rqual (type) == REF_QUAL_NONE));
+}
+
/* Actually evaluates the trait. */
static bool
@@ -13211,6 +13513,15 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_HAS_TRIVIAL_DESTRUCTOR:
type1 = strip_array_types (type1);
+ if (CLASS_TYPE_P (type1) && type_build_dtor_call (type1))
+ {
+ deferring_access_check_sentinel dacs (dk_no_check);
+ cp_unevaluated un;
+ tree fn = get_dtor (type1, tf_none);
+ if (!fn && !seen_error ())
+ warning (0, "checking %qs for type %qT with a destructor that "
+ "cannot be called", "__has_trivial_destructor", type1);
+ }
return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
|| (CLASS_TYPE_P (type1)
&& TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));
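Illustrative sketch (not part of the patch; the exact trigger is an assumption): querying __has_trivial_destructor for a class whose destructor exists but cannot actually be called, e.g. because it is deleted, now draws the new warning.

    struct S { ~S() = delete; };

    constexpr bool b = __has_trivial_destructor(S);
        // warning: checking "__has_trivial_destructor" for type "S" with a
        // destructor that cannot be called
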
@@ -13266,6 +13577,9 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_CONVERTIBLE:
return is_convertible (type1, type2);
+ case CPTK_IS_DESTRUCTIBLE:
+ return is_xible (BIT_NOT_EXPR, type1, NULL_TREE);
+
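Illustrative sketch (not part of the patch), assuming the usual GCC spellings __is_destructible, __is_nothrow_destructible and __is_trivially_destructible for the three new trait codes:

    struct T1 { };
    struct T2 { ~T2() noexcept(false) { } };
    struct T3 { ~T3() = delete; };

    static_assert(__is_trivially_destructible(T1));
    static_assert(__is_destructible(T2) && !__is_nothrow_destructible(T2));
    static_assert(!__is_destructible(T3));
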
case CPTK_IS_EMPTY:
return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1);
@@ -13305,9 +13619,27 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_NOTHROW_CONVERTIBLE:
return is_nothrow_convertible (type1, type2);
+ case CPTK_IS_NOTHROW_DESTRUCTIBLE:
+ return is_nothrow_xible (BIT_NOT_EXPR, type1, NULL_TREE);
+
case CPTK_IS_NOTHROW_INVOCABLE:
return expr_noexcept_p (build_invoke (type1, type2, tf_none), tf_none);
+ case CPTK_IS_NOTHROW_RELOCATABLE:
+ if (trivially_relocatable_type_p (type1))
+ return true;
+ else
+ {
+ type1 = strip_array_types (type1);
+ if (!referenceable_type_p (type1))
+ return false;
+ tree arg = make_tree_vec (1);
+ TREE_VEC_ELT (arg, 0)
+ = cp_build_reference_type (type1, /*rval=*/true);
+ return (is_nothrow_xible (INIT_EXPR, type1, arg)
+ && is_nothrow_xible (BIT_NOT_EXPR, type1, NULL_TREE));
+ }
+
case CPTK_IS_OBJECT:
return object_type_p (type1);
@@ -13326,6 +13658,9 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_REFERENCE:
return type_code1 == REFERENCE_TYPE;
+ case CPTK_IS_REPLACEABLE:
+ return replaceable_type_p (type1);
+
case CPTK_IS_SAME:
return same_type_p (type1, type2);
@@ -13347,6 +13682,12 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_TRIVIALLY_COPYABLE:
return trivially_copyable_p (type1);
+ case CPTK_IS_TRIVIALLY_DESTRUCTIBLE:
+ return is_trivially_xible (BIT_NOT_EXPR, type1, NULL_TREE);
+
+ case CPTK_IS_TRIVIALLY_RELOCATABLE:
+ return trivially_relocatable_type_p (type1);
+
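Illustrative sketch (not part of the patch), written against the C++26 library traits these trait codes are presumably meant to back (P2786 trivial relocation); the nothrow variant added earlier follows the same pattern.

    #include <type_traits>   // C++26: is_trivially_relocatable_v, is_replaceable_v

    struct Pod    { int a; double b; };
    struct Pinned { Pinned(Pinned &&) = delete; };

    static_assert(std::is_trivially_relocatable_v<Pod>);
    static_assert(!std::is_trivially_relocatable_v<Pinned>);
    static_assert(std::is_replaceable_v<Pod>);
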
case CPTK_IS_UNBOUNDED_ARRAY:
return array_of_unknown_bound_p (type1);
@@ -13370,8 +13711,11 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_DEDUCIBLE:
return type_targs_deducible_from (type1, type2);
- /* __array_rank is handled in finish_trait_expr. */
+ /* __array_rank, __builtin_type_order and __builtin_structured_binding_size
+ are handled in finish_trait_expr. */
case CPTK_RANK:
+ case CPTK_TYPE_ORDER:
+ case CPTK_STRUCTURED_BINDING_SIZE:
gcc_unreachable ();
#define DEFTRAIT_TYPE(CODE, NAME, ARITY) \
@@ -13428,7 +13772,8 @@ check_trait_type (tree type, int kind = 1)
type = complete_type (strip_array_types (type));
if (!COMPLETE_TYPE_P (type)
- && cxx_incomplete_type_diagnostic (NULL_TREE, type, DK_PERMERROR)
+ && cxx_incomplete_type_diagnostic (NULL_TREE, type,
+ diagnostics::kind::permerror)
&& !flag_permissive)
return false;
return true;
@@ -13475,16 +13820,25 @@ same_type_ref_bind_p (cp_trait_kind kind, tree type1, tree type2)
(non_reference (to), non_reference (from))));
}
-/* [defns.referenceable] True iff TYPE is a referenceable type. */
+/* Helper for finish_trait_expr and tsubst_expr. Handle
+ CPTK_STRUCTURED_BINDING_SIZE in possibly SFINAE-friendly
+ way. */
-static bool
-referenceable_type_p (const_tree type)
+tree
+finish_structured_binding_size (location_t loc, tree type,
+ tsubst_flags_t complain)
{
- return (TYPE_REF_P (type)
- || object_type_p (type)
- || (FUNC_OR_METHOD_TYPE_P (type)
- && (type_memfn_quals (type) == TYPE_UNQUALIFIED
- && type_memfn_rqual (type) == REF_QUAL_NONE)));
+ if (TYPE_REF_P (type))
+ {
+ if (complain & tf_error)
+ error_at (loc, "%qs argument %qT is a reference",
+ "__builtin_structured_binding_size", type);
+ return error_mark_node;
+ }
+ HOST_WIDE_INT ret = cp_decomp_size (loc, type, complain);
+ if (ret == -1)
+ return error_mark_node;
+ return maybe_wrap_with_location (build_int_cst (size_type_node, ret), loc);
}
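Illustrative sketch (not part of the patch) of the builtin named in the diagnostics above: it evaluates to the number of bindings a type would decompose into, and rejects reference types.

    struct P { int x; long y; };

    static_assert(__builtin_structured_binding_size(P) == 2);
    static_assert(__builtin_structured_binding_size(int[3]) == 3);
    // __builtin_structured_binding_size(P&) is an error: the argument
    // must not be a reference
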
/* Process a trait expression. */
@@ -13499,8 +13853,14 @@ finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
if (processing_template_decl)
{
tree trait_expr = make_node (TRAIT_EXPR);
- if (kind == CPTK_RANK)
+ if (kind == CPTK_RANK || kind == CPTK_STRUCTURED_BINDING_SIZE)
TREE_TYPE (trait_expr) = size_type_node;
+ else if (kind == CPTK_TYPE_ORDER)
+ {
+ tree val = type_order_value (type1, type1);
+ if (val != error_mark_node)
+ TREE_TYPE (trait_expr) = TREE_TYPE (val);
+ }
else
TREE_TYPE (trait_expr) = boolean_type_node;
TRAIT_EXPR_TYPE1 (trait_expr) = type1;
@@ -13519,6 +13879,9 @@ finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
case CPTK_HAS_NOTHROW_COPY:
case CPTK_HAS_TRIVIAL_COPY:
case CPTK_HAS_TRIVIAL_DESTRUCTOR:
+ case CPTK_IS_DESTRUCTIBLE:
+ case CPTK_IS_NOTHROW_DESTRUCTIBLE:
+ case CPTK_IS_TRIVIALLY_DESTRUCTIBLE:
if (!check_trait_type (type1))
return error_mark_node;
break;
@@ -13526,8 +13889,11 @@ finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_LITERAL_TYPE:
case CPTK_IS_POD:
case CPTK_IS_STD_LAYOUT:
+ case CPTK_IS_REPLACEABLE:
+ case CPTK_IS_NOTHROW_RELOCATABLE:
case CPTK_IS_TRIVIAL:
case CPTK_IS_TRIVIALLY_COPYABLE:
+ case CPTK_IS_TRIVIALLY_RELOCATABLE:
case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
if (!check_trait_type (type1, /* kind = */ 2))
return error_mark_node;
@@ -13604,9 +13970,23 @@ finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
case CPTK_IS_UNBOUNDED_ARRAY:
case CPTK_IS_UNION:
case CPTK_IS_VOLATILE:
- case CPTK_RANK:
break;
+ case CPTK_RANK:
+ {
+ size_t rank = 0;
+ for (; TREE_CODE (type1) == ARRAY_TYPE; type1 = TREE_TYPE (type1))
+ ++rank;
+ return maybe_wrap_with_location (build_int_cst (size_type_node, rank),
+ loc);
+ }
+
+ case CPTK_TYPE_ORDER:
+ return maybe_wrap_with_location (type_order_value (type1, type2), loc);
+
+ case CPTK_STRUCTURED_BINDING_SIZE:
+ return finish_structured_binding_size (loc, type1, tf_warning_or_error);
+
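Illustrative sketch (not part of the patch) of the two value traits folded directly above: __array_rank counts array dimensions, and __builtin_type_order (named in the earlier comment) is assumed to yield a std::strong_ordering value implementing a total order over types.

    #include <compare>

    static_assert(__array_rank(int[2][3][4]) == 3);
    static_assert(__builtin_type_order(int, int) == std::strong_ordering::equal);
    static_assert(__builtin_type_order(int, long) != std::strong_ordering::equal);
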
case CPTK_IS_LAYOUT_COMPATIBLE:
if (!array_of_unknown_bound_p (type1)
&& TREE_CODE (type1) != VOID_TYPE
@@ -13636,18 +14016,8 @@ finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
gcc_unreachable ();
}
- tree val;
- if (kind == CPTK_RANK)
- {
- size_t rank = 0;
- for (; TREE_CODE (type1) == ARRAY_TYPE; type1 = TREE_TYPE (type1))
- ++rank;
- val = build_int_cst (size_type_node, rank);
- }
- else
- val = (trait_expr_value (kind, type1, type2)
- ? boolean_true_node : boolean_false_node);
-
+ tree val = (trait_expr_value (kind, type1, type2)
+ ? boolean_true_node : boolean_false_node);
return maybe_wrap_with_location (val, loc);
}
@@ -13844,7 +14214,7 @@ apply_deduced_return_type (tree fco, tree return_type)
result);
DECL_RESULT (fco) = result;
- if (!processing_template_decl)
+ if (!uses_template_parms (fco))
if (function *fun = DECL_STRUCT_FUNCTION (fco))
{
bool aggr = aggregate_value_p (result, fco);