Diffstat (limited to 'gcc/rust/backend')
-rw-r--r--  gcc/rust/backend/rust-compile-base.cc      |    8
-rw-r--r--  gcc/rust/backend/rust-compile-intrinsic.cc |    8
-rw-r--r--  gcc/rust/backend/rust-constexpr.cc         | 6112
-rw-r--r--  gcc/rust/backend/rust-constexpr.h          |    2
-rw-r--r--  gcc/rust/backend/rust-tree.cc              | 5241
-rw-r--r--  gcc/rust/backend/rust-tree.h               | 2904
6 files changed, 14234 insertions, 41 deletions
diff --git a/gcc/rust/backend/rust-compile-base.cc b/gcc/rust/backend/rust-compile-base.cc index 84ad7be..e1506b3 100644 --- a/gcc/rust/backend/rust-compile-base.cc +++ b/gcc/rust/backend/rust-compile-base.cc @@ -56,6 +56,7 @@ HIRCompileBase::setup_fndecl (tree fndecl, bool is_main_entry_point, } // is it a const fn + DECL_DECLARED_CONSTEXPR_P (fndecl) = qualifiers.is_const (); if (qualifiers.is_const ()) { TREE_READONLY (fndecl) = 1; @@ -630,6 +631,11 @@ HIRCompileBase::compile_function ( ctx->pop_fn (); ctx->push_function (fndecl); + if (DECL_DECLARED_CONSTEXPR_P (fndecl)) + { + maybe_save_constexpr_fundef (fndecl); + } + return fndecl; } @@ -695,6 +701,8 @@ HIRCompileBase::compile_constant_item ( gcc_assert (TREE_CODE (bind_tree) == BIND_EXPR); DECL_SAVED_TREE (fndecl) = bind_tree; + DECL_DECLARED_CONSTEXPR_P (fndecl) = 1; + maybe_save_constexpr_fundef (fndecl); ctx->pop_fn (); diff --git a/gcc/rust/backend/rust-compile-intrinsic.cc b/gcc/rust/backend/rust-compile-intrinsic.cc index 61084b9..6169196 100644 --- a/gcc/rust/backend/rust-compile-intrinsic.cc +++ b/gcc/rust/backend/rust-compile-intrinsic.cc @@ -15,17 +15,18 @@ // <http://www.gnu.org/licenses/>. #include "rust-compile-intrinsic.h" -#include "fold-const.h" -#include "langhooks.h" #include "rust-compile-context.h" #include "rust-compile-type.h" #include "rust-compile-fnparam.h" #include "rust-builtins.h" #include "rust-diagnostics.h" #include "rust-location.h" +#include "rust-constexpr.h" #include "rust-tree.h" #include "tree-core.h" #include "print-tree.h" +#include "fold-const.h" +#include "langhooks.h" namespace Rust { namespace Compile { @@ -213,6 +214,9 @@ finalize_intrinsic_block (Context *ctx, tree fndecl) DECL_SAVED_TREE (fndecl) = bind_tree; ctx->push_function (fndecl); + + DECL_DECLARED_CONSTEXPR_P (fndecl) = 1; + maybe_save_constexpr_fundef (fndecl); } static tree diff --git a/gcc/rust/backend/rust-constexpr.cc b/gcc/rust/backend/rust-constexpr.cc index 5aa10d9..8efb430 100644 --- a/gcc/rust/backend/rust-constexpr.cc +++ b/gcc/rust/backend/rust-constexpr.cc @@ -18,29 +18,449 @@ #include "rust-location.h" #include "rust-diagnostics.h" #include "rust-tree.h" - #include "fold-const.h" #include "realmpfr.h" #include "convert.h" #include "print-tree.h" #include "gimplify.h" #include "tree-iterator.h" +#include "timevar.h" +#include "varasm.h" +#include "cgraph.h" +#include "tree-inline.h" +#include "vec.h" +#include "function.h" +#include "diagnostic.h" +#include "target.h" +#include "builtins.h" + +#define VERIFY_CONSTANT(X) \ + do \ + { \ + if (verify_constant ((X), ctx->quiet, non_constant_p, overflow_p)) \ + return t; \ + } \ + while (0) namespace Rust { namespace Compile { +/* Returns true iff FUN is an instantiation of a constexpr function + template or a defaulted constexpr function. */ + +bool +is_instantiation_of_constexpr (tree fun) +{ + return DECL_DECLARED_CONSTEXPR_P (fun); +} + +/* Return true if T is a literal type. 
*/ + +bool +literal_type_p (tree t) +{ + if (SCALAR_TYPE_P (t) || VECTOR_TYPE_P (t) || TYPE_REF_P (t) + || (VOID_TYPE_P (t))) + return true; + + if (TREE_CODE (t) == ARRAY_TYPE) + return literal_type_p (strip_array_types (t)); + return false; +} + +static bool +verify_constant (tree, bool, bool *, bool *); + +static HOST_WIDE_INT +find_array_ctor_elt (tree ary, tree dindex, bool insert = false); +static int +array_index_cmp (tree key, tree index); +static bool +potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now, + tsubst_flags_t flags, tree *jump_target); +bool +potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now, + tsubst_flags_t flags); +inline tree +get_nth_callarg (tree t, int n); +tree +unshare_constructor (tree t MEM_STAT_DECL); +void +maybe_save_constexpr_fundef (tree fun); + +static bool +returns (tree *jump_target); +static bool +breaks (tree *jump_target); +static bool +continues (tree *jump_target); +static bool +switches (tree *jump_target); + struct constexpr_global_ctx { + /* Values for any temporaries or local variables within the + constant-expression. */ + hash_map<tree, tree> values; + /* Number of cxx_eval_constant_expression calls (except skipped ones, + on simple constants or location wrappers) encountered during current + cxx_eval_outermost_constant_expr call. */ HOST_WIDE_INT constexpr_ops_count; + /* Heap VAR_DECLs created during the evaluation of the outermost constant + expression. */ + auto_vec<tree, 16> heap_vars; + /* Cleanups that need to be evaluated at the end of CLEANUP_POINT_EXPR. */ + vec<tree> *cleanups; + /* Number of heap VAR_DECL deallocations. */ + unsigned heap_dealloc_count; + /* Constructor. */ + constexpr_global_ctx () + : constexpr_ops_count (0), cleanups (NULL), heap_dealloc_count (0) + {} +}; - constexpr_global_ctx () : constexpr_ops_count (0) {} +/* In constexpr.cc */ +/* Representation of entries in the constexpr function definition table. */ + +struct GTY ((for_user)) constexpr_fundef +{ + tree decl; + tree body; + tree parms; + tree result; +}; + +/* Objects of this type represent calls to constexpr functions + along with the bindings of parameters to their arguments, for + the purpose of compile time evaluation. */ + +struct GTY ((for_user)) constexpr_call +{ + /* Description of the constexpr function definition. */ + constexpr_fundef *fundef; + /* Parameter bindings environment. A TREE_VEC of arguments. */ + tree bindings; + /* Result of the call. + NULL means the call is being evaluated. + error_mark_node means that the evaluation was erroneous; + otherwise, the actuall value of the call. */ + tree result; + /* The hash of this call; we remember it here to avoid having to + recalculate it when expanding the hash table. */ + hashval_t hash; + /* Whether __builtin_is_constant_evaluated() should evaluate to true. */ + bool manifestly_const_eval; +}; + +struct constexpr_call_hasher : ggc_ptr_hash<constexpr_call> +{ + static hashval_t hash (constexpr_call *); + static bool equal (constexpr_call *, constexpr_call *); +}; + +enum constexpr_switch_state +{ + /* Used when processing a switch for the first time by cxx_eval_switch_expr + and default: label for that switch has not been seen yet. */ + css_default_not_seen, + /* Used when processing a switch for the first time by cxx_eval_switch_expr + and default: label for that switch has been seen already. */ + css_default_seen, + /* Used when processing a switch for the second time by + cxx_eval_switch_expr, where default: label should match. 
*/ + css_default_processing }; struct constexpr_ctx { + /* The part of the context that needs to be unique to the whole + cxx_eval_outermost_constant_expr invocation. */ constexpr_global_ctx *global; + /* The innermost call we're evaluating. */ + constexpr_call *call; + /* SAVE_EXPRs and TARGET_EXPR_SLOT vars of TARGET_EXPRs that we've seen + within the current LOOP_EXPR. NULL if we aren't inside a loop. */ + vec<tree> *save_exprs; + /* The CONSTRUCTOR we're currently building up for an aggregate + initializer. */ + tree ctor; + /* The object we're building the CONSTRUCTOR for. */ + tree object; + /* If inside SWITCH_EXPR. */ + constexpr_switch_state *css_state; + /* The aggregate initialization context inside which this one is nested. This + is used by lookup_placeholder to resolve PLACEHOLDER_EXPRs. */ + const constexpr_ctx *parent; + + /* Whether we should error on a non-constant expression or fail quietly. + This flag needs to be here, but some of the others could move to global + if they get larger than a word. */ + bool quiet; + /* Whether we are strictly conforming to constant expression rules or + trying harder to get a constant value. */ + bool strict; + /* Whether __builtin_is_constant_evaluated () should be true. */ + bool manifestly_const_eval; +}; + +struct constexpr_fundef_hasher : ggc_ptr_hash<constexpr_fundef> +{ + static hashval_t hash (const constexpr_fundef *); + static bool equal (const constexpr_fundef *, const constexpr_fundef *); +}; + +/* This table holds all constexpr function definitions seen in + the current translation unit. */ + +static GTY (()) hash_table<constexpr_fundef_hasher> *constexpr_fundef_table; + +/* Utility function used for managing the constexpr function table. + Return true if the entries pointed to by P and Q are for the + same constexpr function. */ + +inline bool +constexpr_fundef_hasher::equal (const constexpr_fundef *lhs, + const constexpr_fundef *rhs) +{ + return lhs->decl == rhs->decl; +} + +/* Utility function used for managing the constexpr function table. + Return a hash value for the entry pointed to by Q. */ + +inline hashval_t +constexpr_fundef_hasher::hash (const constexpr_fundef *fundef) +{ + return DECL_UID (fundef->decl); +} + +/* Return a previously saved definition of function FUN. */ + +constexpr_fundef * +retrieve_constexpr_fundef (tree fun) +{ + if (constexpr_fundef_table == NULL) + return NULL; + + constexpr_fundef fundef = {fun, NULL_TREE, NULL_TREE, NULL_TREE}; + return constexpr_fundef_table->find (&fundef); +} + +/* This internal flag controls whether we should avoid doing anything during + constexpr evaluation that would cause extra DECL_UID generation, such as + template instantiation and function body copying. */ + +static bool uid_sensitive_constexpr_evaluation_value; + +/* An internal counter that keeps track of the number of times + uid_sensitive_constexpr_evaluation_p returned true. */ + +static unsigned uid_sensitive_constexpr_evaluation_true_counter; + +/* The accessor for uid_sensitive_constexpr_evaluation_value which also + increments the corresponding counter. */ + +static bool +uid_sensitive_constexpr_evaluation_p () +{ + if (uid_sensitive_constexpr_evaluation_value) + { + ++uid_sensitive_constexpr_evaluation_true_counter; + return true; + } + else + return false; +} + +/* RAII sentinel that saves the value of a variable, optionally + overrides it right away, and restores its value when the sentinel + id destructed. 
*/ + +template <typename T> class temp_override +{ + T &overridden_variable; + T saved_value; + +public: + temp_override (T &var) : overridden_variable (var), saved_value (var) {} + temp_override (T &var, T overrider) + : overridden_variable (var), saved_value (var) + { + overridden_variable = overrider; + } + ~temp_override () { overridden_variable = saved_value; } }; +/* An RAII sentinel used to restrict constexpr evaluation so that it + doesn't do anything that causes extra DECL_UID generation. */ + +struct uid_sensitive_constexpr_evaluation_sentinel +{ + temp_override<bool> ovr; + uid_sensitive_constexpr_evaluation_sentinel (); +}; + +/* Used to determine whether uid_sensitive_constexpr_evaluation_p was + called and returned true, indicating that we've restricted constexpr + evaluation in order to avoid UID generation. We use this to control + updates to the fold_cache and cv_cache. */ + +struct uid_sensitive_constexpr_evaluation_checker +{ + const unsigned saved_counter; + uid_sensitive_constexpr_evaluation_checker (); + bool evaluation_restricted_p () const; +}; + +/* The default constructor for uid_sensitive_constexpr_evaluation_sentinel + enables the internal flag for uid_sensitive_constexpr_evaluation_p + during the lifetime of the sentinel object. Upon its destruction, the + previous value of uid_sensitive_constexpr_evaluation_p is restored. */ + +uid_sensitive_constexpr_evaluation_sentinel :: + uid_sensitive_constexpr_evaluation_sentinel () + : ovr (uid_sensitive_constexpr_evaluation_value, true) +{} + +/* The default constructor for uid_sensitive_constexpr_evaluation_checker + records the current number of times that uid_sensitive_constexpr_evaluation_p + has been called and returned true. */ + +uid_sensitive_constexpr_evaluation_checker :: + uid_sensitive_constexpr_evaluation_checker () + : saved_counter (uid_sensitive_constexpr_evaluation_true_counter) +{} + +/* Returns true iff uid_sensitive_constexpr_evaluation_p is true, and + some constexpr evaluation was restricted due to u_s_c_e_p being called + and returning true during the lifetime of this checker object. */ + +bool +uid_sensitive_constexpr_evaluation_checker::evaluation_restricted_p () const +{ + return (uid_sensitive_constexpr_evaluation_value + && saved_counter != uid_sensitive_constexpr_evaluation_true_counter); +} + +/* A table of all constexpr calls that have been evaluated by the + compiler in this translation unit. */ + +static GTY (()) hash_table<constexpr_call_hasher> *constexpr_call_table; + +/* Compute a hash value for a constexpr call representation. */ + +inline hashval_t +constexpr_call_hasher::hash (constexpr_call *info) +{ + return info->hash; +} + +/* Return true if the objects pointed to by P and Q represent calls + to the same constexpr function with the same arguments. + Otherwise, return false. */ + +bool +constexpr_call_hasher::equal (constexpr_call *lhs, constexpr_call *rhs) +{ + if (lhs == rhs) + return true; + if (lhs->hash != rhs->hash) + return false; + if (lhs->manifestly_const_eval != rhs->manifestly_const_eval) + return false; + if (!constexpr_fundef_hasher::equal (lhs->fundef, rhs->fundef)) + return false; + return rs_tree_equal (lhs->bindings, rhs->bindings); +} + +/* Initialize the constexpr call table, if needed. 
*/ + +static void +maybe_initialize_constexpr_call_table (void) +{ + if (constexpr_call_table == NULL) + constexpr_call_table = hash_table<constexpr_call_hasher>::create_ggc (101); +} + +/* During constexpr CALL_EXPR evaluation, to avoid issues with sharing when + a function happens to get called recursively, we unshare the callee + function's body and evaluate this unshared copy instead of evaluating the + original body. + + FUNDEF_COPIES_TABLE is a per-function freelist of these unshared function + copies. The underlying data structure of FUNDEF_COPIES_TABLE is a hash_map + that's keyed off of the original FUNCTION_DECL and whose value is a + TREE_LIST of this function's unused copies awaiting reuse. + + This is not GC-deletable to avoid GC affecting UID generation. */ + +static GTY (()) decl_tree_map *fundef_copies_table; + +/* Reuse a copy or create a new unshared copy of the function FUN. + Return this copy. We use a TREE_LIST whose PURPOSE is body, VALUE + is parms, TYPE is result. */ + +static tree +get_fundef_copy (constexpr_fundef *fundef) +{ + tree copy; + bool existed; + tree *slot + = &(hash_map_safe_get_or_insert<hm_ggc> (fundef_copies_table, fundef->decl, + &existed, 127)); + + if (!existed) + { + /* There is no cached function available, or in use. We can use + the function directly. That the slot is now created records + that this function is now in use. */ + copy = build_tree_list (fundef->body, fundef->parms); + TREE_TYPE (copy) = fundef->result; + } + else if (*slot == NULL_TREE) + { + if (uid_sensitive_constexpr_evaluation_p ()) + return NULL_TREE; + + /* We've already used the function itself, so make a copy. */ + copy = build_tree_list (NULL, NULL); + tree saved_body = DECL_SAVED_TREE (fundef->decl); + tree saved_parms = DECL_ARGUMENTS (fundef->decl); + tree saved_result = DECL_RESULT (fundef->decl); + tree saved_fn = current_function_decl; + DECL_SAVED_TREE (fundef->decl) = fundef->body; + DECL_ARGUMENTS (fundef->decl) = fundef->parms; + DECL_RESULT (fundef->decl) = fundef->result; + current_function_decl = fundef->decl; + TREE_PURPOSE (copy) + = copy_fn (fundef->decl, TREE_VALUE (copy), TREE_TYPE (copy)); + current_function_decl = saved_fn; + DECL_RESULT (fundef->decl) = saved_result; + DECL_ARGUMENTS (fundef->decl) = saved_parms; + DECL_SAVED_TREE (fundef->decl) = saved_body; + } + else + { + /* We have a cached function available. */ + copy = *slot; + *slot = TREE_CHAIN (copy); + } + + return copy; +} + +/* Save the copy COPY of function FUN for later reuse by + get_fundef_copy(). By construction, there will always be an entry + to find. 
*/ + +static void +save_fundef_copy (tree fun, tree copy) +{ + tree *slot = fundef_copies_table->get (fun); + TREE_CHAIN (copy) = *slot; + *slot = copy; +} + static tree constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p, bool unshare_p); @@ -51,37 +471,1433 @@ static void non_const_var_error (location_t loc, tree r); static tree -constexpr_expression (const constexpr_ctx *ctx, tree); +eval_constant_expression (const constexpr_ctx *ctx, tree, bool, bool *, bool *, + tree * = NULL); static tree constexpr_fn_retval (const constexpr_ctx *ctx, tree r); static tree -eval_store_expression (const constexpr_ctx *ctx, tree r); +eval_store_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *); static tree -eval_call_expression (const constexpr_ctx *ctx, tree r); +eval_call_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *); static tree -eval_binary_expression (const constexpr_ctx *ctx, tree r); +eval_binary_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *); static tree get_function_named_in_call (tree t); +static tree +eval_statement_list (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target); +static tree +extract_string_elt (tree string, unsigned chars_per_elt, unsigned index); + +static tree +eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p, + tree *jump_target); + +static tree +eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p); + +static tree +eval_loop_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target); + +static tree +eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target); + +static tree +eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/, + bool *non_constant_p, bool *overflow_p); + +/* Variables and functions to manage constexpr call expansion context. + These do not need to be marked for PCH or GC. */ + +/* FIXME remember and print actual constant arguments. 
*/ +static vec<tree> call_stack; +static int call_stack_tick; +static int last_cx_error_tick; + +static int +push_cx_call_context (tree call) +{ + ++call_stack_tick; + if (!EXPR_HAS_LOCATION (call)) + SET_EXPR_LOCATION (call, input_location); + call_stack.safe_push (call); + int len = call_stack.length (); + if (len > max_constexpr_depth) + return false; + return len; +} + +static void +pop_cx_call_context (void) +{ + ++call_stack_tick; + call_stack.pop (); +} + +vec<tree> +cx_error_context (void) +{ + vec<tree> r = vNULL; + if (call_stack_tick != last_cx_error_tick && !call_stack.is_empty ()) + r = call_stack; + last_cx_error_tick = call_stack_tick; + return r; +} + +// this is ported from cxx_eval_outermost_constant_expr tree fold_expr (tree expr) { + bool allow_non_constant = false; + bool strict = true; + bool manifestly_const_eval = false; + constexpr_global_ctx global_ctx; - constexpr_ctx ctx = {&global_ctx}; + constexpr_ctx ctx + = {&global_ctx, NULL, + NULL, NULL, + NULL, NULL, + NULL, allow_non_constant, + strict, manifestly_const_eval || !allow_non_constant}; + + auto_vec<tree, 16> cleanups; + global_ctx.cleanups = &cleanups; - tree folded = constexpr_expression (&ctx, expr); + bool non_constant_p = false; + bool overflow_p = false; + + tree folded = eval_constant_expression (&ctx, expr, false, &non_constant_p, + &overflow_p); rust_assert (folded != NULL_TREE); + + // more logic here to possibly port return folded; } +static bool +same_type_ignoring_tlq_and_bounds_p (tree type1, tree type2) +{ + while (TREE_CODE (type1) == ARRAY_TYPE && TREE_CODE (type2) == ARRAY_TYPE + && (!TYPE_DOMAIN (type1) || !TYPE_DOMAIN (type2))) + { + type1 = TREE_TYPE (type1); + type2 = TREE_TYPE (type2); + } + return same_type_ignoring_top_level_qualifiers_p (type1, type2); +} + +// forked from gcc/cp/constexpr.cc cxx_union_active_member + +/* Try to determine the currently active union member for an expression + with UNION_TYPE. If it can be determined, return the FIELD_DECL, + otherwise return NULL_TREE. 
*/ + +static tree +union_active_member (const constexpr_ctx *ctx, tree t) +{ + constexpr_ctx new_ctx = *ctx; + new_ctx.quiet = true; + bool non_constant_p = false, overflow_p = false; + tree ctor = eval_constant_expression (&new_ctx, t, false, &non_constant_p, + &overflow_p); + if (TREE_CODE (ctor) == CONSTRUCTOR && CONSTRUCTOR_NELTS (ctor) == 1 + && CONSTRUCTOR_ELT (ctor, 0)->index + && TREE_CODE (CONSTRUCTOR_ELT (ctor, 0)->index) == FIELD_DECL) + return CONSTRUCTOR_ELT (ctor, 0)->index; + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc cxx_fold_indirect_ref_1 + +static tree +fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, + tree op, unsigned HOST_WIDE_INT off, bool *empty_base) +{ + tree optype = TREE_TYPE (op); + unsigned HOST_WIDE_INT const_nunits; + if (off == 0 && similar_type_p (optype, type)) + return op; + else if (TREE_CODE (optype) == COMPLEX_TYPE + && similar_type_p (type, TREE_TYPE (optype))) + { + /* *(foo *)&complexfoo => __real__ complexfoo */ + if (off == 0) + return build1_loc (loc, REALPART_EXPR, type, op); + /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ + else if (tree_to_uhwi (TYPE_SIZE_UNIT (type)) == off) + return build1_loc (loc, IMAGPART_EXPR, type, op); + } + /* ((foo*)&vectorfoo)[x] => BIT_FIELD_REF<vectorfoo,...> */ + else if (VECTOR_TYPE_P (optype) && similar_type_p (type, TREE_TYPE (optype)) + && TYPE_VECTOR_SUBPARTS (optype).is_constant (&const_nunits)) + { + unsigned HOST_WIDE_INT part_width = tree_to_uhwi (TYPE_SIZE_UNIT (type)); + unsigned HOST_WIDE_INT max_offset = part_width * const_nunits; + if (off < max_offset && off % part_width == 0) + { + tree index = bitsize_int (off * BITS_PER_UNIT); + return build3_loc (loc, BIT_FIELD_REF, type, op, TYPE_SIZE (type), + index); + } + } + /* ((foo *)&fooarray)[x] => fooarray[x] */ + else if (TREE_CODE (optype) == ARRAY_TYPE + && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (optype))) + && !integer_zerop (TYPE_SIZE_UNIT (TREE_TYPE (optype)))) + { + tree type_domain = TYPE_DOMAIN (optype); + tree min_val = size_zero_node; + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + unsigned HOST_WIDE_INT el_sz + = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (optype))); + unsigned HOST_WIDE_INT idx = off / el_sz; + unsigned HOST_WIDE_INT rem = off % el_sz; + if (tree_fits_uhwi_p (min_val)) + { + tree index = size_int (idx + tree_to_uhwi (min_val)); + op = build4_loc (loc, ARRAY_REF, TREE_TYPE (optype), op, index, + NULL_TREE, NULL_TREE); + return fold_indirect_ref_1 (ctx, loc, type, op, rem, empty_base); + } + } + /* ((foo *)&struct_with_foo_field)[x] => COMPONENT_REF */ + else if (TREE_CODE (optype) == RECORD_TYPE + || TREE_CODE (optype) == UNION_TYPE) + { + if (TREE_CODE (optype) == UNION_TYPE) + /* For unions prefer the currently active member. 
*/ + if (tree field = union_active_member (ctx, op)) + { + unsigned HOST_WIDE_INT el_sz + = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (field))); + if (off < el_sz) + { + tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field, + NULL_TREE); + if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop, off, + empty_base)) + return ret; + } + } + for (tree field = TYPE_FIELDS (optype); field; field = DECL_CHAIN (field)) + if (TREE_CODE (field) == FIELD_DECL + && TREE_TYPE (field) != error_mark_node + && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (field)))) + { + tree pos = byte_position (field); + if (!tree_fits_uhwi_p (pos)) + continue; + unsigned HOST_WIDE_INT upos = tree_to_uhwi (pos); + unsigned HOST_WIDE_INT el_sz + = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (field))); + if (upos <= off && off < upos + el_sz) + { + tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field, + NULL_TREE); + if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop, + off - upos, empty_base)) + return ret; + } + } + /* Also handle conversion to an empty base class, which + is represented with a NOP_EXPR. */ + if (is_empty_class (type) && CLASS_TYPE_P (optype)) + { + *empty_base = true; + return op; + } + } + + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc cxx_fold_indirect_ref + +/* A less strict version of fold_indirect_ref_1, which requires cv-quals to + match. We want to be less strict for simple *& folding; if we have a + non-const temporary that we access through a const pointer, that should + work. We handle this here rather than change fold_indirect_ref_1 + because we're dealing with things like ADDR_EXPR of INTEGER_CST which + don't really make sense outside of constant expression evaluation. Also + we want to allow folding to COMPONENT_REF, which could cause trouble + with TBAA in fold_indirect_ref_1. */ + +static tree +rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, + tree op0, bool *empty_base) +{ + tree sub = op0; + tree subtype; + poly_uint64 const_op01; + + /* STRIP_NOPS, but stop if REINTERPRET_CAST_P. */ + while (CONVERT_EXPR_P (sub) || TREE_CODE (sub) == NON_LVALUE_EXPR + || TREE_CODE (sub) == VIEW_CONVERT_EXPR) + { + if (TREE_CODE (sub) == NOP_EXPR && REINTERPRET_CAST_P (sub)) + return NULL_TREE; + sub = TREE_OPERAND (sub, 0); + } + + subtype = TREE_TYPE (sub); + if (!INDIRECT_TYPE_P (subtype)) + return NULL_TREE; + + /* Canonicalizes the given OBJ/OFF pair by iteratively absorbing + the innermost component into the offset until it would make the + offset positive, so that cxx_fold_indirect_ref_1 can identify + more folding opportunities. */ + auto canonicalize_obj_off = [] (tree &obj, tree &off) { + while (TREE_CODE (obj) == COMPONENT_REF + && (tree_int_cst_sign_bit (off) || integer_zerop (off))) + { + tree field = TREE_OPERAND (obj, 1); + tree pos = byte_position (field); + if (integer_zerop (off) && integer_nonzerop (pos)) + /* If the offset is already 0, keep going as long as the + component is at position 0. */ + break; + off = int_const_binop (PLUS_EXPR, off, pos); + obj = TREE_OPERAND (obj, 0); + } + }; + + if (TREE_CODE (sub) == ADDR_EXPR) + { + tree op = TREE_OPERAND (sub, 0); + tree optype = TREE_TYPE (op); + + /* *&CONST_DECL -> to the value of the const decl. */ + if (TREE_CODE (op) == CONST_DECL) + return DECL_INITIAL (op); + /* *&p => p; make sure to handle *&"str"[cst] here. 
*/ + if (similar_type_p (optype, type)) + { + tree fop = fold_read_from_constant_string (op); + if (fop) + return fop; + else + return op; + } + else + { + tree off = integer_zero_node; + canonicalize_obj_off (op, off); + gcc_assert (integer_zerop (off)); + return fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base); + } + } + else if (TREE_CODE (sub) == POINTER_PLUS_EXPR + && tree_fits_uhwi_p (TREE_OPERAND (sub, 1))) + { + tree op00 = TREE_OPERAND (sub, 0); + tree off = TREE_OPERAND (sub, 1); + + STRIP_NOPS (op00); + if (TREE_CODE (op00) == ADDR_EXPR) + { + tree obj = TREE_OPERAND (op00, 0); + canonicalize_obj_off (obj, off); + return fold_indirect_ref_1 (ctx, loc, type, obj, tree_to_uhwi (off), + empty_base); + } + } + /* *(foo *)fooarrptr => (*fooarrptr)[0] */ + else if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE + && similar_type_p (type, TREE_TYPE (TREE_TYPE (subtype)))) + { + tree type_domain; + tree min_val = size_zero_node; + tree newsub + = rs_fold_indirect_ref (ctx, loc, TREE_TYPE (subtype), sub, NULL); + if (newsub) + sub = newsub; + else + sub = build1_loc (loc, INDIRECT_REF, TREE_TYPE (subtype), sub); + type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE, + NULL_TREE); + } + + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_indirect_ref + +static tree +rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + tree orig_op0 = TREE_OPERAND (t, 0); + bool empty_base = false; + + /* We can handle a MEM_REF like an INDIRECT_REF, if MEM_REF's second + operand is an integer-zero. Otherwise reject the MEM_REF for now. */ + + if (TREE_CODE (t) == MEM_REF + && (!TREE_OPERAND (t, 1) || !integer_zerop (TREE_OPERAND (t, 1)))) + { + gcc_assert (ctx->quiet); + *non_constant_p = true; + return t; + } + + /* First try to simplify it directly. */ + tree r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t), + orig_op0, &empty_base); + if (!r) + { + /* If that didn't work, evaluate the operand first. */ + tree op0 + = eval_constant_expression (ctx, orig_op0, + /*lval*/ false, non_constant_p, overflow_p); + /* Don't VERIFY_CONSTANT here. */ + if (*non_constant_p) + return t; + + if (!lval && integer_zerop (op0)) + { + if (!ctx->quiet) + error ("dereferencing a null pointer"); + *non_constant_p = true; + return t; + } + + r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t), op0, + &empty_base); + if (r == NULL_TREE) + { + /* We couldn't fold to a constant value. Make sure it's not + something we should have been able to fold. */ + tree sub = op0; + STRIP_NOPS (sub); + if (TREE_CODE (sub) == ADDR_EXPR) + { + gcc_assert ( + !similar_type_p (TREE_TYPE (TREE_TYPE (sub)), TREE_TYPE (t))); + /* DR 1188 says we don't have to deal with this. */ + if (!ctx->quiet) + error_at (rs_expr_loc_or_input_loc (t), + "accessing value of %qE through a %qT glvalue in a " + "constant expression", + build_fold_indirect_ref (sub), TREE_TYPE (t)); + *non_constant_p = true; + return t; + } + + if (lval && op0 != orig_op0) + return build1 (INDIRECT_REF, TREE_TYPE (t), op0); + if (!lval) + VERIFY_CONSTANT (t); + return t; + } + } + + r = eval_constant_expression (ctx, r, lval, non_constant_p, overflow_p); + if (*non_constant_p) + return t; + + /* If we're pulling out the value of an empty base, just return an empty + CONSTRUCTOR. 
*/ + if (empty_base && !lval) + { + r = build_constructor (TREE_TYPE (t), NULL); + TREE_CONSTANT (r) = true; + } + + return r; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_logical_expression + +/* Subroutine of cxx_eval_constant_expression. + Evaluate a short-circuited logical expression T in the context + of a given constexpr CALL. BAILOUT_VALUE is the value for + early return. CONTINUE_VALUE is used here purely for + sanity check purposes. */ + +static tree +eval_logical_expression (const constexpr_ctx *ctx, tree t, tree bailout_value, + tree continue_value, bool lval, bool *non_constant_p, + bool *overflow_p) +{ + tree r; + tree lhs = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p); + VERIFY_CONSTANT (lhs); + if (tree_int_cst_equal (lhs, bailout_value)) + return lhs; + gcc_assert (tree_int_cst_equal (lhs, continue_value)); + r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), lval, non_constant_p, + overflow_p); + VERIFY_CONSTANT (r); + return r; +} + +// forked from gcc/cp/constexp.rcc lookup_placeholder + +/* Find the object of TYPE under initialization in CTX. */ + +static tree +lookup_placeholder (const constexpr_ctx *ctx, bool lval, tree type) +{ + if (!ctx) + return NULL_TREE; + + /* Prefer the outermost matching object, but don't cross + CONSTRUCTOR_PLACEHOLDER_BOUNDARY constructors. */ + if (ctx->ctor && !CONSTRUCTOR_PLACEHOLDER_BOUNDARY (ctx->ctor)) + if (tree outer_ob = lookup_placeholder (ctx->parent, lval, type)) + return outer_ob; + + /* We could use ctx->object unconditionally, but using ctx->ctor when we + can is a minor optimization. */ + if (!lval && ctx->ctor && same_type_p (TREE_TYPE (ctx->ctor), type)) + return ctx->ctor; + + if (!ctx->object) + return NULL_TREE; + + /* Since an object cannot have a field of its own type, we can search outward + from ctx->object to find the unique containing object of TYPE. */ + tree ob = ctx->object; + while (ob) + { + if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (ob), type)) + break; + if (handled_component_p (ob)) + ob = TREE_OPERAND (ob, 0); + else + ob = NULL_TREE; + } + + return ob; +} + +// forked from gcc/cp/constexp.rcc inline_asm_in_constexpr_error + +/* Complain about an attempt to evaluate inline assembly. */ + +static void +inline_asm_in_constexpr_error (location_t loc) +{ + auto_diagnostic_group d; + error_at (loc, "inline assembly is not a constant expression"); + inform (loc, "only unevaluated inline assembly is allowed in a " + "%<constexpr%> function in C++20"); +} + +// forked from gcc/cp/constexpr.cc verify_ctor_sanity + +/* We're about to process an initializer for a class or array TYPE. Make + sure that CTX is set up appropriately. */ + +static void +verify_ctor_sanity (const constexpr_ctx *ctx, tree type) +{ + /* We don't bother building a ctor for an empty base subobject. */ + if (is_empty_class (type)) + return; + + /* We're in the middle of an initializer that might involve placeholders; + our caller should have created a CONSTRUCTOR for us to put the + initializer into. We will either return that constructor or T. */ + gcc_assert (ctx->ctor); + gcc_assert ( + same_type_ignoring_top_level_qualifiers_p (type, TREE_TYPE (ctx->ctor))); + /* We used to check that ctx->ctor was empty, but that isn't the case when + the object is zero-initialized before calling the constructor. */ + if (ctx->object) + { + tree otype = TREE_TYPE (ctx->object); + gcc_assert (same_type_ignoring_top_level_qualifiers_p (type, otype) + /* Handle flexible array members. 
*/ + || (TREE_CODE (otype) == ARRAY_TYPE + && TYPE_DOMAIN (otype) == NULL_TREE + && TREE_CODE (type) == ARRAY_TYPE + && (same_type_ignoring_top_level_qualifiers_p ( + TREE_TYPE (type), TREE_TYPE (otype))))); + } + gcc_assert (!ctx->object || !DECL_P (ctx->object) + || *(ctx->global->values.get (ctx->object)) == ctx->ctor); +} + +// forked from gcc/cp/constexpr.cc array_index_cmp + +/* Some of the expressions fed to the constexpr mechanism are calls to + constructors, which have type void. In that case, return the type being + initialized by the constructor. */ + +static tree +initialized_type (tree t) +{ + if (TYPE_P (t)) + return t; + tree type = TREE_TYPE (t); + if (TREE_CODE (t) == CALL_EXPR) + { + /* A constructor call has void type, so we need to look deeper. */ + tree fn = get_function_named_in_call (t); + if (fn && TREE_CODE (fn) == FUNCTION_DECL && DECL_CXX_CONSTRUCTOR_P (fn)) + type = DECL_CONTEXT (fn); + } + else if (TREE_CODE (t) == COMPOUND_EXPR) + return initialized_type (TREE_OPERAND (t, 1)); + + return cv_unqualified (type); +} + +// forked from gcc/cp/constexpr.cc init_subob_ctx + +/* We're about to initialize element INDEX of an array or class from VALUE. + Set up NEW_CTX appropriately by adjusting .object to refer to the + subobject and creating a new CONSTRUCTOR if the element is itself + a class or array. */ + +static void +init_subob_ctx (const constexpr_ctx *ctx, constexpr_ctx &new_ctx, tree index, + tree &value) +{ + new_ctx = *ctx; + + if (index && TREE_CODE (index) != INTEGER_CST + && TREE_CODE (index) != FIELD_DECL && TREE_CODE (index) != RANGE_EXPR) + /* This won't have an element in the new CONSTRUCTOR. */ + return; + + tree type = initialized_type (value); + if (!AGGREGATE_TYPE_P (type) && !VECTOR_TYPE_P (type)) + /* A non-aggregate member doesn't get its own CONSTRUCTOR. */ + return; + + /* The sub-aggregate initializer might contain a placeholder; + update object to refer to the subobject and ctor to refer to + the (newly created) sub-initializer. */ + if (ctx->object) + { + if (index == NULL_TREE || TREE_CODE (index) == RANGE_EXPR) + /* There's no well-defined subobject for this index. */ + new_ctx.object = NULL_TREE; + else + { + // Faisal: commenting this out as not sure if it's needed and it's + // huge new_ctx.object = build_ctor_subob_ref (index, type, + // ctx->object); + } + } + tree elt = build_constructor (type, NULL); + CONSTRUCTOR_NO_CLEARING (elt) = true; + new_ctx.ctor = elt; + + if (TREE_CODE (value) == TARGET_EXPR) + /* Avoid creating another CONSTRUCTOR when we expand the TARGET_EXPR. */ + value = TARGET_EXPR_INITIAL (value); +} + +// forked from gcc/cp/constexpr.cc base_field_constructor_elt + +/* REF is a COMPONENT_REF designating a particular field. V is a vector of + CONSTRUCTOR elements to initialize (part of) an object containing that + field. Return a pointer to the constructor_elt corresponding to the + initialization of the field. 
*/ + +static constructor_elt * +base_field_constructor_elt (vec<constructor_elt, va_gc> *v, tree ref) +{ + tree aggr = TREE_OPERAND (ref, 0); + tree field = TREE_OPERAND (ref, 1); + HOST_WIDE_INT i; + constructor_elt *ce; + + gcc_assert (TREE_CODE (ref) == COMPONENT_REF); + + if (TREE_CODE (aggr) == COMPONENT_REF) + { + constructor_elt *base_ce = base_field_constructor_elt (v, aggr); + v = CONSTRUCTOR_ELTS (base_ce->value); + } + + for (i = 0; vec_safe_iterate (v, i, &ce); ++i) + if (ce->index == field) + return ce; + + gcc_unreachable (); + return NULL; +} + +/* Return a pointer to the constructor_elt of CTOR which matches INDEX. If no + matching constructor_elt exists, then add one to CTOR. + + As an optimization, if POS_HINT is non-negative then it is used as a guess + for the (integer) index of the matching constructor_elt within CTOR. */ + +static constructor_elt * +get_or_insert_ctor_field (tree ctor, tree index, int pos_hint = -1) +{ + /* Check the hint first. */ + if (pos_hint >= 0 && (unsigned) pos_hint < CONSTRUCTOR_NELTS (ctor) + && CONSTRUCTOR_ELT (ctor, pos_hint)->index == index) + return CONSTRUCTOR_ELT (ctor, pos_hint); + + tree type = TREE_TYPE (ctor); + if (TREE_CODE (type) == VECTOR_TYPE && index == NULL_TREE) + { + CONSTRUCTOR_APPEND_ELT (CONSTRUCTOR_ELTS (ctor), index, NULL_TREE); + return &CONSTRUCTOR_ELTS (ctor)->last (); + } + else if (TREE_CODE (type) == ARRAY_TYPE || TREE_CODE (type) == VECTOR_TYPE) + { + if (TREE_CODE (index) == RANGE_EXPR) + { + /* Support for RANGE_EXPR index lookups is currently limited to + accessing an existing element via POS_HINT, or appending a new + element to the end of CTOR. ??? Support for other access + patterns may also be needed. */ + vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor); + if (vec_safe_length (elts)) + { + tree lo = TREE_OPERAND (index, 0); + gcc_assert (array_index_cmp (elts->last ().index, lo) < 0); + } + CONSTRUCTOR_APPEND_ELT (elts, index, NULL_TREE); + return &elts->last (); + } + + HOST_WIDE_INT i = find_array_ctor_elt (ctor, index, /*insert*/ true); + gcc_assert (i >= 0); + constructor_elt *cep = CONSTRUCTOR_ELT (ctor, i); + gcc_assert (cep->index == NULL_TREE + || TREE_CODE (cep->index) != RANGE_EXPR); + return cep; + } + else + { + gcc_assert ( + TREE_CODE (index) == FIELD_DECL + && (same_type_ignoring_top_level_qualifiers_p (DECL_CONTEXT (index), + TREE_TYPE (ctor)))); + + /* We must keep the CONSTRUCTOR's ELTS in FIELD order. + Usually we meet initializers in that order, but it is + possible for base types to be placed not in program + order. */ + tree fields = TYPE_FIELDS (DECL_CONTEXT (index)); + unsigned HOST_WIDE_INT idx = 0; + constructor_elt *cep = NULL; + + /* Check if we're changing the active member of a union. */ + if (TREE_CODE (type) == UNION_TYPE && CONSTRUCTOR_NELTS (ctor) + && CONSTRUCTOR_ELT (ctor, 0)->index != index) + vec_safe_truncate (CONSTRUCTOR_ELTS (ctor), 0); + /* If the bit offset of INDEX is larger than that of the last + constructor_elt, then we can just immediately append a new + constructor_elt to the end of CTOR. */ + else if (CONSTRUCTOR_NELTS (ctor) + && tree_int_cst_compare ( + bit_position (index), + bit_position (CONSTRUCTOR_ELTS (ctor)->last ().index)) + > 0) + { + idx = CONSTRUCTOR_NELTS (ctor); + goto insert; + } + + /* Otherwise, we need to iterate over CTOR to find or insert INDEX + appropriately. 
*/ + + for (; vec_safe_iterate (CONSTRUCTOR_ELTS (ctor), idx, &cep); + idx++, fields = DECL_CHAIN (fields)) + { + if (index == cep->index) + goto found; + + /* The field we're initializing must be on the field + list. Look to see if it is present before the + field the current ELT initializes. */ + for (; fields != cep->index; fields = DECL_CHAIN (fields)) + if (index == fields) + goto insert; + } + /* We fell off the end of the CONSTRUCTOR, so insert a new + entry at the end. */ + + insert : { + constructor_elt ce = {index, NULL_TREE}; + + vec_safe_insert (CONSTRUCTOR_ELTS (ctor), idx, ce); + cep = CONSTRUCTOR_ELT (ctor, idx); + } + found:; + + return cep; + } +} + +// forked from gcc/cp/constexpr.cc cxx_eval_vector_conditional_expression + +/* Subroutine of cxx_eval_constant_expression. + Attempt to evaluate vector condition expressions. Unlike + cxx_eval_conditional_expression, VEC_COND_EXPR acts like a normal + ternary arithmetics operation, where all 3 arguments have to be + evaluated as constants and then folding computes the result from + them. */ + +static tree +eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t, + bool *non_constant_p, bool *overflow_p) +{ + tree arg1 + = eval_constant_expression (ctx, TREE_OPERAND (t, 0), + /*lval*/ false, non_constant_p, overflow_p); + VERIFY_CONSTANT (arg1); + tree arg2 + = eval_constant_expression (ctx, TREE_OPERAND (t, 1), + /*lval*/ false, non_constant_p, overflow_p); + VERIFY_CONSTANT (arg2); + tree arg3 + = eval_constant_expression (ctx, TREE_OPERAND (t, 2), + /*lval*/ false, non_constant_p, overflow_p); + VERIFY_CONSTANT (arg3); + location_t loc = EXPR_LOCATION (t); + tree type = TREE_TYPE (t); + tree r = fold_ternary_loc (loc, VEC_COND_EXPR, type, arg1, arg2, arg3); + if (r == NULL_TREE) + { + if (arg1 == TREE_OPERAND (t, 0) && arg2 == TREE_OPERAND (t, 1) + && arg3 == TREE_OPERAND (t, 2)) + r = t; + else + r = build3_loc (loc, VEC_COND_EXPR, type, arg1, arg2, arg3); + } + VERIFY_CONSTANT (r); + return r; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_bare_aggregate + +/* Subroutine of cxx_eval_constant_expression. + The expression tree T denotes a C-style array or a C-style + aggregate. Reduce it to a constant expression. */ + +static tree +eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t); + bool changed = false; + gcc_assert (!BRACE_ENCLOSED_INITIALIZER_P (t)); + tree type = TREE_TYPE (t); + + constexpr_ctx new_ctx; + if (TYPE_PTRMEMFUNC_P (type) || VECTOR_TYPE_P (type)) + { + /* We don't really need the ctx->ctor business for a PMF or + vector, but it's simpler to use the same code. */ + new_ctx = *ctx; + new_ctx.ctor = build_constructor (type, NULL); + new_ctx.object = NULL_TREE; + ctx = &new_ctx; + }; + verify_ctor_sanity (ctx, type); + vec<constructor_elt, va_gc> **p = &CONSTRUCTOR_ELTS (ctx->ctor); + vec_alloc (*p, vec_safe_length (v)); + + if (CONSTRUCTOR_PLACEHOLDER_BOUNDARY (t)) + CONSTRUCTOR_PLACEHOLDER_BOUNDARY (ctx->ctor) = 1; + + unsigned i; + tree index, value; + bool constant_p = true; + bool side_effects_p = false; + FOR_EACH_CONSTRUCTOR_ELT (v, i, index, value) + { + tree orig_value = value; + /* Like in cxx_eval_store_expression, omit entries for empty fields. 
*/ + bool no_slot = TREE_CODE (type) == RECORD_TYPE && is_empty_field (index); + if (no_slot) + new_ctx = *ctx; + else + init_subob_ctx (ctx, new_ctx, index, value); + int pos_hint = -1; + if (new_ctx.ctor != ctx->ctor) + { + /* If we built a new CONSTRUCTOR, attach it now so that other + initializers can refer to it. */ + constructor_elt *cep = get_or_insert_ctor_field (ctx->ctor, index); + cep->value = new_ctx.ctor; + pos_hint = cep - (*p)->begin (); + } + else if (TREE_CODE (type) == UNION_TYPE) + /* Otherwise if we're constructing a non-aggregate union member, set + the active union member now so that we can later detect and diagnose + if its initializer attempts to activate another member. */ + get_or_insert_ctor_field (ctx->ctor, index); + tree elt = eval_constant_expression (&new_ctx, value, lval, + non_constant_p, overflow_p); + /* Don't VERIFY_CONSTANT here. */ + if (ctx->quiet && *non_constant_p) + break; + if (elt != orig_value) + changed = true; + + if (!TREE_CONSTANT (elt)) + constant_p = false; + if (TREE_SIDE_EFFECTS (elt)) + side_effects_p = true; + if (index && TREE_CODE (index) == COMPONENT_REF) + { + /* This is an initialization of a vfield inside a base + subaggregate that we already initialized; push this + initialization into the previous initialization. */ + constructor_elt *inner = base_field_constructor_elt (*p, index); + inner->value = elt; + changed = true; + } + else if (index + && (TREE_CODE (index) == NOP_EXPR + || TREE_CODE (index) == POINTER_PLUS_EXPR)) + { + /* This is an initializer for an empty base; now that we've + checked that it's constant, we can ignore it. */ + gcc_assert (is_empty_class (TREE_TYPE (TREE_TYPE (index)))); + changed = true; + } + else if (no_slot) + changed = true; + else + { + if (TREE_CODE (type) == UNION_TYPE && (*p)->last ().index != index) + /* The initializer erroneously changed the active union member that + we're initializing. */ + gcc_assert (*non_constant_p); + else + { + /* The initializer might have mutated the underlying CONSTRUCTOR, + so recompute the location of the target constructer_elt. */ + constructor_elt *cep + = get_or_insert_ctor_field (ctx->ctor, index, pos_hint); + cep->value = elt; + } + + /* Adding or replacing an element might change the ctor's flags. */ + TREE_CONSTANT (ctx->ctor) = constant_p; + TREE_SIDE_EFFECTS (ctx->ctor) = side_effects_p; + } + } + if (*non_constant_p || !changed) + return t; + t = ctx->ctor; + /* We're done building this CONSTRUCTOR, so now we can interpret an + element without an explicit initializer as value-initialized. */ + CONSTRUCTOR_NO_CLEARING (t) = false; + TREE_CONSTANT (t) = constant_p; + TREE_SIDE_EFFECTS (t) = side_effects_p; + if (VECTOR_TYPE_P (type)) + t = fold (t); + return t; +} + +/* Subroutine of cxx_eval_constant_expression. + Like cxx_eval_unary_expression, except for trinary expressions. */ + +static tree +cxx_eval_trinary_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + int i; + tree args[3]; + tree val; + + for (i = 0; i < 3; i++) + { + args[i] = eval_constant_expression (ctx, TREE_OPERAND (t, i), lval, + non_constant_p, overflow_p); + VERIFY_CONSTANT (args[i]); + } + + val = fold_ternary_loc (EXPR_LOCATION (t), TREE_CODE (t), TREE_TYPE (t), + args[0], args[1], args[2]); + if (val == NULL_TREE) + return t; + VERIFY_CONSTANT (val); + return val; +} + +/* Return true if T is a valid constant initializer. If a CONSTRUCTOR + initializes all the members, the CONSTRUCTOR_NO_CLEARING flag will be + cleared. 
+ FIXME speed this up, it's taking 16% of compile time on sieve testcase. */ + +bool +reduced_constant_expression_p (tree t) +{ + if (t == NULL_TREE) + return false; + + switch (TREE_CODE (t)) + { + case PTRMEM_CST: + /* Even if we can't lower this yet, it's constant. */ + return true; + + case CONSTRUCTOR: + /* And we need to handle PTRMEM_CST wrapped in a CONSTRUCTOR. */ + tree field; + if (CONSTRUCTOR_NO_CLEARING (t)) + { + if (TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) + /* An initialized vector would have a VECTOR_CST. */ + return false; + else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE) + { + /* There must be a valid constant initializer at every array + index. */ + tree min = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (t))); + tree max = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))); + tree cursor = min; + for (auto &e : CONSTRUCTOR_ELTS (t)) + { + if (!reduced_constant_expression_p (e.value)) + return false; + if (array_index_cmp (cursor, e.index) != 0) + return false; + if (TREE_CODE (e.index) == RANGE_EXPR) + cursor = TREE_OPERAND (e.index, 1); + cursor = int_const_binop (PLUS_EXPR, cursor, size_one_node); + } + if (find_array_ctor_elt (t, max) == -1) + return false; + goto ok; + } + else if (TREE_CODE (TREE_TYPE (t)) == UNION_TYPE) + { + if (CONSTRUCTOR_NELTS (t) == 0) + /* An initialized union has a constructor element. */ + return false; + /* And it only initializes one member. */ + field = NULL_TREE; + } + else + field = next_initializable_field (TYPE_FIELDS (TREE_TYPE (t))); + } + else + field = NULL_TREE; + for (auto &e : CONSTRUCTOR_ELTS (t)) + { + /* If VAL is null, we're in the middle of initializing this + element. */ + if (!reduced_constant_expression_p (e.value)) + return false; + /* Empty class field may or may not have an initializer. */ + for (; field && e.index != field; + field = next_initializable_field (DECL_CHAIN (field))) + if (!is_really_empty_class (TREE_TYPE (field), + /*ignore_vptr*/ false)) + return false; + if (field) + field = next_initializable_field (DECL_CHAIN (field)); + } + /* There could be a non-empty field at the end. */ + for (; field; field = next_initializable_field (DECL_CHAIN (field))) + if (!is_really_empty_class (TREE_TYPE (field), /*ignore_vptr*/ false)) + return false; + ok: + if (CONSTRUCTOR_NO_CLEARING (t)) + /* All the fields are initialized. */ + CONSTRUCTOR_NO_CLEARING (t) = false; + return true; + + default: + /* FIXME are we calling this too much? */ + return initializer_constant_valid_p (t, TREE_TYPE (t)) != NULL_TREE; + } +} + +/* TEMP is the constant value of a temporary object of type TYPE. Adjust + the type of the value to match. */ + +static tree +adjust_temp_type (tree type, tree temp) +{ + if (same_type_p (TREE_TYPE (temp), type)) + return temp; + + gcc_assert (scalarish_type_p (type)); + /* Now we know we're dealing with a scalar, and a prvalue of non-class + type is cv-unqualified. */ + return fold_convert (cv_unqualified (type), temp); +} + +// forked from gcc/cp/constexpr.cc free_constructor + +/* If T is a CONSTRUCTOR, ggc_free T and any sub-CONSTRUCTORs. 
*/ + +static void +free_constructor (tree t) +{ + if (!t || TREE_CODE (t) != CONSTRUCTOR) + return; + releasing_vec ctors; + vec_safe_push (ctors, t); + while (!ctors->is_empty ()) + { + tree c = ctors->pop (); + if (vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (c)) + { + constructor_elt *ce; + for (HOST_WIDE_INT i = 0; vec_safe_iterate (elts, i, &ce); ++i) + if (TREE_CODE (ce->value) == CONSTRUCTOR) + vec_safe_push (ctors, ce->value); + ggc_free (elts); + } + ggc_free (c); + } +} + +static tree +eval_and_check_array_index (const constexpr_ctx *ctx, tree t, + bool allow_one_past, bool *non_constant_p, + bool *overflow_p); + +// forked from gcc/cp/constexpr.cc cxx_eval_array_reference + +/* Subroutine of cxx_eval_constant_expression. + Attempt to reduce a reference to an array slot. */ + +static tree +eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + tree oldary = TREE_OPERAND (t, 0); + tree ary + = eval_constant_expression (ctx, oldary, lval, non_constant_p, overflow_p); + if (*non_constant_p) + return t; + if (!lval && TREE_CODE (ary) == VIEW_CONVERT_EXPR + && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (ary, 0))) + && TREE_TYPE (t) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (ary, 0)))) + ary = TREE_OPERAND (ary, 0); + + tree oldidx = TREE_OPERAND (t, 1); + tree index + = eval_and_check_array_index (ctx, t, lval, non_constant_p, overflow_p); + if (*non_constant_p) + return t; + + if (lval && ary == oldary && index == oldidx) + return t; + else if (lval) + return build4 (ARRAY_REF, TREE_TYPE (t), ary, index, NULL, NULL); + + unsigned len = 0, elem_nchars = 1; + tree elem_type = TREE_TYPE (TREE_TYPE (ary)); + if (TREE_CODE (ary) == CONSTRUCTOR) + len = CONSTRUCTOR_NELTS (ary); + else if (TREE_CODE (ary) == STRING_CST) + { + elem_nchars + = (TYPE_PRECISION (elem_type) / TYPE_PRECISION (char_type_node)); + len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars; + } + else if (TREE_CODE (ary) == VECTOR_CST) + /* We don't create variable-length VECTOR_CSTs. */ + len = VECTOR_CST_NELTS (ary).to_constant (); + else + { + /* We can't do anything with other tree codes, so use + VERIFY_CONSTANT to complain and fail. */ + VERIFY_CONSTANT (ary); + gcc_unreachable (); + } + + bool found; + HOST_WIDE_INT i = 0; + if (TREE_CODE (ary) == CONSTRUCTOR) + { + HOST_WIDE_INT ix = find_array_ctor_elt (ary, index); + found = (ix >= 0); + if (found) + i = ix; + } + else + { + i = tree_to_shwi (index); + found = (i < len); + } + + if (found) + { + tree r; + if (TREE_CODE (ary) == CONSTRUCTOR) + r = (*CONSTRUCTOR_ELTS (ary))[i].value; + else if (TREE_CODE (ary) == VECTOR_CST) + r = VECTOR_CST_ELT (ary, i); + else + r = extract_string_elt (ary, elem_nchars, i); + + if (r) + /* Don't VERIFY_CONSTANT here. */ + return r; + + /* Otherwise the element doesn't have a value yet. */ + } + + /* Not found. */ + + if (TREE_CODE (ary) == CONSTRUCTOR && CONSTRUCTOR_NO_CLEARING (ary)) + { + /* 'ary' is part of the aggregate initializer we're currently + building; if there's no initializer for this element yet, + that's an error. */ + if (!ctx->quiet) + error ("accessing uninitialized array element"); + *non_constant_p = true; + return t; + } + + /* If it's within the array bounds but doesn't have an explicit + initializer, it's initialized from {}. But use build_value_init + directly for non-aggregates to avoid creating a garbage CONSTRUCTOR. 
*/ + tree val = NULL_TREE; + sorry ("array size expression is not supported yet"); + + constexpr_ctx new_ctx; + if (is_really_empty_class (elem_type, /*ignore_vptr*/ false)) + return build_constructor (elem_type, NULL); + // else if (CP_AGGREGATE_TYPE_P (elem_type)) + // { + // tree empty_ctor = build_constructor (init_list_type_node, NULL); + // //val = digest_init (elem_type, empty_ctor, tf_warning_or_error); + // } + // else + // val = build_value_init (elem_type, tf_warning_or_error); + + if (!SCALAR_TYPE_P (elem_type)) + { + new_ctx = *ctx; + if (ctx->object) + /* If there was no object, don't add one: it could confuse us + into thinking we're modifying a const object. */ + new_ctx.object = t; + new_ctx.ctor = build_constructor (elem_type, NULL); + ctx = &new_ctx; + } + t = eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p); + if (!SCALAR_TYPE_P (elem_type) && t != ctx->ctor) + free_constructor (ctx->ctor); + return t; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_component_reference + +/* Subroutine of cxx_eval_constant_expression. + Attempt to reduce a field access of a value of class type. */ + +static tree +eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + unsigned HOST_WIDE_INT i; + tree field; + tree value; + tree part = TREE_OPERAND (t, 1); + tree orig_whole = TREE_OPERAND (t, 0); + tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p, + overflow_p); + if (INDIRECT_REF_P (whole) && integer_zerop (TREE_OPERAND (whole, 0))) + { + if (!ctx->quiet) + error ("dereferencing a null pointer in %qE", orig_whole); + *non_constant_p = true; + return t; + } + + if (whole == orig_whole) + return t; + if (lval) + return fold_build3 (COMPONENT_REF, TREE_TYPE (t), whole, part, NULL_TREE); + /* Don't VERIFY_CONSTANT here; we only want to check that we got a + CONSTRUCTOR. */ + if (!*non_constant_p && TREE_CODE (whole) != CONSTRUCTOR) + { + if (!ctx->quiet) + error ("%qE is not a constant expression", orig_whole); + *non_constant_p = true; + } + if (DECL_MUTABLE_P (part)) + { + if (!ctx->quiet) + error ("mutable %qD is not usable in a constant expression", part); + *non_constant_p = true; + } + if (*non_constant_p) + return t; + bool pmf = TYPE_PTRMEMFUNC_P (TREE_TYPE (whole)); + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (whole), i, field, value) + { + /* Use name match for PMF fields, as a variant will have a + different FIELD_DECL with a different type. */ + if (pmf ? DECL_NAME (field) == DECL_NAME (part) : field == part) + { + if (value) + { + STRIP_ANY_LOCATION_WRAPPER (value); + return value; + } + else + /* We're in the middle of initializing it. */ + break; + } + } + if (TREE_CODE (TREE_TYPE (whole)) == UNION_TYPE + && CONSTRUCTOR_NELTS (whole) > 0) + { + /* DR 1188 says we don't have to deal with this. */ + if (!ctx->quiet) + { + constructor_elt *cep = CONSTRUCTOR_ELT (whole, 0); + if (cep->value == NULL_TREE) + error ("accessing uninitialized member %qD", part); + else + error ("accessing %qD member instead of initialized %qD member in " + "constant expression", + part, cep->index); + } + *non_constant_p = true; + return t; + } + + /* We only create a CONSTRUCTOR for a subobject when we modify it, so empty + classes never get represented; throw together a value now. 
*/ + if (is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/ false)) + return build_constructor (TREE_TYPE (t), NULL); + + gcc_assert (DECL_CONTEXT (part) == TYPE_MAIN_VARIANT (TREE_TYPE (whole))); + + if (CONSTRUCTOR_NO_CLEARING (whole)) + { + /* 'whole' is part of the aggregate initializer we're currently + building; if there's no initializer for this member yet, that's an + error. */ + if (!ctx->quiet) + error ("accessing uninitialized member %qD", part); + *non_constant_p = true; + return t; + } + + value = NULL_TREE; + sorry ("constant folding not supported for this tree code"); + /* If there's no explicit init for this field, it's value-initialized. */ + // Faisal: commenting this out as not sure if we need this but we need to come + // back to handle this to assign suitable value to value before sending it in + // eval_constant_expression below + // value = build_value_init (TREE_TYPE (t), tf_warning_or_error); + return eval_constant_expression (ctx, value, lval, non_constant_p, + overflow_p); +} + +/* Subroutine of cxx_eval_statement_list. Determine whether the statement + STMT matches *jump_target. If we're looking for a case label and we see + the default label, note it in ctx->css_state. */ + +static bool +label_matches (const constexpr_ctx *ctx, tree *jump_target, tree stmt) +{ + switch (TREE_CODE (*jump_target)) + { + case LABEL_DECL: + if (TREE_CODE (stmt) == LABEL_EXPR + && LABEL_EXPR_LABEL (stmt) == *jump_target) + return true; + break; + + case INTEGER_CST: + if (TREE_CODE (stmt) == CASE_LABEL_EXPR) + { + gcc_assert (ctx->css_state != NULL); + if (!CASE_LOW (stmt)) + { + /* default: should appear just once in a SWITCH_EXPR + body (excluding nested SWITCH_EXPR). */ + gcc_assert (*ctx->css_state != css_default_seen); + /* When evaluating SWITCH_EXPR body for the second time, + return true for the default: label. */ + if (*ctx->css_state == css_default_processing) + return true; + *ctx->css_state = css_default_seen; + } + else if (CASE_HIGH (stmt)) + { + if (tree_int_cst_le (CASE_LOW (stmt), *jump_target) + && tree_int_cst_le (*jump_target, CASE_HIGH (stmt))) + return true; + } + else if (tree_int_cst_equal (*jump_target, CASE_LOW (stmt))) + return true; + } + break; + + case BREAK_STMT: + case CONTINUE_STMT: + /* These two are handled directly in cxx_eval_loop_expr by testing + breaks (jump_target) or continues (jump_target). */ + break; + + default: + gcc_unreachable (); + } + return false; +} + static tree -constexpr_expression (const constexpr_ctx *ctx, tree t) +eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p, + tree *jump_target /* = NULL */) { + if (jump_target && *jump_target) + { + /* If we are jumping, ignore all statements/expressions except those + that could have LABEL_EXPR or CASE_LABEL_EXPR in their bodies. */ + switch (TREE_CODE (t)) + { + case BIND_EXPR: + case STATEMENT_LIST: + case LOOP_EXPR: + case COND_EXPR: + case IF_STMT: + case DO_STMT: + case WHILE_STMT: + case FOR_STMT: + break; + case LABEL_EXPR: + case CASE_LABEL_EXPR: + if (label_matches (ctx, jump_target, t)) + /* Found it. 
*/ + *jump_target = NULL_TREE; + return NULL_TREE; + default: + return NULL_TREE; + } + } + if (error_operand_p (t)) + { + *non_constant_p = true; + return t; + } + location_t loc = EXPR_LOCATION (t); if (CONSTANT_CLASS_P (t)) @@ -107,11 +1923,33 @@ constexpr_expression (const constexpr_ctx *ctx, tree t) return t; } + constexpr_ctx new_ctx; tree r = t; tree_code tcode = TREE_CODE (t); switch (tcode) { + case VAR_DECL: + if (DECL_HAS_VALUE_EXPR_P (t)) + { + r = DECL_VALUE_EXPR (t); + return eval_constant_expression (ctx, r, lval, non_constant_p, + overflow_p); + } + /* fall through */ case CONST_DECL: { + /* We used to not check lval for CONST_DECL, but darwin.cc uses + CONST_DECL for aggregate constants. */ + if (lval) + return t; + else if (t == ctx->object) + return ctx->ctor; + if (VAR_P (t)) + if (tree *p = ctx->global->values.get (t)) + if (*p != NULL_TREE) + { + r = *p; + break; + } r = decl_constant_value (t, /*unshare_p=*/false); if (TREE_CODE (r) == TARGET_EXPR && TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR) @@ -124,6 +1962,28 @@ constexpr_expression (const constexpr_ctx *ctx, tree t) } break; + case PARM_DECL: + if (lval && !TYPE_REF_P (TREE_TYPE (t))) + /* glvalue use. */; + else if (tree *p = ctx->global->values.get (r)) + r = *p; + else if (lval) + /* Defer in case this is only used for its type. */; + else if (COMPLETE_TYPE_P (TREE_TYPE (t)) + && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/ false)) + { + /* If the class is empty, we aren't actually loading anything. */ + r = build_constructor (TREE_TYPE (t), NULL); + TREE_CONSTANT (r) = true; + } + else + { + if (!ctx->quiet) + error ("%qE is not a constant expression", t); + *non_constant_p = true; + } + break; + case POINTER_PLUS_EXPR: case POINTER_DIFF_EXPR: case PLUS_EXPR: @@ -165,22 +2025,577 @@ constexpr_expression (const constexpr_ctx *ctx, tree t) case LTGT_EXPR: case RANGE_EXPR: case COMPLEX_EXPR: - r = eval_binary_expression (ctx, t); + r = eval_binary_expression (ctx, t, lval, non_constant_p, overflow_p); + break; + + /* fold can introduce non-IF versions of these; still treat them as + short-circuiting. */ + case TRUTH_AND_EXPR: + case TRUTH_ANDIF_EXPR: + r = eval_logical_expression (ctx, t, boolean_false_node, + boolean_true_node, lval, non_constant_p, + overflow_p); + break; + + case TRUTH_OR_EXPR: + case TRUTH_ORIF_EXPR: + r = eval_logical_expression (ctx, t, boolean_true_node, + boolean_false_node, lval, non_constant_p, + overflow_p); + break; + + case TARGET_EXPR: { + tree type = TREE_TYPE (t); + + if (!literal_type_p (type)) + { + if (!ctx->quiet) + { + auto_diagnostic_group d; + error ("temporary of non-literal type %qT in a " + "constant expression", + type); + explain_non_literal_class (type); + } + *non_constant_p = true; + break; + } + gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (t)); + /* Avoid evaluating a TARGET_EXPR more than once. */ + tree slot = TARGET_EXPR_SLOT (t); + if (tree *p = ctx->global->values.get (slot)) + { + if (lval) + return slot; + r = *p; + break; + } + if ((AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))) + { + /* We're being expanded without an explicit target, so start + initializing a new object; expansion with an explicit target + strips the TARGET_EXPR before we get here. */ + new_ctx = *ctx; + /* Link CTX to NEW_CTX so that lookup_placeholder can resolve + any PLACEHOLDER_EXPR within the initializer that refers to the + former object under construction. 
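   (Illustrative, based on the C++ front end this code is forked from: a
   PLACEHOLDER_EXPR stands for the object currently being initialized, as
   happens when a member initializer refers back to the enclosing object,
   e.g. "struct S { int a = 1; int b = a + 1; };" - the use of "a" inside
   the initializer of "b" is resolved against the S object under
   construction via lookup_placeholder.)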
*/ + new_ctx.parent = ctx; + new_ctx.ctor = build_constructor (type, NULL); + CONSTRUCTOR_NO_CLEARING (new_ctx.ctor) = true; + new_ctx.object = slot; + ctx->global->values.put (new_ctx.object, new_ctx.ctor); + ctx = &new_ctx; + } + /* Pass false for 'lval' because this indicates + initialization of a temporary. */ + r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), false, + non_constant_p, overflow_p); + if (*non_constant_p) + break; + /* Adjust the type of the result to the type of the temporary. */ + r = adjust_temp_type (type, r); + if (TARGET_EXPR_CLEANUP (t) && !CLEANUP_EH_ONLY (t)) + ctx->global->cleanups->safe_push (TARGET_EXPR_CLEANUP (t)); + r = unshare_constructor (r); + ctx->global->values.put (slot, r); + if (ctx->save_exprs) + ctx->save_exprs->safe_push (slot); + if (lval) + return slot; + } break; case CALL_EXPR: - r = eval_call_expression (ctx, t); + r = eval_call_expression (ctx, t, lval, non_constant_p, overflow_p); break; case RETURN_EXPR: - rust_assert (TREE_OPERAND (t, 0) != NULL_TREE); - r = constexpr_expression (ctx, TREE_OPERAND (t, 0)); + if (TREE_OPERAND (t, 0) != NULL_TREE) + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p); + /* FALLTHRU */ + case BREAK_STMT: + case CONTINUE_STMT: + if (jump_target) + *jump_target = t; + else + { + /* Can happen with ({ return true; }) && false; passed to + maybe_constant_value. There is nothing to jump over in this + case, and the bug will be diagnosed later. */ + gcc_assert (ctx->quiet); + *non_constant_p = true; + } + break; + + case DECL_EXPR: { + r = DECL_EXPR_DECL (t); + + if (AGGREGATE_TYPE_P (TREE_TYPE (r)) || VECTOR_TYPE_P (TREE_TYPE (r))) + { + new_ctx = *ctx; + new_ctx.object = r; + new_ctx.ctor = build_constructor (TREE_TYPE (r), NULL); + CONSTRUCTOR_NO_CLEARING (new_ctx.ctor) = true; + ctx->global->values.put (r, new_ctx.ctor); + ctx = &new_ctx; + } + + if (tree init = DECL_INITIAL (r)) + { + init = eval_constant_expression (ctx, init, false, non_constant_p, + overflow_p); + /* Don't share a CONSTRUCTOR that might be changed. */ + init = unshare_constructor (init); + /* Remember that a constant object's constructor has already + run. */ + if (CLASS_TYPE_P (TREE_TYPE (r)) && RS_TYPE_CONST_P (TREE_TYPE (r))) + TREE_READONLY (init) = true; + ctx->global->values.put (r, init); + } + else if (ctx == &new_ctx) + /* We gave it a CONSTRUCTOR above. */; + else + ctx->global->values.put (r, NULL_TREE); + } + break; + + /* These differ from cxx_eval_unary_expression in that this doesn't + check for a constant operand or result; an address can be + constant without its operand being, and vice versa. */ + case MEM_REF: + case INDIRECT_REF: + r = rs_eval_indirect_ref (ctx, t, lval, non_constant_p, overflow_p); + break; + + case VEC_PERM_EXPR: + r = cxx_eval_trinary_expression (ctx, t, lval, non_constant_p, + overflow_p); + break; + + case PAREN_EXPR: + gcc_assert (!REF_PARENTHESIZED_P (t)); + /* A PAREN_EXPR resulting from __builtin_assoc_barrier has no effect in + constant expressions since it's unaffected by -fassociative-math. 
*/ + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p); break; case MODIFY_EXPR: - r = eval_store_expression (ctx, t); + r = eval_store_expression (ctx, t, false, non_constant_p, overflow_p); + break; + + case STATEMENT_LIST: + new_ctx = *ctx; + new_ctx.ctor = new_ctx.object = NULL_TREE; + return eval_statement_list (&new_ctx, t, non_constant_p, overflow_p, + jump_target); + + case BIND_EXPR: + return eval_constant_expression (ctx, BIND_EXPR_BODY (t), lval, + non_constant_p, overflow_p, jump_target); + + case OBJ_TYPE_REF: + /* Virtual function lookup. We don't need to do anything fancy. */ + return eval_constant_expression (ctx, OBJ_TYPE_REF_EXPR (t), lval, + non_constant_p, overflow_p); + + case EXIT_EXPR: { + tree cond = TREE_OPERAND (t, 0); + cond = eval_constant_expression (ctx, cond, /*lval*/ false, + non_constant_p, overflow_p); + VERIFY_CONSTANT (cond); + if (integer_nonzerop (cond)) + *jump_target = t; + } + break; + + case RESULT_DECL: + if (lval) + return t; + /* We ask for an rvalue for the RESULT_DECL when indirecting + through an invisible reference, or in named return value + optimization. */ + if (tree *p = ctx->global->values.get (t)) + return *p; + else + { + if (!ctx->quiet) + error ("%qE is not a constant expression", t); + *non_constant_p = true; + } + break; + + case SAVE_EXPR: + /* Avoid evaluating a SAVE_EXPR more than once. */ + if (tree *p = ctx->global->values.get (t)) + r = *p; + else + { + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), false, + non_constant_p, overflow_p); + if (*non_constant_p) + break; + ctx->global->values.put (t, r); + if (ctx->save_exprs) + ctx->save_exprs->safe_push (t); + } + break; + + case ADDR_EXPR: { + tree oldop = TREE_OPERAND (t, 0); + tree op = eval_constant_expression (ctx, oldop, + /*lval*/ true, non_constant_p, + overflow_p); + /* Don't VERIFY_CONSTANT here. */ + if (*non_constant_p) + return t; + gcc_checking_assert (TREE_CODE (op) != CONSTRUCTOR); + /* This function does more aggressive folding than fold itself. */ + r = build_fold_addr_expr_with_type (op, TREE_TYPE (t)); + if (TREE_CODE (r) == ADDR_EXPR && TREE_OPERAND (r, 0) == oldop) + { + ggc_free (r); + return t; + } + break; + } + + case COMPOUND_EXPR: { + /* check_return_expr sometimes wraps a TARGET_EXPR in a + COMPOUND_EXPR; don't get confused. Also handle EMPTY_CLASS_EXPR + introduced by build_call_a. */ + tree op0 = TREE_OPERAND (t, 0); + tree op1 = TREE_OPERAND (t, 1); + STRIP_NOPS (op1); + if ((TREE_CODE (op0) == TARGET_EXPR && op1 == TARGET_EXPR_SLOT (op0)) + || TREE_CODE (op1) == EMPTY_CLASS_EXPR) + r = eval_constant_expression (ctx, op0, lval, non_constant_p, + overflow_p, jump_target); + else + { + /* Check that the LHS is constant and then discard it. 
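   For example, in "(f (), 42)" the call f () must itself be a constant
   expression even though only 42 contributes to the value.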
*/ + eval_constant_expression (ctx, op0, true, non_constant_p, + overflow_p, jump_target); + if (*non_constant_p) + return t; + op1 = TREE_OPERAND (t, 1); + r = eval_constant_expression (ctx, op1, lval, non_constant_p, + overflow_p, jump_target); + } + } + break; + + case REALPART_EXPR: + case IMAGPART_EXPR: + if (lval) + { + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p); + if (r == error_mark_node) + ; + else if (r == TREE_OPERAND (t, 0)) + r = t; + else + r = fold_build1 (TREE_CODE (t), TREE_TYPE (t), r); + break; + } + /* FALLTHRU */ + case CONJ_EXPR: + case FIX_TRUNC_EXPR: + case FLOAT_EXPR: + case NEGATE_EXPR: + case ABS_EXPR: + case ABSU_EXPR: + case BIT_NOT_EXPR: + case TRUTH_NOT_EXPR: + case FIXED_CONVERT_EXPR: + r = eval_unary_expression (ctx, t, lval, non_constant_p, overflow_p); + break; + + case LOOP_EXPR: + case WHILE_STMT: + case FOR_STMT: + eval_loop_expr (ctx, t, non_constant_p, overflow_p, jump_target); + break; + + case SWITCH_EXPR: + case SWITCH_STMT: + eval_switch_expr (ctx, t, non_constant_p, overflow_p, jump_target); + break; + + case ARRAY_REF: + r = eval_array_reference (ctx, t, lval, non_constant_p, overflow_p); + break; + + case COMPONENT_REF: + if (is_overloaded_fn (t)) + { + /* We can only get here in checking mode via + build_non_dependent_expr, because any expression that + calls or takes the address of the function will have + pulled a FUNCTION_DECL out of the COMPONENT_REF. */ + gcc_checking_assert (ctx->quiet || errorcount); + *non_constant_p = true; + return t; + } + r = eval_component_reference (ctx, t, lval, non_constant_p, overflow_p); + break; + + case BIT_FIELD_REF: + r = eval_bit_field_ref (ctx, t, lval, non_constant_p, overflow_p); + break; + + case COND_EXPR: + case IF_STMT: // comes from cp-tree.def + if (jump_target && *jump_target) + { + tree orig_jump = *jump_target; + tree arg = ((TREE_CODE (t) != IF_STMT || TREE_OPERAND (t, 1)) + ? TREE_OPERAND (t, 1) + : void_node); + /* When jumping to a label, the label might be either in the + then or else blocks, so process then block first in skipping + mode first, and if we are still in the skipping mode at its end, + process the else block too. */ + r = eval_constant_expression (ctx, arg, lval, non_constant_p, + overflow_p, jump_target); + /* It's possible that we found the label in the then block. But + it could have been followed by another jumping statement, e.g. + say we're looking for case 1: + if (cond) + { + // skipped statements + case 1:; // clears up *jump_target + return 1; // and sets it to a RETURN_EXPR + } + else { ... } + in which case we need not go looking to the else block. + (goto is not allowed in a constexpr function.) */ + if (*jump_target == orig_jump) + { + arg = ((TREE_CODE (t) != IF_STMT || TREE_OPERAND (t, 2)) + ? 
TREE_OPERAND (t, 2) + : void_node); + r = eval_constant_expression (ctx, arg, lval, non_constant_p, + overflow_p, jump_target); + } + break; + } + r = eval_conditional_expression (ctx, t, lval, non_constant_p, overflow_p, + jump_target); + break; + + case VEC_COND_EXPR: + r = eval_vector_conditional_expression (ctx, t, non_constant_p, + overflow_p); + break; + + case TRY_CATCH_EXPR: + if (TREE_OPERAND (t, 0) == NULL_TREE) + { + r = void_node; + break; + } + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p, jump_target); + break; + + case CLEANUP_POINT_EXPR: { + auto_vec<tree, 2> cleanups; + vec<tree> *prev_cleanups = ctx->global->cleanups; + ctx->global->cleanups = &cleanups; + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p, jump_target); + ctx->global->cleanups = prev_cleanups; + unsigned int i; + tree cleanup; + /* Evaluate the cleanups. */ + FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup) + eval_constant_expression (ctx, cleanup, false, non_constant_p, + overflow_p); + } + break; + + case TRY_FINALLY_EXPR: + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p, jump_target); + if (!*non_constant_p) + /* Also evaluate the cleanup. */ + eval_constant_expression (ctx, TREE_OPERAND (t, 1), true, + non_constant_p, overflow_p); + break; + + case CONSTRUCTOR: + if (TREE_CONSTANT (t) && reduced_constant_expression_p (t)) + { + /* Don't re-process a constant CONSTRUCTOR, but do fold it to + VECTOR_CST if applicable. */ + verify_constructor_flags (t); + if (TREE_CONSTANT (t)) + return fold (t); + } + r = eval_bare_aggregate (ctx, t, lval, non_constant_p, overflow_p); + break; + + /* FALLTHROUGH. */ + case NOP_EXPR: + case CONVERT_EXPR: + case VIEW_CONVERT_EXPR: { + tree oldop = TREE_OPERAND (t, 0); + + tree op = eval_constant_expression (ctx, oldop, lval, non_constant_p, + overflow_p); + if (*non_constant_p) + return t; + tree type = TREE_TYPE (t); + + if (VOID_TYPE_P (type)) + return void_node; + + if (TREE_CODE (t) == CONVERT_EXPR && ARITHMETIC_TYPE_P (type) + && INDIRECT_TYPE_P (TREE_TYPE (op)) && ctx->manifestly_const_eval) + { + if (!ctx->quiet) + error_at (loc, + "conversion from pointer type %qT to arithmetic type " + "%qT in a constant expression", + TREE_TYPE (op), type); + *non_constant_p = true; + return t; + } + + if (TYPE_PTROB_P (type) && TYPE_PTR_P (TREE_TYPE (op)) + && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (op)))) + { + /* Likewise, don't error when casting from void* when OP is + &heap uninit and similar. 
*/ + tree sop = tree_strip_nop_conversions (op); + if (TREE_CODE (sop) == ADDR_EXPR && VAR_P (TREE_OPERAND (sop, 0)) + && DECL_ARTIFICIAL (TREE_OPERAND (sop, 0))) + /* OK */; + else + { + if (!ctx->quiet) + error_at (loc, "cast from %qT is not allowed", + TREE_TYPE (op)); + *non_constant_p = true; + return t; + } + } + + if (INDIRECT_TYPE_P (type) && TREE_CODE (op) == INTEGER_CST) + { + if (integer_zerop (op)) + { + if (TYPE_REF_P (type)) + { + if (!ctx->quiet) + error_at (loc, "dereferencing a null pointer"); + *non_constant_p = true; + return t; + } + } + else + { + /* This detects for example: + reinterpret_cast<void*>(sizeof 0) + */ + if (!ctx->quiet) + error_at (loc, + "%<reinterpret_cast<%T>(%E)%> is not " + "a constant expression", + type, op); + *non_constant_p = true; + return t; + } + } + + if (INDIRECT_TYPE_P (type) && TREE_CODE (op) == NOP_EXPR + && TREE_TYPE (op) == ptr_type_node + && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR + && VAR_P (TREE_OPERAND (TREE_OPERAND (op, 0), 0)) + && (DECL_NAME (TREE_OPERAND (TREE_OPERAND (op, 0), 0)) + == heap_uninit_identifier + || DECL_NAME (TREE_OPERAND (TREE_OPERAND (op, 0), 0)) + == heap_vec_uninit_identifier)) + { + tree var = TREE_OPERAND (TREE_OPERAND (op, 0), 0); + tree var_size = TYPE_SIZE_UNIT (TREE_TYPE (var)); + tree elt_type = TREE_TYPE (type); + tree cookie_size = NULL_TREE; + if (TREE_CODE (elt_type) == RECORD_TYPE + && TYPE_NAME (elt_type) == heap_identifier) + { + tree fld1 = TYPE_FIELDS (elt_type); + tree fld2 = DECL_CHAIN (fld1); + elt_type = TREE_TYPE (TREE_TYPE (fld2)); + cookie_size = TYPE_SIZE_UNIT (TREE_TYPE (fld1)); + } + DECL_NAME (var) = (DECL_NAME (var) == heap_uninit_identifier + ? heap_identifier + : heap_vec_identifier); + TREE_TYPE (var) + = build_new_constexpr_heap_type (elt_type, cookie_size, var_size); + TREE_TYPE (TREE_OPERAND (op, 0)) + = build_pointer_type (TREE_TYPE (var)); + } + + if (op == oldop) + /* We didn't fold at the top so we could check for ptr-int + conversion. */ + return fold (t); + + tree sop; + + /* Handle an array's bounds having been deduced after we built + the wrapping expression. */ + if (same_type_ignoring_tlq_and_bounds_p (type, TREE_TYPE (op))) + r = op; + else if (sop = tree_strip_nop_conversions (op), + sop != op + && (same_type_ignoring_tlq_and_bounds_p (type, + TREE_TYPE (sop)))) + r = sop; + else + r = fold_build1 (tcode, type, op); + + /* Conversion of an out-of-range value has implementation-defined + behavior; the language considers it different from arithmetic + overflow, which is undefined. */ + if (TREE_OVERFLOW_P (r) && !TREE_OVERFLOW_P (op)) + TREE_OVERFLOW (r) = false; + } + break; + + case PLACEHOLDER_EXPR: + /* Use of the value or address of the current object. */ + if (tree ctor = lookup_placeholder (ctx, lval, TREE_TYPE (t))) + { + if (TREE_CODE (ctor) == CONSTRUCTOR) + return ctor; + else + return eval_constant_expression (ctx, ctor, lval, non_constant_p, + overflow_p); + } + /* A placeholder without a referent. We can get here when + checking whether NSDMIs are noexcept, or in massage_init_elt; + just say it's non-constant for now. 
*/ + gcc_assert (ctx->quiet); + *non_constant_p = true; break; + case ANNOTATE_EXPR: + r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, + non_constant_p, overflow_p, jump_target); + break; + + case ASM_EXPR: + if (!ctx->quiet) + inline_asm_in_constexpr_error (loc); + *non_constant_p = true; + return t; + default: break; } @@ -188,9 +2603,44 @@ constexpr_expression (const constexpr_ctx *ctx, tree t) return r; } +/* Complain about a const object OBJ being modified in a constant expression. + EXPR is the MODIFY_EXPR expression performing the modification. */ + +static void +modifying_const_object_error (tree expr, tree obj) +{ + location_t loc = EXPR_LOCATION (expr); + auto_diagnostic_group d; + error_at (loc, + "modifying a const object %qE is not allowed in " + "a constant expression", + TREE_OPERAND (expr, 0)); + inform (location_of (obj), "originally declared %<const%> here"); +} + +/* Return true iff DECL is an empty field, either for an empty base or a + [[no_unique_address]] data member. */ + +bool +is_empty_field (tree decl) +{ + if (!decl || TREE_CODE (decl) != FIELD_DECL) + return false; + + bool r = is_empty_class (TREE_TYPE (decl)); + + /* Empty fields should have size zero. */ + gcc_checking_assert (!r || integer_zerop (DECL_SIZE (decl))); + + return r; +} + static tree -eval_store_expression (const constexpr_ctx *ctx, tree t) +eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) { + constexpr_ctx new_ctx = *ctx; + tree init = TREE_OPERAND (t, 1); if (TREE_CLOBBER_P (init)) /* Just ignore clobbers. */ @@ -205,41 +2655,374 @@ eval_store_expression (const constexpr_ctx *ctx, tree t) { /* Evaluate the value to be stored without knowing what object it will be stored in, so that any side-effects happen first. */ - init = fold_expr (init); + if (!SCALAR_TYPE_P (type)) + new_ctx.ctor = new_ctx.object = NULL_TREE; + init = eval_constant_expression (&new_ctx, init, false, non_constant_p, + overflow_p); + if (*non_constant_p) + return t; } bool evaluated = false; + if (lval) + { + /* If we want to return a reference to the target, we need to evaluate it + as a whole; otherwise, only evaluate the innermost piece to avoid + building up unnecessary *_REFs. */ + target = eval_constant_expression (ctx, target, true, non_constant_p, + overflow_p); + evaluated = true; + if (*non_constant_p) + return t; + } + + /* Find the underlying variable. */ + releasing_vec refs; tree object = NULL_TREE; + /* If we're modifying a const object, save it. */ + tree const_object_being_modified = NULL_TREE; + // bool mutable_p = false; for (tree probe = target; object == NULL_TREE;) { switch (TREE_CODE (probe)) { + case BIT_FIELD_REF: + case COMPONENT_REF: + case ARRAY_REF: { + tree ob = TREE_OPERAND (probe, 0); + tree elt = TREE_OPERAND (probe, 1); + if (TREE_CODE (elt) == FIELD_DECL /*&& DECL_MUTABLE_P (elt)*/) + { + // mutable_p = true; + } + if (TREE_CODE (probe) == ARRAY_REF) + { + // TODO + gcc_unreachable (); + // elt = eval_and_check_array_index (ctx, probe, false, + // non_constant_p, overflow_p); + if (*non_constant_p) + return t; + } + /* We don't check modifying_const_object_p for ARRAY_REFs. Given + "int a[10]", an ARRAY_REF "a[2]" can be "const int", even though + the array isn't const. Instead, check "a" in the next iteration; + that will detect modifying "const int a[10]". 
*/ + // else if (evaluated + // && modifying_const_object_p (TREE_CODE (t), probe, + // mutable_p) + // && const_object_being_modified == NULL_TREE) + // const_object_being_modified = probe; + vec_safe_push (refs, elt); + vec_safe_push (refs, TREE_TYPE (probe)); + probe = ob; + } + break; + default: if (evaluated) object = probe; else { - probe = constexpr_expression (ctx, probe); + probe = eval_constant_expression (ctx, probe, true, + non_constant_p, overflow_p); evaluated = true; + if (*non_constant_p) + return t; } break; } } - return init; + // if (modifying_const_object_p (TREE_CODE (t), object, mutable_p) + // && const_object_being_modified == NULL_TREE) + // const_object_being_modified = object; + + /* And then find/build up our initializer for the path to the subobject + we're initializing. */ + tree *valp; + if (DECL_P (object)) + valp = ctx->global->values.get (object); + else + valp = NULL; + if (!valp) + { + /* A constant-expression cannot modify objects from outside the + constant-expression. */ + if (!ctx->quiet) + error ("modification of %qE is not a constant expression", object); + *non_constant_p = true; + return t; + } + type = TREE_TYPE (object); + bool no_zero_init = true; + + releasing_vec ctors, indexes; + auto_vec<int> index_pos_hints; + bool activated_union_member_p = false; + while (!refs->is_empty ()) + { + if (*valp == NULL_TREE) + { + *valp = build_constructor (type, NULL); + CONSTRUCTOR_NO_CLEARING (*valp) = no_zero_init; + } + else if (TREE_CODE (*valp) == STRING_CST) + { + /* An array was initialized with a string constant, and now + we're writing into one of its elements. Explode the + single initialization into a set of element + initializations. */ + gcc_assert (TREE_CODE (type) == ARRAY_TYPE); + + tree string = *valp; + tree elt_type = TREE_TYPE (type); + unsigned chars_per_elt + = (TYPE_PRECISION (elt_type) / TYPE_PRECISION (char_type_node)); + unsigned num_elts = TREE_STRING_LENGTH (string) / chars_per_elt; + tree ary_ctor = build_constructor (type, NULL); + + vec_safe_reserve (CONSTRUCTOR_ELTS (ary_ctor), num_elts); + for (unsigned ix = 0; ix != num_elts; ix++) + { + constructor_elt elt + = {build_int_cst (size_type_node, ix), + extract_string_elt (string, chars_per_elt, ix)}; + CONSTRUCTOR_ELTS (ary_ctor)->quick_push (elt); + } + + *valp = ary_ctor; + } + + /* If the value of object is already zero-initialized, any new ctors for + subobjects will also be zero-initialized. */ + no_zero_init = CONSTRUCTOR_NO_CLEARING (*valp); + + enum tree_code code = TREE_CODE (type); + type = refs->pop (); + tree index = refs->pop (); + + if (code == RECORD_TYPE && is_empty_field (index)) + /* Don't build a sub-CONSTRUCTOR for an empty base or field, as they + have no data and might have an offset lower than previously declared + fields, which confuses the middle-end. The code below will notice + that we don't have a CONSTRUCTOR for our inner target and just + return init. */ + break; + + if (code == UNION_TYPE && CONSTRUCTOR_NELTS (*valp) + && CONSTRUCTOR_ELT (*valp, 0)->index != index) + { + if (TREE_CODE (t) == MODIFY_EXPR && CONSTRUCTOR_NO_CLEARING (*valp)) + { + /* Diagnose changing the active union member while the union + is in the process of being initialized. 
*/ + if (!ctx->quiet) + error_at (EXPR_LOCATION (t), + "change of the active member of a union " + "from %qD to %qD during initialization", + CONSTRUCTOR_ELT (*valp, 0)->index, index); + *non_constant_p = true; + } + no_zero_init = true; + } + + vec_safe_push (ctors, *valp); + vec_safe_push (indexes, index); + + constructor_elt *cep = get_or_insert_ctor_field (*valp, index); + index_pos_hints.safe_push (cep - CONSTRUCTOR_ELTS (*valp)->begin ()); + + if (code == UNION_TYPE) + activated_union_member_p = true; + + valp = &cep->value; + } + + /* Detect modifying a constant object in constexpr evaluation. + We have found a const object that is being modified. Figure out + if we need to issue an error. Consider + + struct A { + int n; + constexpr A() : n(1) { n = 2; } // #1 + }; + struct B { + const A a; + constexpr B() { a.n = 3; } // #2 + }; + constexpr B b{}; + + #1 is OK, since we're modifying an object under construction, but + #2 is wrong, since "a" is const and has been fully constructed. + To track it, we use the TREE_READONLY bit in the object's CONSTRUCTOR + which means that the object is read-only. For the example above, the + *ctors stack at the point of #2 will look like: + + ctors[0] = {.a={.n=2}} TREE_READONLY = 0 + ctors[1] = {.n=2} TREE_READONLY = 1 + + and we're modifying "b.a", so we search the stack and see if the + constructor for "b.a" has already run. */ + if (const_object_being_modified) + { + bool fail = false; + tree const_objtype + = strip_array_types (TREE_TYPE (const_object_being_modified)); + if (!CLASS_TYPE_P (const_objtype)) + fail = true; + else + { + /* [class.ctor]p5 "A constructor can be invoked for a const, + volatile, or const volatile object. const and volatile + semantics are not applied on an object under construction. + They come into effect when the constructor for the most + derived object ends." */ + for (tree elt : *ctors) + if (same_type_ignoring_top_level_qualifiers_p ( + TREE_TYPE (const_object_being_modified), TREE_TYPE (elt))) + { + fail = TREE_READONLY (elt); + break; + } + } + if (fail) + { + if (!ctx->quiet) + modifying_const_object_error (t, const_object_being_modified); + *non_constant_p = true; + return t; + } + } + + if (!preeval) + { + /* We're handling an INIT_EXPR of class type, so the value of the + initializer can depend on the object it's initializing. */ + + /* Create a new CONSTRUCTOR in case evaluation of the initializer + wants to modify it. */ + if (*valp == NULL_TREE) + { + *valp = build_constructor (type, NULL); + CONSTRUCTOR_NO_CLEARING (*valp) = no_zero_init; + } + new_ctx.ctor = *valp; + new_ctx.object = target; + /* Avoid temporary materialization when initializing from a TARGET_EXPR. + We don't need to mess with AGGR_EXPR_SLOT/VEC_INIT_EXPR_SLOT because + expansion of those trees uses ctx instead. */ + if (TREE_CODE (init) == TARGET_EXPR) + if (tree tinit = TARGET_EXPR_INITIAL (init)) + init = tinit; + init = eval_constant_expression (&new_ctx, init, false, non_constant_p, + overflow_p); + /* The hash table might have moved since the get earlier, and the + initializer might have mutated the underlying CONSTRUCTORs, so we must + recompute VALP. */ + valp = ctx->global->values.get (object); + for (unsigned i = 0; i < vec_safe_length (indexes); i++) + { + constructor_elt *cep + = get_or_insert_ctor_field (*valp, indexes[i], index_pos_hints[i]); + valp = &cep->value; + } + } + + /* Don't share a CONSTRUCTOR that might be changed later. 
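   INIT may be a CONSTRUCTOR that is also reachable from elsewhere (for
   instance a cached constexpr call result); since the stored value can be
   modified in place by later stores, keep an unshared copy here.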
*/ + init = unshare_constructor (init); + + if (*valp && TREE_CODE (*valp) == CONSTRUCTOR + && TREE_CODE (init) == CONSTRUCTOR) + { + /* An outer ctx->ctor might be pointing to *valp, so replace + its contents. */ + if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (init), + TREE_TYPE (*valp))) + { + /* For initialization of an empty base, the original target will be + *(base*)this, evaluation of which resolves to the object + argument, which has the derived type rather than the base type. In + this situation, just evaluate the initializer and return, since + there's no actual data to store. */ + gcc_assert (is_empty_class (TREE_TYPE (init))); + return lval ? target : init; + } + CONSTRUCTOR_ELTS (*valp) = CONSTRUCTOR_ELTS (init); + TREE_CONSTANT (*valp) = TREE_CONSTANT (init); + TREE_SIDE_EFFECTS (*valp) = TREE_SIDE_EFFECTS (init); + CONSTRUCTOR_NO_CLEARING (*valp) = CONSTRUCTOR_NO_CLEARING (init); + } + else if (TREE_CODE (init) == CONSTRUCTOR + && !same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (init), + type)) + { + /* See above on initialization of empty bases. */ + gcc_assert (is_empty_class (TREE_TYPE (init)) && !lval); + return init; + } + else + *valp = init; + + /* After initialization, 'const' semantics apply to the value of the + object. Make a note of this fact by marking the CONSTRUCTOR + TREE_READONLY. */ + if (TREE_CODE (t) == INIT_EXPR && TREE_CODE (*valp) == CONSTRUCTOR + && TYPE_READONLY (type)) + { + // this vs self? can rust's self be anything other than self or &self in + // constexpr mode? if (INDIRECT_REF_P (target) + // && (is_this_parameter ( + // tree_strip_nop_conversions (TREE_OPERAND (target, 0))))) + /* We've just initialized '*this' (perhaps via the target + constructor of a delegating constructor). Leave it up to the + caller that set 'this' to set TREE_READONLY appropriately. */ + // gcc_checking_assert ( + // same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (target), + // type)); + // else + // TREE_READONLY (*valp) = true; + } + + /* Update TREE_CONSTANT and TREE_SIDE_EFFECTS on enclosing + CONSTRUCTORs, if any. */ + bool c = TREE_CONSTANT (init); + bool s = TREE_SIDE_EFFECTS (init); + if (!c || s || activated_union_member_p) + for (tree elt : *ctors) + { + if (!c) + TREE_CONSTANT (elt) = false; + if (s) + TREE_SIDE_EFFECTS (elt) = true; + /* Clear CONSTRUCTOR_NO_CLEARING since we've activated a member of + this union. */ + if (TREE_CODE (TREE_TYPE (elt)) == UNION_TYPE) + CONSTRUCTOR_NO_CLEARING (elt) = false; + } + + if (*non_constant_p) + return t; + else if (lval) + return target; + else + return init; } /* Subroutine of cxx_eval_constant_expression. Like cxx_eval_unary_expression, except for binary expressions. */ static tree -eval_binary_expression (const constexpr_ctx *ctx, tree t) +eval_binary_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) { tree orig_lhs = TREE_OPERAND (t, 0); tree orig_rhs = TREE_OPERAND (t, 1); tree lhs, rhs; - lhs = constexpr_expression (ctx, orig_lhs); - rhs = constexpr_expression (ctx, orig_rhs); + lhs = eval_constant_expression (ctx, orig_lhs, lval, non_constant_p, + overflow_p); + rhs = eval_constant_expression (ctx, orig_rhs, lval, non_constant_p, + overflow_p); location_t loc = EXPR_LOCATION (t); enum tree_code code = TREE_CODE (t); @@ -248,16 +3031,902 @@ eval_binary_expression (const constexpr_ctx *ctx, tree t) return fold_binary_loc (loc, code, type, lhs, rhs); } +/* Helper function of cxx_bind_parameters_in_call. 
Return non-NULL + if *TP is address of a static variable (or part of it) currently being + constructed or of a heap artificial variable. */ + +static tree +addr_of_non_const_var (tree *tp, int *walk_subtrees, void *data) +{ + if (TREE_CODE (*tp) == ADDR_EXPR) + if (tree var = get_base_address (TREE_OPERAND (*tp, 0))) + if (VAR_P (var) && TREE_STATIC (var)) + { + if (DECL_NAME (var) == heap_uninit_identifier + || DECL_NAME (var) == heap_identifier + || DECL_NAME (var) == heap_vec_uninit_identifier + || DECL_NAME (var) == heap_vec_identifier) + return var; + + constexpr_global_ctx *global = (constexpr_global_ctx *) data; + if (global->values.get (var)) + return var; + } + if (TYPE_P (*tp)) + *walk_subtrees = false; + return NULL_TREE; +} + +/* Subroutine of cxx_eval_call_expression. + We are processing a call expression (either CALL_EXPR or + AGGR_INIT_EXPR) in the context of CTX. Evaluate + all arguments and bind their values to correspondings + parameters, making up the NEW_CALL context. */ + +static tree +rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun, + bool *non_constant_p, bool *overflow_p, + bool *non_constant_args) +{ + const int nargs = call_expr_nargs (t); + tree parms = DECL_ARGUMENTS (fun); + int i; + /* We don't record ellipsis args below. */ + int nparms = list_length (parms); + int nbinds = nargs < nparms ? nargs : nparms; + tree binds = make_tree_vec (nbinds); + for (i = 0; i < nargs; ++i) + { + tree x, arg; + tree type = parms ? TREE_TYPE (parms) : void_type_node; + if (parms && DECL_BY_REFERENCE (parms)) + type = TREE_TYPE (type); + x = get_nth_callarg (t, i); + + if (TREE_ADDRESSABLE (type)) + /* Undo convert_for_arg_passing work here. */ + x = convert_from_reference (x); + /* Normally we would strip a TARGET_EXPR in an initialization context + such as this, but here we do the elision differently: we keep the + TARGET_EXPR, and use its CONSTRUCTOR as the value of the parm. */ + arg = eval_constant_expression (ctx, x, /*lval=*/false, non_constant_p, + overflow_p); + /* Don't VERIFY_CONSTANT here. */ + if (*non_constant_p && ctx->quiet) + break; + /* Just discard ellipsis args after checking their constantitude. */ + if (!parms) + continue; + + if (!*non_constant_p) + { + /* Make sure the binding has the same type as the parm. But + only for constant args. */ + if (!TYPE_REF_P (type)) + arg = adjust_temp_type (type, arg); + if (!TREE_CONSTANT (arg)) + *non_constant_args = true; + else if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)) + /* The destructor needs to see any modifications the callee makes + to the argument. */ + *non_constant_args = true; + /* If arg is or contains address of a heap artificial variable or + of a static variable being constructed, avoid caching the + function call, as those variables might be modified by the + function, or might be modified by the callers in between + the cached function and just read by the function. */ + else if (!*non_constant_args + && rs_walk_tree (&arg, addr_of_non_const_var, ctx->global, + NULL)) + *non_constant_args = true; + + // /* For virtual calls, adjust the this argument, so that it is + // the object on which the method is called, rather than + // one of its bases. 
*/ + // if (i == 0 && DECL_VIRTUAL_P (fun)) + // { + // tree addr = arg; + // STRIP_NOPS (addr); + // if (TREE_CODE (addr) == ADDR_EXPR) + // { + // tree obj = TREE_OPERAND (addr, 0); + // while (TREE_CODE (obj) == COMPONENT_REF + // && DECL_FIELD_IS_BASE (TREE_OPERAND (obj, 1)) + // && !same_type_ignoring_top_level_qualifiers_p ( + // TREE_TYPE (obj), DECL_CONTEXT (fun))) + // obj = TREE_OPERAND (obj, 0); + // if (obj != TREE_OPERAND (addr, 0)) + // arg = build_fold_addr_expr_with_type (obj, TREE_TYPE + // (arg)); + // } + // } + TREE_VEC_ELT (binds, i) = arg; + } + parms = TREE_CHAIN (parms); + } + + return binds; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_builtin_function_call + +/* Attempt to evaluate T which represents a call to a builtin function. + We assume here that all builtin functions evaluate to scalar types + represented by _CST nodes. */ + +static tree +eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun, + bool lval, bool *non_constant_p, bool *overflow_p) +{ + const int nargs = call_expr_nargs (t); + tree *args = (tree *) alloca (nargs * sizeof (tree)); + tree new_call; + int i; + + /* Don't fold __builtin_constant_p within a constexpr function. */ + bool bi_const_p = DECL_IS_BUILTIN_CONSTANT_P (fun); + + /* If we aren't requiring a constant expression, defer __builtin_constant_p + in a constexpr function until we have values for the parameters. */ + if (bi_const_p && !ctx->manifestly_const_eval && current_function_decl + && DECL_DECLARED_CONSTEXPR_P (current_function_decl)) + { + *non_constant_p = true; + return t; + } + + /* For __builtin_is_constant_evaluated, defer it if not + ctx->manifestly_const_eval (as sometimes we try to constant evaluate + without manifestly_const_eval even expressions or parts thereof which + will later be manifestly const_eval evaluated), otherwise fold it to + true. */ + if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CONSTANT_EVALUATED, + BUILT_IN_FRONTEND)) + { + if (!ctx->manifestly_const_eval) + { + *non_constant_p = true; + return t; + } + return boolean_true_node; + } + + if (fndecl_built_in_p (fun, CP_BUILT_IN_SOURCE_LOCATION, BUILT_IN_FRONTEND)) + { + temp_override<tree> ovr (current_function_decl); + if (ctx->call && ctx->call->fundef) + current_function_decl = ctx->call->fundef->decl; + return fold_builtin_source_location (EXPR_LOCATION (t)); + } + + int strops = 0; + int strret = 0; + if (fndecl_built_in_p (fun, BUILT_IN_NORMAL)) + switch (DECL_FUNCTION_CODE (fun)) + { + case BUILT_IN_STRLEN: + case BUILT_IN_STRNLEN: + strops = 1; + break; + case BUILT_IN_MEMCHR: + case BUILT_IN_STRCHR: + case BUILT_IN_STRRCHR: + strops = 1; + strret = 1; + break; + case BUILT_IN_MEMCMP: + case BUILT_IN_STRCMP: + strops = 2; + break; + case BUILT_IN_STRSTR: + strops = 2; + strret = 1; + break; + case BUILT_IN_ASAN_POINTER_COMPARE: + case BUILT_IN_ASAN_POINTER_SUBTRACT: + /* These builtins shall be ignored during constant expression + evaluation. */ + return void_node; + default: + break; + } + + /* Be permissive for arguments to built-ins; __builtin_constant_p should + return constant false for a non-constant argument. */ + constexpr_ctx new_ctx = *ctx; + new_ctx.quiet = true; + for (i = 0; i < nargs; ++i) + { + tree arg = CALL_EXPR_ARG (t, i); + tree oarg = arg; + + /* To handle string built-ins we need to pass ADDR_EXPR<STRING_CST> since + expand_builtin doesn't know how to look in the values table. 
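   (Illustrative: for __builtin_strlen (p), where evaluating the pointee of
   p yields a STRING_CST such as "abc", the argument is rewritten to the
   address of that STRING_CST so that fold_builtin_call_array below can see
   the string contents and fold the call to 3.)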
*/ + bool strop = i < strops; + if (strop) + { + STRIP_NOPS (arg); + if (TREE_CODE (arg) == ADDR_EXPR) + arg = TREE_OPERAND (arg, 0); + else + strop = false; + } + + /* If builtin_valid_in_constant_expr_p is true, + potential_constant_expression_1 has not recursed into the arguments + of the builtin, verify it here. */ + if (!builtin_valid_in_constant_expr_p (fun) + || potential_constant_expression (arg)) + { + bool dummy1 = false, dummy2 = false; + arg + = eval_constant_expression (&new_ctx, arg, false, &dummy1, &dummy2); + } + + if (bi_const_p) + /* For __builtin_constant_p, fold all expressions with constant values + even if they aren't C++ constant-expressions. */ + arg = cp_fold_rvalue (arg); + else if (strop) + { + if (TREE_CODE (arg) == CONSTRUCTOR) + arg = braced_lists_to_strings (TREE_TYPE (arg), arg); + if (TREE_CODE (arg) == STRING_CST) + arg = build_address (arg); + else + arg = oarg; + } + + args[i] = arg; + } + + bool save_ffbcp = force_folding_builtin_constant_p; + force_folding_builtin_constant_p |= ctx->manifestly_const_eval; + tree save_cur_fn = current_function_decl; + /* Return name of ctx->call->fundef->decl for __builtin_FUNCTION (). */ + if (fndecl_built_in_p (fun, BUILT_IN_FUNCTION) && ctx->call + && ctx->call->fundef) + current_function_decl = ctx->call->fundef->decl; + if (fndecl_built_in_p (fun, + CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS, + BUILT_IN_FRONTEND)) + { + location_t loc = EXPR_LOCATION (t); + if (nargs >= 1) + VERIFY_CONSTANT (args[0]); + new_call + = fold_builtin_is_pointer_inverconvertible_with_class (loc, nargs, + args); + } + else if (fndecl_built_in_p (fun, CP_BUILT_IN_IS_CORRESPONDING_MEMBER, + BUILT_IN_FRONTEND)) + { + location_t loc = EXPR_LOCATION (t); + if (nargs >= 2) + { + VERIFY_CONSTANT (args[0]); + VERIFY_CONSTANT (args[1]); + } + new_call = fold_builtin_is_corresponding_member (loc, nargs, args); + } + else + new_call = fold_builtin_call_array (EXPR_LOCATION (t), TREE_TYPE (t), + CALL_EXPR_FN (t), nargs, args); + current_function_decl = save_cur_fn; + force_folding_builtin_constant_p = save_ffbcp; + if (new_call == NULL) + { + if (!*non_constant_p && !ctx->quiet) + { + /* Do not allow__builtin_unreachable in constexpr function. + The __builtin_unreachable call with BUILTINS_LOCATION + comes from cp_maybe_instrument_return. */ + if (fndecl_built_in_p (fun, BUILT_IN_UNREACHABLE) + && EXPR_LOCATION (t) == BUILTINS_LOCATION) + error ("%<constexpr%> call flows off the end of the function"); + else + { + new_call = build_call_array_loc (EXPR_LOCATION (t), TREE_TYPE (t), + CALL_EXPR_FN (t), nargs, args); + error ("%q+E is not a constant expression", new_call); + } + } + *non_constant_p = true; + return t; + } + + if (!potential_constant_expression (new_call)) + { + if (!*non_constant_p && !ctx->quiet) + error ("%q+E is not a constant expression", new_call); + *non_constant_p = true; + return t; + } + + if (strret) + { + /* memchr returns a pointer into the first argument, but we replaced the + argument above with a STRING_CST; put it back it now. */ + tree op = CALL_EXPR_ARG (t, strret - 1); + STRIP_NOPS (new_call); + if (TREE_CODE (new_call) == POINTER_PLUS_EXPR) + TREE_OPERAND (new_call, 0) = op; + else if (TREE_CODE (new_call) == ADDR_EXPR) + new_call = op; + } + + return eval_constant_expression (&new_ctx, new_call, lval, non_constant_p, + overflow_p); +} + // Subroutine of cxx_eval_constant_expression. // Evaluate the call expression tree T in the context of OLD_CALL expression // evaluation. 
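// Overview of the code below: callees that are not FUNCTION_DECLs are
// rejected, builtins are dispatched to eval_builtin_function_call, the
// argument expressions are evaluated and bound by
// rs_bind_parameters_in_call, and the result is then either reused from
// constexpr_call_table (hashed over the function definition, the argument
// bindings and manifestly_const_eval) or computed by evaluating an
// unshared copy of the function body with the parameters and the
// RESULT_DECL entered into ctx->global->values.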
static tree -eval_call_expression (const constexpr_ctx *ctx, tree t) +eval_call_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) { + location_t loc = EXPR_LOCATION (t); tree fun = get_function_named_in_call (t); - return constexpr_fn_retval (ctx, DECL_SAVED_TREE (fun)); + constexpr_call new_call = {NULL, NULL, NULL, 0, ctx->manifestly_const_eval}; + int depth_ok; + + if (fun == NULL_TREE) + { + // return cxx_eval_internal_function (ctx, t, lval, + // non_constant_p, overflow_p); + gcc_unreachable (); + return error_mark_node; + } + + if (TREE_CODE (fun) != FUNCTION_DECL) + { + if (!ctx->quiet && !*non_constant_p) + error_at (loc, + "expression %qE does not designate a %<constexpr%> " + "function", + fun); + *non_constant_p = true; + return t; + } + + if (fndecl_built_in_p (fun)) + return eval_builtin_function_call (ctx, t, fun, lval, non_constant_p, + overflow_p); + + bool non_constant_args = false; + new_call.bindings + = rs_bind_parameters_in_call (ctx, t, fun, non_constant_p, overflow_p, + &non_constant_args); + + /* We build up the bindings list before we know whether we already have this + call cached. If we don't end up saving these bindings, ggc_free them when + this function exits. */ + class free_bindings + { + tree *bindings; + + public: + free_bindings (tree &b) : bindings (&b) {} + ~free_bindings () + { + if (bindings) + ggc_free (*bindings); + } + void preserve () { bindings = NULL; } + } fb (new_call.bindings); + + if (*non_constant_p) + return t; + + /* If in direct recursive call, optimize definition search. */ + if (ctx && ctx->call && ctx->call->fundef && ctx->call->fundef->decl == fun) + new_call.fundef = ctx->call->fundef; + else + { + new_call.fundef = retrieve_constexpr_fundef (fun); + if (new_call.fundef == NULL || new_call.fundef->body == NULL + || new_call.fundef->result == error_mark_node + || fun == current_function_decl) + { + if (!ctx->quiet) + { + /* We need to check for current_function_decl here in case we're + being called during cp_fold_function, because at that point + DECL_INITIAL is set properly and we have a fundef but we + haven't lowered invisirefs yet (c++/70344). */ + if (DECL_INITIAL (fun) == error_mark_node + || fun == current_function_decl) + error_at (loc, + "%qD called in a constant expression before its " + "definition is complete", + fun); + else if (DECL_INITIAL (fun)) + { + // /* The definition of fun was somehow unsuitable. But + // pretend + // that lambda static thunks don't exist. */ + // if (!lambda_static_thunk_p (fun)) + // error_at (loc, "%qD called in a constant expression", + // fun); + explain_invalid_constexpr_fn (fun); + } + else + error_at (loc, "%qD used before its definition", fun); + } + *non_constant_p = true; + return t; + } + } + + depth_ok = push_cx_call_context (t); + + tree result = NULL_TREE; + constexpr_call *entry = NULL; + if (depth_ok && !non_constant_args && ctx->strict) + { + new_call.hash = constexpr_fundef_hasher::hash (new_call.fundef); + new_call.hash = iterative_hash_object (new_call.bindings, new_call.hash); + new_call.hash + = iterative_hash_object (ctx->manifestly_const_eval, new_call.hash); + + /* If we have seen this call before, we are done. */ + maybe_initialize_constexpr_call_table (); + constexpr_call **slot + = constexpr_call_table->find_slot (&new_call, INSERT); + entry = *slot; + if (entry == NULL) + { + /* Only cache up to constexpr_cache_depth to limit memory use. 
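   Calls that sit deeper than that are not entered into the table and are
   simply re-evaluated each time they are reached.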
*/ + if (depth_ok < constexpr_cache_depth) + { + /* We need to keep a pointer to the entry, not just the slot, as + the slot can move during evaluation of the body. */ + *slot = entry = ggc_alloc<constexpr_call> (); + *entry = new_call; + fb.preserve (); + } + } + /* Calls that are in progress have their result set to NULL, so that we + can detect circular dependencies. Now that we only cache up to + constexpr_cache_depth this won't catch circular dependencies that + start deeper, but they'll hit the recursion or ops limit. */ + else if (entry->result == NULL) + { + if (!ctx->quiet) + error ("call has circular dependency"); + *non_constant_p = true; + entry->result = result = error_mark_node; + } + else + result = entry->result; + } + + if (!depth_ok) + { + if (!ctx->quiet) + error ("%<constexpr%> evaluation depth exceeds maximum of %d (use " + "%<-fconstexpr-depth=%> to increase the maximum)", + max_constexpr_depth); + *non_constant_p = true; + result = error_mark_node; + } + else + { + bool cacheable = true; + if (result && result != error_mark_node) + /* OK */; + else if (!DECL_SAVED_TREE (fun)) + { + /* When at_eof >= 2, cgraph has started throwing away + DECL_SAVED_TREE, so fail quietly. FIXME we get here because of + late code generation for VEC_INIT_EXPR, which needs to be + completely reconsidered. */ + // gcc_assert (at_eof >= 2 && ctx->quiet); + *non_constant_p = true; + } + else if (tree copy = get_fundef_copy (new_call.fundef)) + { + tree body, parms, res; + releasing_vec ctors; + + /* Reuse or create a new unshared copy of this function's body. */ + body = TREE_PURPOSE (copy); + parms = TREE_VALUE (copy); + res = TREE_TYPE (copy); + + /* Associate the bindings with the remapped parms. */ + tree bound = new_call.bindings; + tree remapped = parms; + for (int i = 0; i < TREE_VEC_LENGTH (bound); ++i) + { + tree arg = TREE_VEC_ELT (bound, i); + if (entry) + { + /* Unshare args going into the hash table to separate them + from the caller's context, for better GC and to avoid + problems with verify_gimple. */ + arg = unshare_expr_without_location (arg); + TREE_VEC_ELT (bound, i) = arg; + + /* And then unshare again so the callee doesn't change the + argument values in the hash table. XXX Could we unshare + lazily in cxx_eval_store_expression? */ + arg = unshare_constructor (arg); + if (TREE_CODE (arg) == CONSTRUCTOR) + vec_safe_push (ctors, arg); + } + + ctx->global->values.put (remapped, arg); + remapped = DECL_CHAIN (remapped); + } + /* Add the RESULT_DECL to the values map, too. */ + gcc_assert (!DECL_BY_REFERENCE (res)); + ctx->global->values.put (res, NULL_TREE); + + /* Track the callee's evaluated SAVE_EXPRs and TARGET_EXPRs so that + we can forget their values after the call. */ + constexpr_ctx ctx_with_save_exprs = *ctx; + auto_vec<tree, 10> save_exprs; + ctx_with_save_exprs.save_exprs = &save_exprs; + ctx_with_save_exprs.call = &new_call; + unsigned save_heap_alloc_count = ctx->global->heap_vars.length (); + unsigned save_heap_dealloc_count = ctx->global->heap_dealloc_count; + + tree jump_target = NULL_TREE; + eval_constant_expression (&ctx_with_save_exprs, body, lval, + non_constant_p, overflow_p, &jump_target); + + if (VOID_TYPE_P (TREE_TYPE (res))) + result = void_node; + else + { + result = *ctx->global->values.get (res); + if (result == NULL_TREE && !*non_constant_p) + { + if (!ctx->quiet) + error ("%<constexpr%> call flows off the end " + "of the function"); + *non_constant_p = true; + } + } + + /* Forget the saved values of the callee's SAVE_EXPRs and + TARGET_EXPRs. 
*/ + for (tree save_expr : save_exprs) + ctx->global->values.remove (save_expr); + + /* Remove the parms/result from the values map. Is it worth + bothering to do this when the map itself is only live for + one constexpr evaluation? If so, maybe also clear out + other vars from call, maybe in BIND_EXPR handling? */ + ctx->global->values.remove (res); + for (tree parm = parms; parm; parm = TREE_CHAIN (parm)) + ctx->global->values.remove (parm); + + /* Make the unshared function copy we used available for re-use. */ + save_fundef_copy (fun, copy); + + /* If the call allocated some heap object that hasn't been + deallocated during the call, or if it deallocated some heap + object it has not allocated, the call isn't really stateless + for the constexpr evaluation and should not be cached. + It is fine if the call allocates something and deallocates it + too. */ + if (entry + && (save_heap_alloc_count != ctx->global->heap_vars.length () + || (save_heap_dealloc_count + != ctx->global->heap_dealloc_count))) + { + tree heap_var; + unsigned int i; + if ((ctx->global->heap_vars.length () + - ctx->global->heap_dealloc_count) + != save_heap_alloc_count - save_heap_dealloc_count) + cacheable = false; + else + FOR_EACH_VEC_ELT_FROM (ctx->global->heap_vars, i, heap_var, + save_heap_alloc_count) + if (DECL_NAME (heap_var) != heap_deleted_identifier) + { + cacheable = false; + break; + } + } + } + else + /* Couldn't get a function copy to evaluate. */ + *non_constant_p = true; + + if (result == error_mark_node) + *non_constant_p = true; + if (*non_constant_p || *overflow_p) + result = error_mark_node; + else if (!result) + result = void_node; + if (entry) + entry->result = cacheable ? result : error_mark_node; + } + + pop_cx_call_context (); + return result; +} + +/* Subroutine of build_data_member_initialization. MEMBER is a COMPONENT_REF + for a member of an anonymous aggregate, INIT is the initializer for that + member, and VEC_OUTER is the vector of constructor elements for the class + whose constructor we are processing. Add the initializer to the vector + and return true to indicate success. */ + +static bool +build_anon_member_initialization (tree member, tree init, + vec<constructor_elt, va_gc> **vec_outer) +{ + /* MEMBER presents the relevant fields from the inside out, but we need + to build up the initializer from the outside in so that we can reuse + previously built CONSTRUCTORs if this is, say, the second field in an + anonymous struct. So we use a vec as a stack. */ + auto_vec<tree, 2> fields; + do + { + fields.safe_push (TREE_OPERAND (member, 1)); + member = TREE_OPERAND (member, 0); + } + while (ANON_AGGR_TYPE_P (TREE_TYPE (member)) + && TREE_CODE (member) == COMPONENT_REF); + + /* VEC has the constructor elements vector for the context of FIELD. + If FIELD is an anonymous aggregate, we will push inside it. */ + vec<constructor_elt, va_gc> **vec = vec_outer; + tree field; + while (field = fields.pop (), ANON_AGGR_TYPE_P (TREE_TYPE (field))) + { + tree ctor; + /* If there is already an outer constructor entry for the anonymous + aggregate FIELD, use it; otherwise, insert one. */ + if (vec_safe_is_empty (*vec) || (*vec)->last ().index != field) + { + ctor = build_constructor (TREE_TYPE (field), NULL); + CONSTRUCTOR_APPEND_ELT (*vec, field, ctor); + } + else + ctor = (*vec)->last ().value; + vec = &CONSTRUCTOR_ELTS (ctor); + } + + /* Now we're at the innermost field, the one that isn't an anonymous + aggregate. Add its initializer to the CONSTRUCTOR and we're done. 
*/ + gcc_assert (fields.is_empty ()); + CONSTRUCTOR_APPEND_ELT (*vec, field, init); + + return true; } +///* V is a vector of constructor elements built up for the base and member +// initializers of a constructor for TYPE. They need to be in increasing +// offset order, which they might not be yet if TYPE has a primary base +// which is not first in the base-clause or a vptr and at least one base +// all of which are non-primary. */ +// +// static vec<constructor_elt, va_gc> * +// sort_constexpr_mem_initializers (tree type, vec<constructor_elt, va_gc> *v) +//{ +// tree pri = CLASSTYPE_PRIMARY_BINFO (type); +// tree field_type; +// unsigned i; +// constructor_elt *ce; +// +// if (pri) +// field_type = BINFO_TYPE (pri); +// else if (TYPE_CONTAINS_VPTR_P (type)) +// field_type = vtbl_ptr_type_node; +// else +// return v; +// +// /* Find the element for the primary base or vptr and move it to the +// beginning of the vec. */ +// for (i = 0; vec_safe_iterate (v, i, &ce); ++i) +// if (TREE_TYPE (ce->index) == field_type) +// break; +// +// if (i > 0 && i < vec_safe_length (v)) +// { +// vec<constructor_elt, va_gc> &vref = *v; +// constructor_elt elt = vref[i]; +// for (; i > 0; --i) +// vref[i] = vref[i - 1]; +// vref[0] = elt; +// } +// +// return v; +//} + +/* Subroutine of build_constexpr_constructor_member_initializers. + The expression tree T represents a data member initialization + in a (constexpr) constructor definition. Build a pairing of + the data member with its initializer, and prepend that pair + to the existing initialization pair INITS. */ + +static bool +build_data_member_initialization (tree t, vec<constructor_elt, va_gc> **vec) +{ + tree member, init; + if (TREE_CODE (t) == CLEANUP_POINT_EXPR) + t = TREE_OPERAND (t, 0); + if (TREE_CODE (t) == EXPR_STMT) + t = TREE_OPERAND (t, 0); + if (t == error_mark_node) + return false; + if (TREE_CODE (t) == STATEMENT_LIST) + { + for (tree stmt : tsi_range (t)) + if (!build_data_member_initialization (stmt, vec)) + return false; + return true; + } + if (TREE_CODE (t) == CONVERT_EXPR) + t = TREE_OPERAND (t, 0); + if (TREE_CODE (t) == INIT_EXPR + /* vptr initialization shows up as a MODIFY_EXPR. In C++14 we only + use what this function builds for cx_check_missing_mem_inits, and + assignment in the ctor body doesn't count. */ + || (TREE_CODE (t) == MODIFY_EXPR)) + { + member = TREE_OPERAND (t, 0); + // Faisal: not sure if we need to port over break_out_target_exprs + // if not, then not sure how to handle init in this case + // init = break_out_target_exprs (TREE_OPERAND (t, 1)); + } + else if (TREE_CODE (t) == CALL_EXPR) + { + tree fn = get_callee_fndecl (t); + if (!fn || !DECL_CONSTRUCTOR_P (fn)) + /* We're only interested in calls to subobject constructors. */ + return true; + member = CALL_EXPR_ARG (t, 0); + /* We don't use build_cplus_new here because it complains about + abstract bases. Leaving the call unwrapped means that it has the + wrong type, but cxx_eval_constant_expression doesn't care. */ + // Faisal: not sure if we need to port over break_out_target_exprs + // if not, then not sure how to handle init in this case + // init = break_out_target_exprs (t); + } + else if (TREE_CODE (t) == BIND_EXPR) + return build_data_member_initialization (BIND_EXPR_BODY (t), vec); + else + /* Don't add anything else to the CONSTRUCTOR. 
*/ + return true; + if (INDIRECT_REF_P (member)) + member = TREE_OPERAND (member, 0); + if (TREE_CODE (member) == NOP_EXPR) + { + tree op = member; + STRIP_NOPS (op); + if (TREE_CODE (op) == ADDR_EXPR) + { + gcc_assert (same_type_ignoring_top_level_qualifiers_p ( + TREE_TYPE (TREE_TYPE (op)), TREE_TYPE (TREE_TYPE (member)))); + /* Initializing a cv-qualified member; we need to look through + the const_cast. */ + member = op; + } + else if (op == current_class_ptr + && (same_type_ignoring_top_level_qualifiers_p ( + TREE_TYPE (TREE_TYPE (member)), current_class_type))) + /* Delegating constructor. */ + member = op; + else + { + /* This is an initializer for an empty base; keep it for now so + we can check it in cxx_eval_bare_aggregate. */ + gcc_assert (is_empty_class (TREE_TYPE (TREE_TYPE (member)))); + } + } + if (TREE_CODE (member) == ADDR_EXPR) + member = TREE_OPERAND (member, 0); + if (TREE_CODE (member) == COMPONENT_REF) + { + tree aggr = TREE_OPERAND (member, 0); + if (TREE_CODE (aggr) == VAR_DECL) + /* Initializing a local variable, don't add anything. */ + return true; + if (TREE_CODE (aggr) != COMPONENT_REF) + /* Normal member initialization. */ + member = TREE_OPERAND (member, 1); + else if (ANON_AGGR_TYPE_P (TREE_TYPE (aggr))) + /* Initializing a member of an anonymous union. */ + return build_anon_member_initialization (member, init, vec); + else + /* We're initializing a vtable pointer in a base. Leave it as + COMPONENT_REF so we remember the path to get to the vfield. */ + gcc_assert (TREE_TYPE (member) == vtbl_ptr_type_node); + } + + /* Value-initialization can produce multiple initializers for the + same field; use the last one. */ + if (!vec_safe_is_empty (*vec) && (*vec)->last ().index == member) + (*vec)->last ().value = init; + else + CONSTRUCTOR_APPEND_ELT (*vec, member, init); + return true; +} + +///* Build compile-time evalable representations of member-initializer list +// for a constexpr constructor. */ +// +// static tree +// build_constexpr_constructor_member_initializers (tree type, tree body) +//{ +// vec<constructor_elt, va_gc> *vec = NULL; +// bool ok = true; +// while (true) +// switch (TREE_CODE (body)) +// { +// case STATEMENT_LIST: +// for (tree stmt : tsi_range (body)) +// { +// body = stmt; +// if (TREE_CODE (body) == BIND_EXPR) +// break; +// } +// break; +// +// case BIND_EXPR: +// body = BIND_EXPR_BODY (body); +// goto found; +// +// default: +// gcc_unreachable (); +// } +// found: +// +// if (TREE_CODE (body) == CLEANUP_POINT_EXPR) +// { +// body = TREE_OPERAND (body, 0); +// if (TREE_CODE (body) == EXPR_STMT) +// body = TREE_OPERAND (body, 0); +// if (TREE_CODE (body) == INIT_EXPR +// && (same_type_ignoring_top_level_qualifiers_p ( +// TREE_TYPE (TREE_OPERAND (body, 0)), current_class_type))) +// { +// /* Trivial copy. */ +// return TREE_OPERAND (body, 1); +// } +// ok = build_data_member_initialization (body, &vec); +// } +// else if (TREE_CODE (body) == STATEMENT_LIST) +// { +// for (tree stmt : tsi_range (body)) +// { +// ok = build_data_member_initialization (stmt, &vec); +// if (!ok) +// break; +// } +// } +// else if (EXPR_P (body)) +// ok = build_data_member_initialization (body, &vec); +// else +// gcc_assert (errorcount > 0); +// if (ok) +// { +// if (vec_safe_length (vec) > 0) +// { +// /* In a delegating constructor, return the target. 
*/ +// constructor_elt *ce = &(*vec)[0]; +// if (ce->index == current_class_ptr) +// { +// body = ce->value; +// vec_free (vec); +// return body; +// } +// } +// vec = sort_constexpr_mem_initializers (type, vec); +// return build_constructor (type, vec); +// } +// else +// return error_mark_node; +//} + // Subroutine of check_constexpr_fundef. BODY is the body of a function // declared to be constexpr, or a sub-statement thereof. Returns the // return value if suitable, error_mark_node for a statement not allowed in @@ -285,9 +3954,12 @@ constexpr_fn_retval (const constexpr_ctx *ctx, tree body) return expr; } - case RETURN_EXPR: - return constexpr_expression (ctx, body); - + case RETURN_EXPR: { + bool non_constant_p = false; + bool overflow_p = false; + return eval_constant_expression (ctx, body, false, &non_constant_p, + &overflow_p); + } case DECL_EXPR: { tree decl = DECL_EXPR_DECL (body); if (TREE_CODE (decl) == USING_DECL @@ -429,5 +4101,2393 @@ var_in_maybe_constexpr_fn (tree t) return (DECL_FUNCTION_SCOPE_P (t) && maybe_constexpr_fn (DECL_CONTEXT (t))); } +/* P0859: A function is needed for constant evaluation if it is a constexpr + function that is named by an expression ([basic.def.odr]) that is + potentially constant evaluated. + + So we need to instantiate any constexpr functions mentioned by the + expression even if the definition isn't needed for evaluating the + expression. */ + +static tree +instantiate_cx_fn_r (tree *tp, int *walk_subtrees, void * /*data*/) +{ + if (TREE_CODE (*tp) == CALL_EXPR) + { + if (EXPR_HAS_LOCATION (*tp)) + input_location = EXPR_LOCATION (*tp); + } + + if (!EXPR_P (*tp)) + *walk_subtrees = 0; + + return NULL_TREE; +} + +static void +instantiate_constexpr_fns (tree t) +{ + location_t loc = input_location; + rs_walk_tree_without_duplicates (&t, instantiate_cx_fn_r, NULL); + input_location = loc; +} + +/* Returns less than, equal to, or greater than zero if KEY is found to be + less than, to match, or to be greater than the constructor_elt's INDEX. */ + +static int +array_index_cmp (tree key, tree index) +{ + gcc_assert (TREE_CODE (key) == INTEGER_CST); + + switch (TREE_CODE (index)) + { + case INTEGER_CST: + return tree_int_cst_compare (key, index); + case RANGE_EXPR: { + tree lo = TREE_OPERAND (index, 0); + tree hi = TREE_OPERAND (index, 1); + if (tree_int_cst_lt (key, lo)) + return -1; + else if (tree_int_cst_lt (hi, key)) + return 1; + else + return 0; + } + default: + gcc_unreachable (); + } +} + +/* If T is a CONSTRUCTOR, return an unshared copy of T and any + sub-CONSTRUCTORs. Otherwise return T. + + We use this whenever we initialize an object as a whole, whether it's a + parameter, a local variable, or a subobject, so that subsequent + modifications don't affect other places where it was used. */ + +tree +unshare_constructor (tree t MEM_STAT_DECL) +{ + if (!t || TREE_CODE (t) != CONSTRUCTOR) + return t; + auto_vec<tree *, 4> ptrs; + ptrs.safe_push (&t); + while (!ptrs.is_empty ()) + { + tree *p = ptrs.pop (); + tree n = copy_node (*p PASS_MEM_STAT); + CONSTRUCTOR_ELTS (n) + = vec_safe_copy (CONSTRUCTOR_ELTS (*p) PASS_MEM_STAT); + *p = n; + vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (n); + constructor_elt *ce; + for (HOST_WIDE_INT i = 0; vec_safe_iterate (v, i, &ce); ++i) + if (ce->value && TREE_CODE (ce->value) == CONSTRUCTOR) + ptrs.safe_push (&ce->value); + } + return t; +} + +/* Returns the index of the constructor_elt of ARY which matches DINDEX, or -1 + if none. If INSERT is true, insert a matching element rather than fail. 
*/ + +static HOST_WIDE_INT +find_array_ctor_elt (tree ary, tree dindex, bool insert) +{ + if (tree_int_cst_sgn (dindex) < 0) + return -1; + + unsigned HOST_WIDE_INT i = tree_to_uhwi (dindex); + vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ary); + unsigned HOST_WIDE_INT len = vec_safe_length (elts); + + unsigned HOST_WIDE_INT end = len; + unsigned HOST_WIDE_INT begin = 0; + + /* If the last element of the CONSTRUCTOR has its own index, we can assume + that the same is true of the other elements and index directly. */ + if (end > 0) + { + tree cindex = (*elts)[end - 1].index; + if (cindex == NULL_TREE) + { + /* Verify that if the last index is missing, all indexes + are missing. */ + if (flag_checking) + for (unsigned int j = 0; j < len - 1; ++j) + gcc_assert ((*elts)[j].index == NULL_TREE); + if (i < end) + return i; + else + { + begin = end; + if (i == end) + /* If the element is to be added right at the end, + make sure it is added with cleared index too. */ + dindex = NULL_TREE; + else if (insert) + /* Otherwise, in order not to break the assumption + that CONSTRUCTOR either has all indexes or none, + we need to add indexes to all elements. */ + for (unsigned int j = 0; j < len; ++j) + (*elts)[j].index = build_int_cst (TREE_TYPE (dindex), j); + } + } + else if (TREE_CODE (cindex) == INTEGER_CST + && compare_tree_int (cindex, end - 1) == 0) + { + if (i < end) + return i; + else + begin = end; + } + } + + /* Otherwise, find a matching index by means of a binary search. */ + while (begin != end) + { + unsigned HOST_WIDE_INT middle = (begin + end) / 2; + constructor_elt &elt = (*elts)[middle]; + tree idx = elt.index; + + int cmp = array_index_cmp (dindex, idx); + if (cmp < 0) + end = middle; + else if (cmp > 0) + begin = middle + 1; + else + { + if (insert && TREE_CODE (idx) == RANGE_EXPR) + { + /* We need to split the range. */ + constructor_elt e; + tree lo = TREE_OPERAND (idx, 0); + tree hi = TREE_OPERAND (idx, 1); + tree value = elt.value; + dindex = fold_convert (sizetype, dindex); + if (tree_int_cst_lt (lo, dindex)) + { + /* There are still some lower elts; shorten the range. */ + tree new_hi + = int_const_binop (MINUS_EXPR, dindex, size_one_node); + if (tree_int_cst_equal (lo, new_hi)) + /* Only one element left, no longer a range. */ + elt.index = lo; + else + TREE_OPERAND (idx, 1) = new_hi; + /* Append the element we want to insert. */ + ++middle; + e.index = dindex; + e.value = unshare_constructor (value); + vec_safe_insert (CONSTRUCTOR_ELTS (ary), middle, e); + } + else + /* No lower elts, the range elt is now ours. */ + elt.index = dindex; + + if (tree_int_cst_lt (dindex, hi)) + { + /* There are still some higher elts; append a range. */ + tree new_lo + = int_const_binop (PLUS_EXPR, dindex, size_one_node); + if (tree_int_cst_equal (new_lo, hi)) + e.index = hi; + else + e.index = build2 (RANGE_EXPR, sizetype, new_lo, hi); + e.value = unshare_constructor (value); + vec_safe_insert (CONSTRUCTOR_ELTS (ary), middle + 1, e); + } + } + return middle; + } + } + + if (insert) + { + constructor_elt e = {dindex, NULL_TREE}; + vec_safe_insert (CONSTRUCTOR_ELTS (ary), end, e); + return end; + } + + return -1; +} + +/* Some expressions may have constant operands but are not constant + themselves, such as 1/0. Call this function to check for that + condition. 
+ + We only call this in places that require an arithmetic constant, not in + places where we might have a non-constant expression that can be a + component of a constant expression, such as the address of a constexpr + variable that might be dereferenced later. */ + +static bool +verify_constant (tree t, bool allow_non_constant, bool *non_constant_p, + bool *overflow_p) +{ + if (!*non_constant_p && !reduced_constant_expression_p (t) && t != void_node) + { + if (!allow_non_constant) + error ("%q+E is not a constant expression", t); + *non_constant_p = true; + } + if (TREE_OVERFLOW_P (t)) + { + if (!allow_non_constant) + { + permerror (input_location, "overflow in constant expression"); + /* If we're being permissive (and are in an enforcing + context), ignore the overflow. */ + if (flag_permissive) + return *non_constant_p; + } + *overflow_p = true; + } + return *non_constant_p; +} + +// forked from gcc/cp/constexpr.cc find_heap_var_refs + +/* Look for heap variables in the expression *TP. */ + +static tree +find_heap_var_refs (tree *tp, int *walk_subtrees, void * /*data*/) +{ + if (VAR_P (*tp) + && (DECL_NAME (*tp) == heap_uninit_identifier + || DECL_NAME (*tp) == heap_identifier + || DECL_NAME (*tp) == heap_vec_uninit_identifier + || DECL_NAME (*tp) == heap_vec_identifier + || DECL_NAME (*tp) == heap_deleted_identifier)) + return *tp; + + if (TYPE_P (*tp)) + *walk_subtrees = 0; + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc find_immediate_fndecl + +/* Find immediate function decls in *TP if any. */ + +static tree +find_immediate_fndecl (tree *tp, int * /*walk_subtrees*/, void * /*data*/) +{ + if (TREE_CODE (*tp) == FUNCTION_DECL && DECL_IMMEDIATE_FUNCTION_P (*tp)) + return *tp; + if (TREE_CODE (*tp) == PTRMEM_CST + && TREE_CODE (PTRMEM_CST_MEMBER (*tp)) == FUNCTION_DECL + && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (*tp))) + return PTRMEM_CST_MEMBER (*tp); + return NULL_TREE; +} + +// forked in gcc/cp/constexpr.cc diag_array_subscript + +/* Under the control of CTX, issue a detailed diagnostic for + an out-of-bounds subscript INDEX into the expression ARRAY. */ + +static void +diag_array_subscript (location_t loc, const constexpr_ctx *ctx, tree array, + tree index) +{ + if (!ctx->quiet) + { + tree arraytype = TREE_TYPE (array); + + /* Convert the unsigned array subscript to a signed integer to avoid + printing huge numbers for small negative values. */ + tree sidx = fold_convert (ssizetype, index); + STRIP_ANY_LOCATION_WRAPPER (array); + if (DECL_P (array)) + { + if (TYPE_DOMAIN (arraytype)) + error_at (loc, + "array subscript value %qE is outside the bounds " + "of array %qD of type %qT", + sidx, array, arraytype); + else + error_at (loc, + "nonzero array subscript %qE is used with array %qD of " + "type %qT with unknown bounds", + sidx, array, arraytype); + inform (DECL_SOURCE_LOCATION (array), "declared here"); + } + else if (TYPE_DOMAIN (arraytype)) + error_at (loc, + "array subscript value %qE is outside the bounds " + "of array type %qT", + sidx, arraytype); + else + error_at (loc, + "nonzero array subscript %qE is used with array of type %qT " + "with unknown bounds", + sidx, arraytype); + } +} + +// forked from gcc/cp/constexpr.cc get_array_or_vector_nelts + +/* Return the number of elements for TYPE (which is an ARRAY_TYPE or + a VECTOR_TYPE). 
*/ + +static tree +get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type, + bool *non_constant_p, bool *overflow_p) +{ + tree nelts; + if (TREE_CODE (type) == ARRAY_TYPE) + { + if (TYPE_DOMAIN (type)) + nelts = array_type_nelts_top (type); + else + nelts = size_zero_node; + } + else if (VECTOR_TYPE_P (type)) + nelts = size_int (TYPE_VECTOR_SUBPARTS (type)); + else + gcc_unreachable (); + + /* For VLAs, the number of elements won't be an integer constant. */ + nelts + = eval_constant_expression (ctx, nelts, false, non_constant_p, overflow_p); + return nelts; +} + +// forked from gcc/cp/constexpr.cc eval_and_check_array_index + +/* Subroutine of cxx_eval_array_reference. T is an ARRAY_REF; evaluate the + subscript, diagnose any problems with it, and return the result. */ + +static tree +eval_and_check_array_index (const constexpr_ctx *ctx, tree t, + bool allow_one_past, bool *non_constant_p, + bool *overflow_p) +{ + location_t loc = rs_expr_loc_or_input_loc (t); + tree ary = TREE_OPERAND (t, 0); + t = TREE_OPERAND (t, 1); + tree index = eval_constant_expression (ctx, t, allow_one_past, non_constant_p, + overflow_p); + VERIFY_CONSTANT (index); + + if (!tree_fits_shwi_p (index) || tree_int_cst_sgn (index) < 0) + { + diag_array_subscript (loc, ctx, ary, index); + *non_constant_p = true; + return t; + } + + tree nelts = get_array_or_vector_nelts (ctx, TREE_TYPE (ary), non_constant_p, + overflow_p); + VERIFY_CONSTANT (nelts); + if (allow_one_past ? !tree_int_cst_le (index, nelts) + : !tree_int_cst_lt (index, nelts)) + { + diag_array_subscript (loc, ctx, ary, index); + *non_constant_p = true; + return t; + } + + return index; +} + +// forked from gcc/cp/constexpr.cc extract_string_elt + +/* Extract element INDEX consisting of CHARS_PER_ELT chars from + STRING_CST STRING. */ + +static tree +extract_string_elt (tree string, unsigned chars_per_elt, unsigned index) +{ + tree type = cv_unqualified (TREE_TYPE (TREE_TYPE (string))); + tree r; + + if (chars_per_elt == 1) + r = build_int_cst (type, TREE_STRING_POINTER (string)[index]); + else + { + const unsigned char *ptr + = ((const unsigned char *) TREE_STRING_POINTER (string) + + index * chars_per_elt); + r = native_interpret_expr (type, ptr, chars_per_elt); + } + return r; +} + +/* Check whether the parameter and return types of FUN are valid for a + constexpr function, and complain if COMPLAIN. */ + +bool +is_valid_constexpr_fn (tree fun, bool complain) +{ + bool ret = true; + + for (tree parm = FUNCTION_FIRST_USER_PARM (fun); parm != NULL_TREE; + parm = TREE_CHAIN (parm)) + if (!literal_type_p (TREE_TYPE (parm))) + { + ret = false; + if (complain) + { + // auto_diagnostic_group d; + // error ("invalid type for parameter %d of %<constexpr%> " + // "function %q+#D", + // DECL_PARM_INDEX (parm), fun); + Location locus = Location (DECL_SOURCE_LOCATION (fun)); + rust_error_at ( + locus, "invalid type for parameter %d of %<constexpr%> function", + DECL_PARM_INDEX (parm)); + } + } + + return ret; +} + +void +explain_invalid_constexpr_fn (tree fun) +{ + static hash_set<tree> *diagnosed; + // tree body; + + if (diagnosed == NULL) + diagnosed = new hash_set<tree>; + if (diagnosed->add (fun)) + /* Already explained. */ + return; + + iloc_sentinel ils = input_location; + // if (!lambda_static_thunk_p (fun)) + // { + // /* Diagnostics should completely ignore the static thunk, so leave + // input_location set to our caller's location. 
*/ + // input_location = DECL_SOURCE_LOCATION (fun); + // inform (input_location, + // "%qD is not usable as a %<constexpr%> function because:", + // fun); + // } + + /* First check the declaration. */ + if (is_valid_constexpr_fn (fun, true)) + { + // /* Then if it's OK, the body. */ + // if (!DECL_DECLARED_CONSTEXPR_P (fun)) + // explain_implicit_non_constexpr (fun); + // else + // { + // if (constexpr_fundef *fd = retrieve_constexpr_fundef (fun)) + // body = fd->body; + // else + // body = DECL_SAVED_TREE (fun); + // body = massage_constexpr_body (fun, body); + // require_potential_rvalue_constant_expression (body); + // } + } +} + +/* BODY is a validated and massaged definition of a constexpr + function. Register it in the hash table. */ + +void +register_constexpr_fundef (const constexpr_fundef &value) +{ + /* Create the constexpr function table if necessary. */ + if (constexpr_fundef_table == NULL) + constexpr_fundef_table + = hash_table<constexpr_fundef_hasher>::create_ggc (101); + + constexpr_fundef **slot = constexpr_fundef_table->find_slot ( + const_cast<constexpr_fundef *> (&value), INSERT); + + gcc_assert (*slot == NULL); + *slot = ggc_alloc<constexpr_fundef> (); + **slot = value; +} + +/* We are processing the definition of the constexpr function FUN. + Check that its body fulfills the apropriate requirements and + enter it in the constexpr function definition table. */ + +void +maybe_save_constexpr_fundef (tree fun) +{ + // FIXME + + constexpr_fundef entry = {fun, NULL_TREE, NULL_TREE, NULL_TREE}; + bool clear_ctx = false; + if (DECL_RESULT (fun) && DECL_CONTEXT (DECL_RESULT (fun)) == NULL_TREE) + { + clear_ctx = true; + DECL_CONTEXT (DECL_RESULT (fun)) = fun; + } + tree saved_fn = current_function_decl; + current_function_decl = fun; + entry.body = copy_fn (entry.decl, entry.parms, entry.result); + current_function_decl = saved_fn; + if (clear_ctx) + DECL_CONTEXT (DECL_RESULT (entry.decl)) = NULL_TREE; + + register_constexpr_fundef (entry); +} + +/* Evaluate a STATEMENT_LIST for side-effects. Handles various jump + semantics, for switch, break, continue, and return. */ + +static tree +eval_statement_list (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target) +{ + tree local_target; + /* In a statement-expression we want to return the last value. + For empty statement expression return void_node. */ + tree r = void_node; + if (!jump_target) + { + local_target = NULL_TREE; + jump_target = &local_target; + } + for (tree stmt : tsi_range (t)) + { + /* We've found a continue, so skip everything until we reach + the label its jumping to. */ + if (continues (jump_target)) + { + if (label_matches (ctx, jump_target, stmt)) + /* Found it. */ + *jump_target = NULL_TREE; + else + continue; + } + if (TREE_CODE (stmt) == DEBUG_BEGIN_STMT) + continue; + r = eval_constant_expression (ctx, stmt, false, non_constant_p, + overflow_p, jump_target); + if (*non_constant_p) + break; + if (returns (jump_target) || breaks (jump_target)) + break; + } + if (*jump_target && jump_target == &local_target) + { + /* We aren't communicating the jump to our caller, so give up. We don't + need to support evaluation of jumps out of statement-exprs. */ + if (!ctx->quiet) + error_at (EXPR_LOCATION (r), "statement is not a constant expression"); + *non_constant_p = true; + } + return r; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_conditional_expression + +/* Subroutine of cxx_eval_constant_expression. + Attempt to evaluate condition expressions. 
Dead branches are not + looked into. */ + +static tree +eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p, + tree *jump_target) +{ + tree val + = eval_constant_expression (ctx, TREE_OPERAND (t, 0), + /*lval*/ false, non_constant_p, overflow_p); + VERIFY_CONSTANT (val); + if (TREE_CODE (t) == IF_STMT && IF_STMT_CONSTEVAL_P (t)) + { + /* Evaluate the condition as if it was + if (__builtin_is_constant_evaluated ()), i.e. defer it if not + ctx->manifestly_const_eval (as sometimes we try to constant evaluate + without manifestly_const_eval even expressions or parts thereof which + will later be manifestly const_eval evaluated), otherwise fold it to + true. */ + if (ctx->manifestly_const_eval) + val = boolean_true_node; + else + { + *non_constant_p = true; + return t; + } + } + /* Don't VERIFY_CONSTANT the other operands. */ + if (integer_zerop (val)) + val = TREE_OPERAND (t, 2); + else + val = TREE_OPERAND (t, 1); + if (/*TREE_CODE (t) == IF_STMT && */ !val) + val = void_node; + return eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p, + jump_target); +} + +// forked from gcc/cp/constexpr.cc cxx_eval_bit_field_ref + +/* Subroutine of cxx_eval_constant_expression. + Attempt to reduce a field access of a value of class type that is + expressed as a BIT_FIELD_REF. */ + +static tree +eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval, + bool *non_constant_p, bool *overflow_p) +{ + tree orig_whole = TREE_OPERAND (t, 0); + tree retval, fldval, utype, mask; + bool fld_seen = false; + HOST_WIDE_INT istart, isize; + tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p, + overflow_p); + tree start, field, value; + unsigned HOST_WIDE_INT i; + + if (whole == orig_whole) + return t; + /* Don't VERIFY_CONSTANT here; we only want to check that we got a + CONSTRUCTOR. 
*/ + if (!*non_constant_p && TREE_CODE (whole) != VECTOR_CST + && TREE_CODE (whole) != CONSTRUCTOR) + { + if (!ctx->quiet) + error ("%qE is not a constant expression", orig_whole); + *non_constant_p = true; + } + if (*non_constant_p) + return t; + + if (TREE_CODE (whole) == VECTOR_CST) + return fold_ternary (BIT_FIELD_REF, TREE_TYPE (t), whole, + TREE_OPERAND (t, 1), TREE_OPERAND (t, 2)); + + start = TREE_OPERAND (t, 2); + istart = tree_to_shwi (start); + isize = tree_to_shwi (TREE_OPERAND (t, 1)); + utype = TREE_TYPE (t); + if (!TYPE_UNSIGNED (utype)) + utype = build_nonstandard_integer_type (TYPE_PRECISION (utype), 1); + retval = build_int_cst (utype, 0); + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (whole), i, field, value) + { + tree bitpos = bit_position (field); + STRIP_ANY_LOCATION_WRAPPER (value); + if (bitpos == start && DECL_SIZE (field) == TREE_OPERAND (t, 1)) + return value; + if (TREE_CODE (TREE_TYPE (field)) == INTEGER_TYPE + && TREE_CODE (value) == INTEGER_CST && tree_fits_shwi_p (bitpos) + && tree_fits_shwi_p (DECL_SIZE (field))) + { + HOST_WIDE_INT bit = tree_to_shwi (bitpos); + HOST_WIDE_INT sz = tree_to_shwi (DECL_SIZE (field)); + HOST_WIDE_INT shift; + if (bit >= istart && bit + sz <= istart + isize) + { + fldval = fold_convert (utype, value); + mask = build_int_cst_type (utype, -1); + mask = fold_build2 (LSHIFT_EXPR, utype, mask, + size_int (TYPE_PRECISION (utype) - sz)); + mask = fold_build2 (RSHIFT_EXPR, utype, mask, + size_int (TYPE_PRECISION (utype) - sz)); + fldval = fold_build2 (BIT_AND_EXPR, utype, fldval, mask); + shift = bit - istart; + if (BYTES_BIG_ENDIAN) + shift = TYPE_PRECISION (utype) - shift - sz; + fldval + = fold_build2 (LSHIFT_EXPR, utype, fldval, size_int (shift)); + retval = fold_build2 (BIT_IOR_EXPR, utype, retval, fldval); + fld_seen = true; + } + } + } + if (fld_seen) + return fold_convert (TREE_TYPE (t), retval); + gcc_unreachable (); + return error_mark_node; +} + +// forked from gcc/cp/constexpr.cc returns + +/* Predicates for the meaning of *jump_target. */ + +static bool +returns (tree *jump_target) +{ + return *jump_target + && (TREE_CODE (*jump_target) == RETURN_EXPR + || (TREE_CODE (*jump_target) == LABEL_DECL + && LABEL_DECL_CDTOR (*jump_target))); +} + +// forked from gcc/cp/constexpr.cc breaks + +static bool +breaks (tree *jump_target) +{ + return *jump_target + && ((TREE_CODE (*jump_target) == LABEL_DECL + && LABEL_DECL_BREAK (*jump_target)) + || TREE_CODE (*jump_target) == BREAK_STMT + || TREE_CODE (*jump_target) == EXIT_EXPR); +} + +// forked from gcc/cp/constexpr.cc continues + +static bool +continues (tree *jump_target) +{ + return *jump_target + && ((TREE_CODE (*jump_target) == LABEL_DECL + && LABEL_DECL_CONTINUE (*jump_target)) + || TREE_CODE (*jump_target) == CONTINUE_STMT); +} + +// forked from gcc/cp/constexpr.cc switches + +static bool +switches (tree *jump_target) +{ + return *jump_target && TREE_CODE (*jump_target) == INTEGER_CST; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_loop_expr + +/* Evaluate a LOOP_EXPR for side-effects. Handles break and return + semantics; continue semantics are covered by cxx_eval_statement_list. 
*/ + +static tree +eval_loop_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target) +{ + constexpr_ctx new_ctx = *ctx; + tree local_target; + if (!jump_target) + { + local_target = NULL_TREE; + jump_target = &local_target; + } + + tree body, cond = NULL_TREE, expr = NULL_TREE; + int count = 0; + switch (TREE_CODE (t)) + { + case LOOP_EXPR: + body = LOOP_EXPR_BODY (t); + break; + case WHILE_STMT: + body = WHILE_BODY (t); + cond = WHILE_COND (t); + count = -1; + break; + case FOR_STMT: + if (FOR_INIT_STMT (t)) + eval_constant_expression (ctx, FOR_INIT_STMT (t), /*lval*/ false, + non_constant_p, overflow_p, jump_target); + if (*non_constant_p) + return NULL_TREE; + body = FOR_BODY (t); + cond = FOR_COND (t); + expr = FOR_EXPR (t); + count = -1; + break; + default: + gcc_unreachable (); + } + auto_vec<tree, 10> save_exprs; + new_ctx.save_exprs = &save_exprs; + do + { + if (count != -1) + { + if (body) + eval_constant_expression (&new_ctx, body, /*lval*/ false, + non_constant_p, overflow_p, jump_target); + if (breaks (jump_target)) + { + *jump_target = NULL_TREE; + break; + } + + if (TREE_CODE (t) != LOOP_EXPR && continues (jump_target)) + *jump_target = NULL_TREE; + + if (expr) + eval_constant_expression (&new_ctx, expr, /*lval*/ false, + non_constant_p, overflow_p, jump_target); + } + + if (cond) + { + tree res = eval_constant_expression (&new_ctx, cond, /*lval*/ false, + non_constant_p, overflow_p, + jump_target); + if (res) + { + if (verify_constant (res, ctx->quiet, non_constant_p, overflow_p)) + break; + if (integer_zerop (res)) + break; + } + else + gcc_assert (*jump_target); + } + + /* Forget saved values of SAVE_EXPRs and TARGET_EXPRs. */ + for (tree save_expr : save_exprs) + ctx->global->values.remove (save_expr); + save_exprs.truncate (0); + + if (++count >= constexpr_loop_limit) + { + if (!ctx->quiet) + error_at (rs_expr_loc_or_input_loc (t), + "%<constexpr%> loop iteration count exceeds limit of %d " + "(use %<-fconstexpr-loop-limit=%> to increase the limit)", + constexpr_loop_limit); + *non_constant_p = true; + break; + } + } + while (!returns (jump_target) && !breaks (jump_target) + && !continues (jump_target) && (!switches (jump_target) || count == 0) + && !*non_constant_p); + + /* Forget saved values of SAVE_EXPRs and TARGET_EXPRs. */ + for (tree save_expr : save_exprs) + ctx->global->values.remove (save_expr); + + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_switch_expr + +/* Evaluate a SWITCH_EXPR for side-effects. Handles switch and break jump + semantics. */ + +static tree +eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, + bool *overflow_p, tree *jump_target) +{ + tree cond + = TREE_CODE (t) == SWITCH_STMT ? SWITCH_STMT_COND (t) : SWITCH_COND (t); + cond + = eval_constant_expression (ctx, cond, false, non_constant_p, overflow_p); + VERIFY_CONSTANT (cond); + *jump_target = cond; + + tree body + = TREE_CODE (t) == SWITCH_STMT ? SWITCH_STMT_BODY (t) : SWITCH_BODY (t); + constexpr_ctx new_ctx = *ctx; + constexpr_switch_state css = css_default_not_seen; + new_ctx.css_state = &css; + eval_constant_expression (&new_ctx, body, false, non_constant_p, overflow_p, + jump_target); + if (switches (jump_target) && css == css_default_seen) + { + /* If the SWITCH_EXPR body has default: label, process it once again, + this time instructing label_matches to return true for default: + label on switches (jump_target). 
*/ + css = css_default_processing; + eval_constant_expression (&new_ctx, body, false, non_constant_p, + overflow_p, jump_target); + } + if (breaks (jump_target) || switches (jump_target)) + *jump_target = NULL_TREE; + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc eval_unary_expression + +/* Subroutine of cxx_eval_constant_expression. + Attempt to reduce the unary expression tree T to a compile time value. + If successful, return the value. Otherwise issue a diagnostic + and return error_mark_node. */ + +static tree +eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/, + bool *non_constant_p, bool *overflow_p) +{ + tree r; + tree orig_arg = TREE_OPERAND (t, 0); + tree arg = eval_constant_expression (ctx, orig_arg, /*lval*/ false, + non_constant_p, overflow_p); + VERIFY_CONSTANT (arg); + location_t loc = EXPR_LOCATION (t); + enum tree_code code = TREE_CODE (t); + tree type = TREE_TYPE (t); + r = fold_unary_loc (loc, code, type, arg); + if (r == NULL_TREE) + { + if (arg == orig_arg) + r = t; + else + r = build1_loc (loc, code, type, arg); + } + VERIFY_CONSTANT (r); + return r; +} + +// forked from gcc/cp/constexpr.cc cxx_eval_outermost_constant_expr + +/* ALLOW_NON_CONSTANT is false if T is required to be a constant expression. + STRICT has the same sense as for constant_value_1: true if we only allow + conforming C++ constant expressions, or false if we want a constant value + even if it doesn't conform. + MANIFESTLY_CONST_EVAL is true if T is manifestly const-evaluated as + per P0595 even when ALLOW_NON_CONSTANT is true. + CONSTEXPR_DTOR is true when evaluating the dtor of a constexpr variable. + OBJECT must be non-NULL in that case. */ + +static tree +cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant, + bool strict = true, + bool manifestly_const_eval = false, + bool constexpr_dtor = false, + tree object = NULL_TREE) +{ + auto_timevar time (TV_CONSTEXPR); + + bool non_constant_p = false; + bool overflow_p = false; + + if (BRACE_ENCLOSED_INITIALIZER_P (t)) + { + gcc_checking_assert (allow_non_constant); + return t; + } + + constexpr_global_ctx global_ctx; + constexpr_ctx ctx + = {&global_ctx, NULL, + NULL, NULL, + NULL, NULL, + NULL, allow_non_constant, + strict, manifestly_const_eval || !allow_non_constant}; + + /* Turn off -frounding-math for manifestly constant evaluation. */ + warning_sentinel rm (flag_rounding_math, ctx.manifestly_const_eval); + tree type = initialized_type (t); + tree r = t; + bool is_consteval = false; + if (VOID_TYPE_P (type)) + { + if (constexpr_dtor) + /* Used for destructors of array elements. */ + type = TREE_TYPE (object); + else + { + if (TREE_CODE (t) != CALL_EXPR) + return t; + /* Calls to immediate functions returning void need to be + evaluated. */ + tree fndecl = rs_get_callee_fndecl_nofold (t); + if (fndecl == NULL_TREE || !DECL_IMMEDIATE_FUNCTION_P (fndecl)) + return t; + else + is_consteval = true; + } + } + else if ((TREE_CODE (t) == CALL_EXPR || TREE_CODE (t) == TARGET_EXPR)) + { + /* For non-concept checks, determine if it is consteval. 
*/ + tree x = t; + if (TREE_CODE (x) == TARGET_EXPR) + x = TARGET_EXPR_INITIAL (x); + tree fndecl = rs_get_callee_fndecl_nofold (x); + if (fndecl && DECL_IMMEDIATE_FUNCTION_P (fndecl)) + is_consteval = true; + } + if (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)) + { + /* In C++14 an NSDMI can participate in aggregate initialization, + and can refer to the address of the object being initialized, so + we need to pass in the relevant VAR_DECL if we want to do the + evaluation in a single pass. The evaluation will dynamically + update ctx.values for the VAR_DECL. We use the same strategy + for C++11 constexpr constructors that refer to the object being + initialized. */ + if (constexpr_dtor) + { + gcc_assert (object && VAR_P (object)); + gcc_assert (DECL_DECLARED_CONSTEXPR_P (object)); + gcc_assert (DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (object)); + if (error_operand_p (DECL_INITIAL (object))) + return t; + ctx.ctor = unshare_expr (DECL_INITIAL (object)); + TREE_READONLY (ctx.ctor) = false; + /* Temporarily force decl_really_constant_value to return false + for it, we want to use ctx.ctor for the current value instead. */ + DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (object) = false; + } + else + { + ctx.ctor = build_constructor (type, NULL); + CONSTRUCTOR_NO_CLEARING (ctx.ctor) = true; + } + if (!object) + { + if (TREE_CODE (t) == TARGET_EXPR) + object = TARGET_EXPR_SLOT (t); + } + ctx.object = object; + if (object) + gcc_assert ( + same_type_ignoring_top_level_qualifiers_p (type, TREE_TYPE (object))); + if (object && DECL_P (object)) + global_ctx.values.put (object, ctx.ctor); + if (TREE_CODE (r) == TARGET_EXPR) + /* Avoid creating another CONSTRUCTOR when we expand the + TARGET_EXPR. */ + r = TARGET_EXPR_INITIAL (r); + } + + auto_vec<tree, 16> cleanups; + global_ctx.cleanups = &cleanups; + + if (manifestly_const_eval) + instantiate_constexpr_fns (r); + r = eval_constant_expression (&ctx, r, false, &non_constant_p, &overflow_p); + + if (!constexpr_dtor) + verify_constant (r, allow_non_constant, &non_constant_p, &overflow_p); + else + DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (object) = true; + + unsigned int i; + tree cleanup; + /* Evaluate the cleanups. */ + FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup) + eval_constant_expression (&ctx, cleanup, false, &non_constant_p, + &overflow_p); + + /* Mutable logic is a bit tricky: we want to allow initialization of + constexpr variables with mutable members, but we can't copy those + members to another constexpr variable. 
*/ + if (TREE_CODE (r) == CONSTRUCTOR && CONSTRUCTOR_MUTABLE_POISON (r)) + { + if (!allow_non_constant) + error ("%qE is not a constant expression because it refers to " + "mutable subobjects of %qT", + t, type); + non_constant_p = true; + } + + if (TREE_CODE (r) == CONSTRUCTOR && CONSTRUCTOR_NO_CLEARING (r)) + { + if (!allow_non_constant) + error ("%qE is not a constant expression because it refers to " + "an incompletely initialized variable", + t); + TREE_CONSTANT (r) = false; + non_constant_p = true; + } + + if (!global_ctx.heap_vars.is_empty ()) + { + tree heap_var + = rs_walk_tree_without_duplicates (&r, find_heap_var_refs, NULL); + unsigned int i; + if (heap_var) + { + if (!allow_non_constant && !non_constant_p) + error_at (DECL_SOURCE_LOCATION (heap_var), + "%qE is not a constant expression because it refers to " + "a result of %<operator new%>", + t); + r = t; + non_constant_p = true; + } + FOR_EACH_VEC_ELT (global_ctx.heap_vars, i, heap_var) + { + if (DECL_NAME (heap_var) != heap_deleted_identifier) + { + if (!allow_non_constant && !non_constant_p) + error_at (DECL_SOURCE_LOCATION (heap_var), + "%qE is not a constant expression because allocated " + "storage has not been deallocated", + t); + r = t; + non_constant_p = true; + } + varpool_node::get (heap_var)->remove (); + } + } + + /* Check that immediate invocation does not return an expression referencing + any immediate function decls. */ + if (is_consteval || in_immediate_context ()) + if (tree immediate_fndecl + = rs_walk_tree_without_duplicates (&r, find_immediate_fndecl, NULL)) + { + if (!allow_non_constant && !non_constant_p) + error_at (rs_expr_loc_or_input_loc (t), + "immediate evaluation returns address of immediate " + "function %qD", + immediate_fndecl); + r = t; + non_constant_p = true; + } + + if (non_constant_p) + /* If we saw something bad, go back to our argument. The wrapping below is + only for the cases of TREE_CONSTANT argument or overflow. */ + r = t; + + if (!non_constant_p && overflow_p) + non_constant_p = true; + + /* Unshare the result. */ + bool should_unshare = true; + if (r == t || (TREE_CODE (t) == TARGET_EXPR && TARGET_EXPR_INITIAL (t) == r)) + should_unshare = false; + + if (non_constant_p && !allow_non_constant) + return error_mark_node; + else if (constexpr_dtor) + return r; + else if (non_constant_p && TREE_CONSTANT (r)) + { + /* This isn't actually constant, so unset TREE_CONSTANT. + Don't clear TREE_CONSTANT on ADDR_EXPR, as the middle-end requires + it to be set if it is invariant address, even when it is not + a valid C++ constant expression. Wrap it with a NOP_EXPR + instead. */ + if (EXPR_P (r) && TREE_CODE (r) != ADDR_EXPR) + r = copy_node (r); + else if (TREE_CODE (r) == CONSTRUCTOR) + r = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (r), r); + else + r = build_nop (TREE_TYPE (r), r); + TREE_CONSTANT (r) = false; + } + else if (non_constant_p) + return t; + + if (should_unshare) + r = unshare_expr (r); + + if (TREE_CODE (r) == CONSTRUCTOR && CLASS_TYPE_P (TREE_TYPE (r))) + { + r = adjust_temp_type (type, r); + if (TREE_CODE (t) == TARGET_EXPR && TARGET_EXPR_INITIAL (t) == r) + return t; + } + + /* Remember the original location if that wouldn't need a wrapper. */ + if (location_t loc = EXPR_LOCATION (t)) + protected_set_expr_location (r, loc); + + return r; +} + +/* Like is_constant_expression, but allow const variables that are not allowed + under constexpr rules. 
*/ + +bool +is_static_init_expression (tree t) +{ + return potential_constant_expression_1 (t, false, false, true, tf_none); +} + +/* Like potential_constant_expression, but don't consider possible constexpr + substitution of the current function. That is, PARM_DECL qualifies under + potential_constant_expression, but not here. + + This is basically what you can check when any actual constant values might + be value-dependent. */ + +bool +is_constant_expression (tree t) +{ + return potential_constant_expression_1 (t, false, true, true, tf_none); +} + +/* Returns true if T is a potential static initializer expression that is not + instantiation-dependent. */ + +bool +is_nondependent_static_init_expression (tree t) +{ + return (!type_unknown_p (t) && is_static_init_expression (t)); +} + +/* Like maybe_constant_value, but returns a CONSTRUCTOR directly, rather + than wrapped in a TARGET_EXPR. + ALLOW_NON_CONSTANT is false if T is required to be a constant expression. + MANIFESTLY_CONST_EVAL is true if T is manifestly const-evaluated as + per P0595 even when ALLOW_NON_CONSTANT is true. */ + +static tree +maybe_constant_init_1 (tree t, tree decl, bool allow_non_constant, + bool manifestly_const_eval) +{ + if (!t) + return t; + if (TREE_CODE (t) == EXPR_STMT) + t = TREE_OPERAND (t, 0); + if (TREE_CODE (t) == CONVERT_EXPR && VOID_TYPE_P (TREE_TYPE (t))) + t = TREE_OPERAND (t, 0); + if (TREE_CODE (t) == INIT_EXPR) + t = TREE_OPERAND (t, 1); + if (TREE_CODE (t) == TARGET_EXPR) + t = TARGET_EXPR_INITIAL (t); + if (!is_nondependent_static_init_expression (t)) + /* Don't try to evaluate it. */; + else if (CONSTANT_CLASS_P (t) && allow_non_constant) + /* No evaluation needed. */; + else + t = cxx_eval_outermost_constant_expr (t, allow_non_constant, + /*strict*/ false, + manifestly_const_eval, false, decl); + if (TREE_CODE (t) == TARGET_EXPR) + { + tree init = TARGET_EXPR_INITIAL (t); + if (TREE_CODE (init) == CONSTRUCTOR) + t = init; + } + return t; +} + +/* Wrapper for maybe_constant_init_1 which permits non constants. */ + +tree +maybe_constant_init (tree t, tree decl, bool manifestly_const_eval) +{ + return maybe_constant_init_1 (t, decl, true, manifestly_const_eval); +} + +/* Returns true if T is a potential constant expression that is not + instantiation-dependent, and therefore a candidate for constant folding even + in a template. */ + +bool +is_nondependent_constant_expression (tree t) +{ + return (!type_unknown_p (t) && is_constant_expression (t) + && !instantiation_dependent_expression_p (t)); +} + +// forked from gcc/cp/parser.cc cp_unevaluated_operand + +/* Nonzero if we are parsing an unevaluated operand: an operand to + sizeof, typeof, or alignof. */ +int cp_unevaluated_operand; + +// forked from gcc/cp/constexpr.cc cv_cache + +/* If T is a constant expression, returns its reduced value. + Otherwise, if T does not have TREE_CONSTANT set, returns T. + Otherwise, returns a version of T without TREE_CONSTANT. + MANIFESTLY_CONST_EVAL is true if T is manifestly const-evaluated + as per P0595. */ + +static GTY ((deletable)) hash_map<tree, tree> *cv_cache; + +// forked from gcc/cp/constexpr.cc maybe_constant_value + +tree +maybe_constant_value (tree t, tree decl, bool manifestly_const_eval) +{ + tree r; + + if (!is_nondependent_constant_expression (t)) + { + if (TREE_OVERFLOW_P (t)) + { + t = build_nop (TREE_TYPE (t), t); + TREE_CONSTANT (t) = false; + } + return t; + } + else if (CONSTANT_CLASS_P (t)) + /* No caching or evaluation needed. 
*/ + return t; + + if (manifestly_const_eval) + return cxx_eval_outermost_constant_expr (t, true, true, true, false, decl); + + if (cv_cache == NULL) + cv_cache = hash_map<tree, tree>::create_ggc (101); + if (tree *cached = cv_cache->get (t)) + { + r = *cached; + if (r != t) + { + // Faisal: commenting this out as not sure if it's needed and it's + // huge r = break_out_target_exprs (r, /*clear_loc*/true); + protected_set_expr_location (r, EXPR_LOCATION (t)); + } + return r; + } + + /* Don't evaluate an unevaluated operand. */ + if (cp_unevaluated_operand) + return t; + + uid_sensitive_constexpr_evaluation_checker c; + r = cxx_eval_outermost_constant_expr (t, true, true, false, false, decl); + gcc_checking_assert ( + r == t || CONVERT_EXPR_P (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR + || (TREE_CONSTANT (t) && !TREE_CONSTANT (r)) || !rs_tree_equal (r, t)); + if (!c.evaluation_restricted_p ()) + cv_cache->put (t, r); + return r; +} + +// forked from gcc/cp/constexpr.cc + +bool +potential_constant_expression (tree t) +{ + return potential_constant_expression_1 (t, false, true, false, tf_none); +} + +/* Data structure for passing data from potential_constant_expression_1 + to check_for_return_continue via cp_walk_tree. */ +struct check_for_return_continue_data +{ + hash_set<tree> *pset; + tree continue_stmt; + tree break_stmt; +}; + +/* Helper function for potential_constant_expression_1 SWITCH_STMT handling, + called through cp_walk_tree. Return the first RETURN_EXPR found, or note + the first CONTINUE_STMT and/or BREAK_STMT if RETURN_EXPR is not found. */ +static tree +check_for_return_continue (tree *tp, int *walk_subtrees, void *data) +{ + tree t = *tp, s, b; + check_for_return_continue_data *d = (check_for_return_continue_data *) data; + switch (TREE_CODE (t)) + { + case RETURN_EXPR: + return t; + + case CONTINUE_STMT: + if (d->continue_stmt == NULL_TREE) + d->continue_stmt = t; + break; + + case BREAK_STMT: + if (d->break_stmt == NULL_TREE) + d->break_stmt = t; + break; + +#define RECUR(x) \ + if (tree r = rs_walk_tree (&x, check_for_return_continue, data, d->pset)) \ + return r + + /* For loops, walk subtrees manually, so that continue stmts found + inside of the bodies of the loops are ignored. */ + + case WHILE_STMT: + *walk_subtrees = 0; + RECUR (WHILE_COND (t)); + s = d->continue_stmt; + b = d->break_stmt; + RECUR (WHILE_BODY (t)); + d->continue_stmt = s; + d->break_stmt = b; + break; + + case FOR_STMT: + *walk_subtrees = 0; + RECUR (FOR_INIT_STMT (t)); + RECUR (FOR_COND (t)); + RECUR (FOR_EXPR (t)); + s = d->continue_stmt; + b = d->break_stmt; + RECUR (FOR_BODY (t)); + d->continue_stmt = s; + d->break_stmt = b; + break; + + case RANGE_FOR_STMT: + *walk_subtrees = 0; + RECUR (RANGE_FOR_EXPR (t)); + s = d->continue_stmt; + b = d->break_stmt; + RECUR (RANGE_FOR_BODY (t)); + d->continue_stmt = s; + d->break_stmt = b; + break; + + case SWITCH_STMT: + *walk_subtrees = 0; + RECUR (SWITCH_STMT_COND (t)); + b = d->break_stmt; + RECUR (SWITCH_STMT_BODY (t)); + d->break_stmt = b; + break; +#undef RECUR + + case STATEMENT_LIST: + case CONSTRUCTOR: + break; + + default: + if (!EXPR_P (t)) + *walk_subtrees = 0; + break; + } + + return NULL_TREE; +} + +/* Returns the namespace that contains DECL, whether directly or + indirectly. 
*/ + +tree +decl_namespace_context (tree decl) +{ + while (1) + { + if (TREE_CODE (decl) == NAMESPACE_DECL) + return decl; + else if (TYPE_P (decl)) + decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl)); + else + decl = CP_DECL_CONTEXT (decl); + } +} + +/* Returns true if DECL is in the std namespace. */ + +bool +decl_in_std_namespace_p (tree decl) +{ + while (decl) + { + decl = decl_namespace_context (decl); + if (DECL_NAMESPACE_STD_P (decl)) + return true; + /* Allow inline namespaces inside of std namespace, e.g. with + --enable-symvers=gnu-versioned-namespace std::forward would be + actually std::_8::forward. */ + if (!DECL_NAMESPACE_INLINE_P (decl)) + return false; + decl = CP_DECL_CONTEXT (decl); + } + return false; +} + +/* Return true if FNDECL is std::construct_at. */ + +static inline bool +is_std_construct_at (tree fndecl) +{ + if (!decl_in_std_namespace_p (fndecl)) + return false; + + tree name = DECL_NAME (fndecl); + return name && id_equal (name, "construct_at"); +} + +/* Return true if FNDECL is __dynamic_cast. */ + +static inline bool +cxx_dynamic_cast_fn_p (tree fndecl) +{ + return (id_equal (DECL_NAME (fndecl), "__dynamic_cast") + && CP_DECL_CONTEXT (fndecl) == global_namespace); +} + +/* Return true if FNDECL is std::allocator<T>::{,de}allocate. */ + +static inline bool +is_std_allocator_allocate (tree fndecl) +{ + tree name = DECL_NAME (fndecl); + if (name == NULL_TREE + || !(id_equal (name, "allocate") || id_equal (name, "deallocate"))) + return false; + + tree ctx = DECL_CONTEXT (fndecl); + if (ctx == NULL_TREE || !CLASS_TYPE_P (ctx) || !TYPE_MAIN_DECL (ctx)) + return false; + + tree decl = TYPE_MAIN_DECL (ctx); + name = DECL_NAME (decl); + if (name == NULL_TREE || !id_equal (name, "allocator")) + return false; + + return decl_in_std_namespace_p (decl); +} + +/* Overload for the above taking constexpr_call*. */ + +static inline bool +is_std_allocator_allocate (const constexpr_call *call) +{ + return (call && call->fundef + && is_std_allocator_allocate (call->fundef->decl)); +} + +/* Return true if T denotes a potentially constant expression. Issue + diagnostic as appropriate under control of FLAGS. If WANT_RVAL is true, + an lvalue-rvalue conversion is implied. If NOW is true, we want to + consider the expression in the current context, independent of constexpr + substitution. + + C++0x [expr.const] used to say + + 6 An expression is a potential constant expression if it is + a constant expression where all occurrences of function + parameters are replaced by arbitrary constant expressions + of the appropriate type. + + 2 A conditional expression is a constant expression unless it + involves one of the following as a potentially evaluated + subexpression (3.2), but subexpressions of logical AND (5.14), + logical OR (5.15), and conditional (5.16) operations that are + not evaluated are not considered. */ + +static bool +potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now, + tsubst_flags_t flags, tree *jump_target) +{ +#define RECUR(T, RV) \ + potential_constant_expression_1 ((T), (RV), strict, now, flags, jump_target) + + enum + { + any = false, + rval = true + }; + int i; + tree tmp; + + if (t == error_mark_node) + return false; + if (t == NULL_TREE) + return true; + location_t loc = rs_expr_loc_or_input_loc (t); + + if (*jump_target) + /* If we are jumping, ignore everything. 
This is simpler than the + cxx_eval_constant_expression handling because we only need to be + conservatively correct, and we don't necessarily have a constant value + available, so we don't bother with switch tracking. */ + return true; + + if (TREE_THIS_VOLATILE (t) && want_rval) + { + if (flags & tf_error) + error_at (loc, + "lvalue-to-rvalue conversion of a volatile lvalue " + "%qE with type %qT", + t, TREE_TYPE (t)); + return false; + } + if (CONSTANT_CLASS_P (t)) + return true; + if (CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED) + && TREE_TYPE (t) == error_mark_node) + return false; + + switch (TREE_CODE (t)) + { + case FUNCTION_DECL: + case OVERLOAD: + case LABEL_DECL: + case CASE_LABEL_EXPR: + case PREDICT_EXPR: + case CONST_DECL: + case IDENTIFIER_NODE: + /* We can see a FIELD_DECL in a pointer-to-member expression. */ + case FIELD_DECL: + case RESULT_DECL: + case PLACEHOLDER_EXPR: + case STATIC_ASSERT: + return true; + + case RETURN_EXPR: + if (!RECUR (TREE_OPERAND (t, 0), any)) + return false; + /* FALLTHROUGH */ + + case BREAK_STMT: + case CONTINUE_STMT: + *jump_target = t; + return true; + + case PARM_DECL: + if (now && want_rval) + { + tree type = TREE_TYPE (t); + if (is_really_empty_class (type, /*ignore_vptr*/ false)) + /* An empty class has no data to read. */ + return true; + if (flags & tf_error) + error ("%qE is not a constant expression", t); + return false; + } + return true; + + case CALL_EXPR: + /* -- an invocation of a function other than a constexpr function + or a constexpr constructor. */ + { + tree fun = get_function_named_in_call (t); + const int nargs = call_expr_nargs (t); + i = 0; + + if (fun == NULL_TREE) + { + /* Reset to allow the function to continue past the end + of the block below. Otherwise return early. */ + bool bail = true; + + if (TREE_CODE (t) == CALL_EXPR && CALL_EXPR_FN (t) == NULL_TREE) + switch (CALL_EXPR_IFN (t)) + { + /* These should be ignored, they are optimized away from + constexpr functions. */ + case IFN_UBSAN_NULL: + case IFN_UBSAN_BOUNDS: + case IFN_UBSAN_VPTR: + case IFN_FALLTHROUGH: + return true; + + case IFN_ADD_OVERFLOW: + case IFN_SUB_OVERFLOW: + case IFN_MUL_OVERFLOW: + case IFN_LAUNDER: + case IFN_VEC_CONVERT: + bail = false; + break; + + default: + break; + } + + if (bail) + { + /* fold_call_expr can't do anything with IFN calls. */ + if (flags & tf_error) + error_at (loc, "call to internal function %qE", t); + return false; + } + } + + if (fun && is_overloaded_fn (fun)) + { + if (TREE_CODE (fun) == FUNCTION_DECL) + { + if (builtin_valid_in_constant_expr_p (fun)) + return true; + if (!maybe_constexpr_fn (fun) + /* Allow any built-in function; if the expansion + isn't constant, we'll deal with that then. */ + && !fndecl_built_in_p (fun) + /* In C++20, replaceable global allocation functions + are constant expressions. */ + && (/* !cxx_replaceable_global_alloc_fn (fun) + ||*/ TREE_CODE (t) != CALL_EXPR + || (!CALL_FROM_NEW_OR_DELETE_P (t) + && (current_function_decl == NULL_TREE + /*|| !is_std_allocator_allocate(current_function_decl)*/))) + /* Allow placement new in std::construct_at. 
*/ + && (/*!cxx_placement_new_fn (fun) + ||*/ TREE_CODE (t) != CALL_EXPR + || current_function_decl == NULL_TREE + /*|| !is_std_construct_at (current_function_decl)*/) + /* && !cxx_dynamic_cast_fn_p (fun)*/) + { + if (flags & tf_error) + { + error_at (loc, "call to non-%<constexpr%> function %qD", + fun); + explain_invalid_constexpr_fn (fun); + } + return false; + } + /* A call to a non-static member function takes the address + of the object as the first argument. But in a constant + expression the address will be folded away, so look + through it now. */ + if (DECL_NONSTATIC_MEMBER_FUNCTION_P (fun) + && !DECL_CONSTRUCTOR_P (fun)) + { + tree x = get_nth_callarg (t, 0); + + /* Don't require an immediately constant value, as + constexpr substitution might not use the value. */ + bool sub_now = false; + if (!potential_constant_expression_1 (x, rval, strict, + sub_now, flags, + jump_target)) + return false; + i = 1; + } + } + else + { + if (!RECUR (fun, true)) + return false; + fun = get_first_fn (fun); + } + fun = DECL_ORIGIN (fun); + } + else if (fun) + { + if (RECUR (fun, rval)) + /* Might end up being a constant function pointer. */; + else + return false; + } + for (; i < nargs; ++i) + { + tree x = get_nth_callarg (t, i); + /* In a template, reference arguments haven't been converted to + REFERENCE_TYPE and we might not even know if the parameter + is a reference, so accept lvalue constants too. */ + bool rv = rval; + /* Don't require an immediately constant value, as constexpr + substitution might not use the value of the argument. */ + bool sub_now = false; + if (!potential_constant_expression_1 (x, rv, strict, sub_now, flags, + jump_target)) + return false; + } + return true; + } + + case NON_LVALUE_EXPR: + /* -- an lvalue-to-rvalue conversion (4.1) unless it is applied to + -- an lvalue of integral type that refers to a non-volatile + const variable or static data member initialized with + constant expressions, or + + -- an lvalue of literal type that refers to non-volatile + object defined with constexpr, or that refers to a + sub-object of such an object; */ + return RECUR (TREE_OPERAND (t, 0), rval); + + case VAR_DECL: + if (DECL_HAS_VALUE_EXPR_P (t)) + { + return RECUR (DECL_VALUE_EXPR (t), rval); + } + if (want_rval && !var_in_maybe_constexpr_fn (t) + && !decl_maybe_constant_var_p (t) + && (strict || !RS_TYPE_CONST_NON_VOLATILE_P (TREE_TYPE (t)) + || (DECL_INITIAL (t) + && !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (t))) + && COMPLETE_TYPE_P (TREE_TYPE (t)) + && !is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/ false)) + { + if (flags & tf_error) + non_const_var_error (loc, t); + return false; + } + return true; + + /* FALLTHRU */ + case NOP_EXPR: + case CONVERT_EXPR: + case VIEW_CONVERT_EXPR: + /* -- a reinterpret_cast. FIXME not implemented, and this rule + may change to something more specific to type-punning (DR 1312). 
*/ + { + tree from = TREE_OPERAND (t, 0); + if (location_wrapper_p (t)) + return (RECUR (from, want_rval)); + if (INDIRECT_TYPE_P (TREE_TYPE (t))) + { + STRIP_ANY_LOCATION_WRAPPER (from); + if (TREE_CODE (from) == INTEGER_CST && !integer_zerop (from)) + { + if (flags & tf_error) + error_at (loc, + "%<reinterpret_cast%> from integer to pointer"); + return false; + } + } + return (RECUR (from, TREE_CODE (t) != VIEW_CONVERT_EXPR)); + } + + case ADDR_EXPR: + /* -- a unary operator & that is applied to an lvalue that + designates an object with thread or automatic storage + duration; */ + t = TREE_OPERAND (t, 0); + + if (TREE_CODE (t) == OFFSET_REF && PTRMEM_OK_P (t)) + /* A pointer-to-member constant. */ + return true; + + // handle_addr_expr: +#if 0 + /* FIXME adjust when issue 1197 is fully resolved. For now don't do + any checking here, as we might dereference the pointer later. If + we remove this code, also remove check_automatic_or_tls. */ + i = check_automatic_or_tls (t); + if (i == ck_ok) + return true; + if (i == ck_bad) + { + if (flags & tf_error) + error ("address-of an object %qE with thread local or " + "automatic storage is not a constant expression", t); + return false; + } +#endif + return RECUR (t, any); + + case COMPONENT_REF: + /* -- a class member access unless its postfix-expression is + of literal type or of pointer to literal type. */ + /* This test would be redundant, as it follows from the + postfix-expression being a potential constant expression. */ + if (type_unknown_p (t)) + return true; + if (is_overloaded_fn (t)) + /* In a template, a COMPONENT_REF of a function expresses ob.fn(), + which uses ob as an lvalue. */ + want_rval = false; + gcc_fallthrough (); + + case REALPART_EXPR: + case IMAGPART_EXPR: + case BIT_FIELD_REF: + return RECUR (TREE_OPERAND (t, 0), want_rval); + + case INDIRECT_REF: { + tree x = TREE_OPERAND (t, 0); + STRIP_NOPS (x); + return RECUR (x, rval); + } + + case STATEMENT_LIST: + for (tree stmt : tsi_range (t)) + if (!RECUR (stmt, any)) + return false; + return true; + + case MODIFY_EXPR: + if (!RECUR (TREE_OPERAND (t, 0), any)) + return false; + /* Just ignore clobbers. */ + if (TREE_CLOBBER_P (TREE_OPERAND (t, 1))) + return true; + if (!RECUR (TREE_OPERAND (t, 1), rval)) + return false; + return true; + + case FOR_STMT: + if (!RECUR (FOR_INIT_STMT (t), any)) + return false; + tmp = FOR_COND (t); + if (!RECUR (tmp, rval)) + return false; + if (tmp) + { + tmp = cxx_eval_outermost_constant_expr (tmp, true); + /* If we couldn't evaluate the condition, it might not ever be + true. */ + if (!integer_onep (tmp)) + { + /* Before returning true, check if the for body can contain + a return. */ + hash_set<tree> pset; + check_for_return_continue_data data + = {&pset, NULL_TREE, NULL_TREE}; + if (tree ret_expr + = rs_walk_tree (&FOR_BODY (t), check_for_return_continue, + &data, &pset)) + *jump_target = ret_expr; + return true; + } + } + if (!RECUR (FOR_EXPR (t), any)) + return false; + if (!RECUR (FOR_BODY (t), any)) + return false; + if (breaks (jump_target) || continues (jump_target)) + *jump_target = NULL_TREE; + return true; + + case WHILE_STMT: + tmp = WHILE_COND (t); + if (!RECUR (tmp, rval)) + return false; + + tmp = cxx_eval_outermost_constant_expr (tmp, true); + /* If we couldn't evaluate the condition, it might not ever be true. */ + if (!integer_onep (tmp)) + { + /* Before returning true, check if the while body can contain + a return. 
*/ + hash_set<tree> pset; + check_for_return_continue_data data = {&pset, NULL_TREE, NULL_TREE}; + if (tree ret_expr + = rs_walk_tree (&WHILE_BODY (t), check_for_return_continue, &data, + &pset)) + *jump_target = ret_expr; + return true; + } + if (!RECUR (WHILE_BODY (t), any)) + return false; + if (breaks (jump_target) || continues (jump_target)) + *jump_target = NULL_TREE; + return true; + + case SWITCH_STMT: + if (!RECUR (SWITCH_STMT_COND (t), rval)) + return false; + /* FIXME we don't check SWITCH_STMT_BODY currently, because even + unreachable labels would be checked and it is enough if there is + a single switch cond value for which it is a valid constant + expression. We need to check if there are any RETURN_EXPRs + or CONTINUE_STMTs inside of the body though, as in that case + we need to set *jump_target. */ + else + { + hash_set<tree> pset; + check_for_return_continue_data data = {&pset, NULL_TREE, NULL_TREE}; + if (tree ret_expr + = rs_walk_tree (&SWITCH_STMT_BODY (t), check_for_return_continue, + &data, &pset)) + /* The switch might return. */ + *jump_target = ret_expr; + else if (data.continue_stmt) + /* The switch can't return, but might continue. */ + *jump_target = data.continue_stmt; + } + return true; + + case DYNAMIC_CAST_EXPR: + case PSEUDO_DTOR_EXPR: + case NEW_EXPR: + case VEC_NEW_EXPR: + case DELETE_EXPR: + case VEC_DELETE_EXPR: + case THROW_EXPR: + case OMP_PARALLEL: + case OMP_TASK: + case OMP_FOR: + case OMP_SIMD: + case OMP_DISTRIBUTE: + case OMP_TASKLOOP: + case OMP_LOOP: + case OMP_TEAMS: + case OMP_TARGET_DATA: + case OMP_TARGET: + case OMP_SECTIONS: + case OMP_ORDERED: + case OMP_CRITICAL: + case OMP_SINGLE: + case OMP_SECTION: + case OMP_MASTER: + case OMP_MASKED: + case OMP_TASKGROUP: + case OMP_TARGET_UPDATE: + case OMP_TARGET_ENTER_DATA: + case OMP_TARGET_EXIT_DATA: + case OMP_ATOMIC: + case OMP_ATOMIC_READ: + case OMP_ATOMIC_CAPTURE_OLD: + case OMP_ATOMIC_CAPTURE_NEW: + case OMP_DEPOBJ: + case OACC_PARALLEL: + case OACC_KERNELS: + case OACC_SERIAL: + case OACC_DATA: + case OACC_HOST_DATA: + case OACC_LOOP: + case OACC_CACHE: + case OACC_DECLARE: + case OACC_ENTER_DATA: + case OACC_EXIT_DATA: + case OACC_UPDATE: + /* GCC internal stuff. */ + case VA_ARG_EXPR: + case TRANSACTION_EXPR: + case AT_ENCODE_EXPR: + + if (flags & tf_error) + error_at (loc, "expression %qE is not a constant expression", t); + return false; + + case ASM_EXPR: + if (flags & tf_error) + inline_asm_in_constexpr_error (loc); + return false; + + case OBJ_TYPE_REF: + return true; + + case POINTER_DIFF_EXPR: + case MINUS_EXPR: + want_rval = true; + goto binary; + + case LT_EXPR: + case LE_EXPR: + case GT_EXPR: + case GE_EXPR: + case EQ_EXPR: + case NE_EXPR: + case SPACESHIP_EXPR: + want_rval = true; + goto binary; + + case PREINCREMENT_EXPR: + case POSTINCREMENT_EXPR: + case PREDECREMENT_EXPR: + case POSTDECREMENT_EXPR: + goto unary; + + case BIT_NOT_EXPR: + /* A destructor. */ + if (TYPE_P (TREE_OPERAND (t, 0))) + return true; + /* fall through. */ + + case CONJ_EXPR: + case SAVE_EXPR: + case FIX_TRUNC_EXPR: + case FLOAT_EXPR: + case NEGATE_EXPR: + case ABS_EXPR: + case ABSU_EXPR: + case TRUTH_NOT_EXPR: + case FIXED_CONVERT_EXPR: + case UNARY_PLUS_EXPR: + case UNARY_LEFT_FOLD_EXPR: + case UNARY_RIGHT_FOLD_EXPR: + unary: + return RECUR (TREE_OPERAND (t, 0), rval); + + case BIND_EXPR: + return RECUR (BIND_EXPR_BODY (t), want_rval); + + case CLEANUP_POINT_EXPR: + case EXPR_STMT: + case PAREN_EXPR: + case NON_DEPENDENT_EXPR: + /* For convenience. 
*/ + case LOOP_EXPR: + case EXIT_EXPR: + return RECUR (TREE_OPERAND (t, 0), want_rval); + + case DECL_EXPR: + tmp = DECL_EXPR_DECL (t); + if (VAR_P (tmp) && !DECL_ARTIFICIAL (tmp)) + { + if (RS_DECL_THREAD_LOCAL_P (tmp)) + { + if (flags & tf_error) + error_at (DECL_SOURCE_LOCATION (tmp), + "%qD declared " + "%<thread_local%> in %<constexpr%> context", + tmp); + return false; + } + else if (TREE_STATIC (tmp)) + { + if (flags & tf_error) + error_at (DECL_SOURCE_LOCATION (tmp), + "%qD declared " + "%<static%> in %<constexpr%> context", + tmp); + return false; + } + else if (!check_for_uninitialized_const_var ( + tmp, /*constexpr_context_p=*/true, flags)) + return false; + } + return RECUR (tmp, want_rval); + + case TRY_FINALLY_EXPR: + return (RECUR (TREE_OPERAND (t, 0), want_rval) + && RECUR (TREE_OPERAND (t, 1), any)); + + case SCOPE_REF: + return RECUR (TREE_OPERAND (t, 1), want_rval); + + case TARGET_EXPR: + if (!TARGET_EXPR_DIRECT_INIT_P (t) && !literal_type_p (TREE_TYPE (t))) + { + if (flags & tf_error) + { + auto_diagnostic_group d; + error_at (loc, + "temporary of non-literal type %qT in a " + "constant expression", + TREE_TYPE (t)); + explain_non_literal_class (TREE_TYPE (t)); + } + return false; + } + /* FALLTHRU */ + case INIT_EXPR: + return RECUR (TREE_OPERAND (t, 1), rval); + + case CONSTRUCTOR: { + vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t); + constructor_elt *ce; + for (i = 0; vec_safe_iterate (v, i, &ce); ++i) + if (!RECUR (ce->value, want_rval)) + return false; + return true; + } + + case TREE_LIST: { + gcc_assert (TREE_PURPOSE (t) == NULL_TREE || DECL_P (TREE_PURPOSE (t))); + if (!RECUR (TREE_VALUE (t), want_rval)) + return false; + if (TREE_CHAIN (t) == NULL_TREE) + return true; + return RECUR (TREE_CHAIN (t), want_rval); + } + + case TRUNC_DIV_EXPR: + case CEIL_DIV_EXPR: + case FLOOR_DIV_EXPR: + case ROUND_DIV_EXPR: + case TRUNC_MOD_EXPR: + case CEIL_MOD_EXPR: + case ROUND_MOD_EXPR: { + tree denom = TREE_OPERAND (t, 1); + if (!RECUR (denom, rval)) + return false; + /* We can't call cxx_eval_outermost_constant_expr on an expression + that hasn't been through instantiate_non_dependent_expr yet. */ + denom = cxx_eval_outermost_constant_expr (denom, true); + if (integer_zerop (denom)) + { + if (flags & tf_error) + error ("division by zero is not a constant expression"); + return false; + } + else + { + want_rval = true; + return RECUR (TREE_OPERAND (t, 0), want_rval); + } + } + + case COMPOUND_EXPR: { + /* check_return_expr sometimes wraps a TARGET_EXPR in a + COMPOUND_EXPR; don't get confused. */ + tree op0 = TREE_OPERAND (t, 0); + tree op1 = TREE_OPERAND (t, 1); + STRIP_NOPS (op1); + if (TREE_CODE (op0) == TARGET_EXPR && op1 == TARGET_EXPR_SLOT (op0)) + return RECUR (op0, want_rval); + else + goto binary; + } + + /* If the first operand is the non-short-circuit constant, look at + the second operand; otherwise we only care about the first one for + potentiality. */ + case TRUTH_AND_EXPR: + case TRUTH_ANDIF_EXPR: + tmp = boolean_true_node; + goto truth; + case TRUTH_OR_EXPR: + case TRUTH_ORIF_EXPR: + tmp = boolean_false_node; + truth : { + tree op0 = TREE_OPERAND (t, 0); + tree op1 = TREE_OPERAND (t, 1); + if (!RECUR (op0, rval)) + return false; + if (!(flags & tf_error) && RECUR (op1, rval)) + /* When quiet, try to avoid expensive trial evaluation by first + checking potentiality of the second operand. */ + return true; + op0 = cxx_eval_outermost_constant_expr (op0, true); + if (tree_int_cst_equal (op0, tmp)) + return (flags & tf_error) ? 
RECUR (op1, rval) : false; + else + return true; + } + + case PLUS_EXPR: + case MULT_EXPR: + case POINTER_PLUS_EXPR: + case RDIV_EXPR: + case EXACT_DIV_EXPR: + case MIN_EXPR: + case MAX_EXPR: + case LSHIFT_EXPR: + case RSHIFT_EXPR: + case LROTATE_EXPR: + case RROTATE_EXPR: + case BIT_IOR_EXPR: + case BIT_XOR_EXPR: + case BIT_AND_EXPR: + case TRUTH_XOR_EXPR: + case UNORDERED_EXPR: + case ORDERED_EXPR: + case UNLT_EXPR: + case UNLE_EXPR: + case UNGT_EXPR: + case UNGE_EXPR: + case UNEQ_EXPR: + case LTGT_EXPR: + case RANGE_EXPR: + case COMPLEX_EXPR: + want_rval = true; + /* Fall through. */ + case ARRAY_REF: + case ARRAY_RANGE_REF: + case MEMBER_REF: + case DOTSTAR_EXPR: + case MEM_REF: + case BINARY_LEFT_FOLD_EXPR: + case BINARY_RIGHT_FOLD_EXPR: + binary: + for (i = 0; i < 2; ++i) + if (!RECUR (TREE_OPERAND (t, i), want_rval)) + return false; + return true; + + case VEC_PERM_EXPR: + for (i = 0; i < 3; ++i) + if (!RECUR (TREE_OPERAND (t, i), true)) + return false; + return true; + + case COND_EXPR: + if (COND_EXPR_IS_VEC_DELETE (t)) + { + if (flags & tf_error) + error_at (loc, "%<delete[]%> is not a constant expression"); + return false; + } + /* Fall through. */ + case IF_STMT: + case VEC_COND_EXPR: + /* If the condition is a known constant, we know which of the legs we + care about; otherwise we only require that the condition and + either of the legs be potentially constant. */ + tmp = TREE_OPERAND (t, 0); + if (!RECUR (tmp, rval)) + return false; + + tmp = cxx_eval_outermost_constant_expr (tmp, true); + /* potential_constant_expression* isn't told if it is called for + manifestly_const_eval or not, so for consteval if always + process both branches as if the condition is not a known + constant. */ + if (TREE_CODE (t) != IF_STMT || !IF_STMT_CONSTEVAL_P (t)) + { + if (integer_zerop (tmp)) + return RECUR (TREE_OPERAND (t, 2), want_rval); + else if (TREE_CODE (tmp) == INTEGER_CST) + return RECUR (TREE_OPERAND (t, 1), want_rval); + } + tmp = *jump_target; + for (i = 1; i < 3; ++i) + { + tree this_jump_target = tmp; + if (potential_constant_expression_1 (TREE_OPERAND (t, i), want_rval, + strict, now, tf_none, + &this_jump_target)) + { + if (returns (&this_jump_target)) + *jump_target = this_jump_target; + else if (!returns (jump_target)) + { + if (breaks (&this_jump_target) + || continues (&this_jump_target)) + *jump_target = this_jump_target; + if (i == 1) + { + /* If the then branch is potentially constant, but + does not return, check if the else branch + couldn't return, break or continue. */ + hash_set<tree> pset; + check_for_return_continue_data data + = {&pset, NULL_TREE, NULL_TREE}; + if (tree ret_expr + = rs_walk_tree (&TREE_OPERAND (t, 2), + check_for_return_continue, &data, + &pset)) + *jump_target = ret_expr; + else if (*jump_target == NULL_TREE) + { + if (data.continue_stmt) + *jump_target = data.continue_stmt; + else if (data.break_stmt) + *jump_target = data.break_stmt; + } + } + } + return true; + } + } + if (flags & tf_error) + error_at (loc, "expression %qE is not a constant expression", t); + return false; + + case TYPE_DECL: + /* We can see these in statement-expressions. 
*/ + return true; + + case LABEL_EXPR: + t = LABEL_EXPR_LABEL (t); + if (DECL_ARTIFICIAL (t)) + return true; + else if (flags & tf_error) + error_at (loc, "label definition in %<constexpr%> function only " + "available with %<-std=c++2b%> or %<-std=gnu++2b%>"); + return false; + + case ANNOTATE_EXPR: + return RECUR (TREE_OPERAND (t, 0), rval); + + case BIT_CAST_EXPR: + return RECUR (TREE_OPERAND (t, 0), rval); + + default: + sorry ("unexpected AST of kind %s", get_tree_code_name (TREE_CODE (t))); + gcc_unreachable (); + return false; + } +#undef RECUR +} + +bool +potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now, + tsubst_flags_t flags) +{ + if (flags & tf_error) + { + /* Check potentiality quietly first, as that could be performed more + efficiently in some cases (currently only for TRUTH_*_EXPR). If + that fails, replay the check noisily to give errors. */ + flags &= ~tf_error; + if (potential_constant_expression_1 (t, want_rval, strict, now, flags)) + return true; + flags |= tf_error; + } + + tree target = NULL_TREE; + return potential_constant_expression_1 (t, want_rval, strict, now, flags, + &target); +} + +// forked from gcc/cp/constexpr.cc fold_non_dependent_init + +/* Like maybe_constant_init but first fully instantiate the argument. */ + +tree +fold_non_dependent_init (tree t, + tsubst_flags_t complain /*=tf_warning_or_error*/, + bool manifestly_const_eval /*=false*/, + tree object /* = NULL_TREE */) +{ + if (t == NULL_TREE) + return NULL_TREE; + + return maybe_constant_init (t, object, manifestly_const_eval); +} + +// #include "gt-rust-rust-constexpr.h" + } // namespace Compile } // namespace Rust diff --git a/gcc/rust/backend/rust-constexpr.h b/gcc/rust/backend/rust-constexpr.h index 3cfcec8..77a0797 100644 --- a/gcc/rust/backend/rust-constexpr.h +++ b/gcc/rust/backend/rust-constexpr.h @@ -24,6 +24,8 @@ namespace Rust { namespace Compile { extern tree fold_expr (tree); +extern void +maybe_save_constexpr_fundef (tree fun); } // namespace Compile } // namespace Rust diff --git a/gcc/rust/backend/rust-tree.cc b/gcc/rust/backend/rust-tree.cc index 6ecc690..d79cd96 100644 --- a/gcc/rust/backend/rust-tree.cc +++ b/gcc/rust/backend/rust-tree.cc @@ -21,12 +21,37 @@ #include "stringpool.h" #include "attribs.h" #include "escaped_string.h" +#include "libiberty.h" +#include "stor-layout.h" +#include "hash-map.h" +#include "diagnostic.h" +#include "timevar.h" +#include "convert.h" +#include "gimple-expr.h" +#include "gimplify.h" +#include "function.h" +#include "gcc-rich-location.h" +#include "target.h" +#include "file-prefix-map.h" +#include "cgraph.h" + +#include "output.h" + +// forked from gcc/c-family/c-common.cc c_global_trees +tree c_global_trees[CTI_MAX]; +// forked from gcc/cp/decl.cc cp_global_trees +tree cp_global_trees[CPTI_MAX]; + +struct saved_scope *scope_chain; namespace Rust { void mark_exp_read (tree exp) { + char tmp_name[32]; + ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", 1); + if (exp == NULL) return; @@ -955,4 +980,5220 @@ rs_type_quals (const_tree type) return quals; } +// forked from gcc/cp/decl.cc cp_global_trees + +/* The following symbols are subsumed in the cp_global_trees array, and + listed here individually for documentation purposes. 
+ + C++ extensions + tree wchar_decl_node; + + tree vtable_entry_type; + tree delta_type_node; + tree __t_desc_type_node; + + tree class_type_node; + tree unknown_type_node; + + Array type `vtable_entry_type[]' + + tree vtbl_type_node; + tree vtbl_ptr_type_node; + + Namespaces, + + tree std_node; + tree abi_node; + + A FUNCTION_DECL which can call `abort'. Not necessarily the + one that the user will declare, but sufficient to be called + by routines that want to abort the program. + + tree abort_fndecl; + + Used by RTTI + tree type_info_type_node, tinfo_decl_id, tinfo_decl_type; + tree tinfo_var_id; */ + +/* The following symbols are subsumed in the c_global_trees array, and + listed here individually for documentation purposes. + + INTEGER_TYPE and REAL_TYPE nodes for the standard data types. + + tree short_integer_type_node; + tree long_integer_type_node; + tree long_long_integer_type_node; + + tree short_unsigned_type_node; + tree long_unsigned_type_node; + tree long_long_unsigned_type_node; + + tree truthvalue_type_node; + tree truthvalue_false_node; + tree truthvalue_true_node; + + tree ptrdiff_type_node; + + tree unsigned_char_type_node; + tree signed_char_type_node; + tree wchar_type_node; + + tree char8_type_node; + tree char16_type_node; + tree char32_type_node; + + tree float_type_node; + tree double_type_node; + tree long_double_type_node; + + tree complex_integer_type_node; + tree complex_float_type_node; + tree complex_double_type_node; + tree complex_long_double_type_node; + + tree dfloat32_type_node; + tree dfloat64_type_node; + tree_dfloat128_type_node; + + tree intQI_type_node; + tree intHI_type_node; + tree intSI_type_node; + tree intDI_type_node; + tree intTI_type_node; + + tree unsigned_intQI_type_node; + tree unsigned_intHI_type_node; + tree unsigned_intSI_type_node; + tree unsigned_intDI_type_node; + tree unsigned_intTI_type_node; + + tree widest_integer_literal_type_node; + tree widest_unsigned_literal_type_node; + + Nodes for types `void *' and `const void *'. + + tree ptr_type_node, const_ptr_type_node; + + Nodes for types `char *' and `const char *'. + + tree string_type_node, const_string_type_node; + + Type `char[SOMENUMBER]'. + Used when an array of char is needed and the size is irrelevant. + + tree char_array_type_node; + + Type `wchar_t[SOMENUMBER]' or something like it. + Used when a wide string literal is created. + + tree wchar_array_type_node; + + Type `char8_t[SOMENUMBER]' or something like it. + Used when a UTF-8 string literal is created. + + tree char8_array_type_node; + + Type `char16_t[SOMENUMBER]' or something like it. + Used when a UTF-16 string literal is created. + + tree char16_array_type_node; + + Type `char32_t[SOMENUMBER]' or something like it. + Used when a UTF-32 string literal is created. + + tree char32_array_type_node; + + Type `int ()' -- used for implicit declaration of functions. + + tree default_function_type; + + A VOID_TYPE node, packaged in a TREE_LIST. + + tree void_list_node; + + The lazily created VAR_DECLs for __FUNCTION__, __PRETTY_FUNCTION__, + and __func__. (C doesn't generate __FUNCTION__ and__PRETTY_FUNCTION__ + VAR_DECLS, but C++ does.) + + tree function_name_decl_node; + tree pretty_function_name_decl_node; + tree c99_function_name_decl_node; + + Stack of nested function name VAR_DECLs. 
+ + tree saved_function_name_decls; + +*/ + +// forked from gcc/cp/module.cc fixed_trees + +static GTY (()) vec<tree, va_gc> *fixed_trees; + +// forked from gcc/cp/module.cc maybe_add_global + +/* VAL is a global tree, add it to the global vec if it is + interesting. Add some of its targets, if they too are + interesting. We do not add identifiers, as they can be re-found + via the identifier hash table. There is a cost to the number of + global trees. */ + +static int +maybe_add_global (tree val, unsigned &crc) +{ + int v = 0; + + if (val && !(TREE_CODE (val) == IDENTIFIER_NODE || TREE_VISITED (val))) + { + TREE_VISITED (val) = true; + crc = crc32_unsigned (crc, fixed_trees->length ()); + vec_safe_push (fixed_trees, val); + v++; + + if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPED)) + v += maybe_add_global (TREE_TYPE (val), crc); + if (CODE_CONTAINS_STRUCT (TREE_CODE (val), TS_TYPE_COMMON)) + v += maybe_add_global (TYPE_NAME (val), crc); + } + + return v; +} + +// forked from gcc/cp/module.cc global_tree_arys + +/* Global trees. */ +static const std::pair<tree *, unsigned> global_tree_arys[] = { + std::pair<tree *, unsigned> (cp_global_trees, CPTI_MODULE_HWM), + std::pair<tree *, unsigned> (c_global_trees, CTI_MODULE_HWM), +}; + +// forked from gcc/cp/module.cc init_modules + +void +init_modules () +{ + unsigned crc = 0; + vec_alloc (fixed_trees, 200); + + const tree *ptr = global_tree_arys[0].first; + unsigned limit = global_tree_arys[0].second; + for (unsigned ix = 0; ix != limit; ix++, ptr++) + { + maybe_add_global (*ptr, crc); + } + + ptr = global_tree_arys[1].first; + limit = global_tree_arys[1].second; + for (unsigned ix = 0; ix != limit; ix++, ptr++) + { + maybe_add_global (*ptr, crc); + } +} + +// forked from gcc/cp/constexpr.cc var_in_constexpr_fn + +/* True if T was declared in a function declared to be constexpr, and + therefore potentially constant in C++14. */ + +bool +var_in_constexpr_fn (tree t) +{ + tree ctx = DECL_CONTEXT (t); + return (ctx && TREE_CODE (ctx) == FUNCTION_DECL + && DECL_DECLARED_CONSTEXPR_P (ctx)); +} + +// forked from gcc/cp/name-lookup.cc member_vec_linear_search + +/* Linear search of (unordered) MEMBER_VEC for NAME. */ + +static tree +member_vec_linear_search (vec<tree, va_gc> *member_vec, tree name) +{ + for (int ix = member_vec->length (); ix--;) + if (tree binding = (*member_vec)[ix]) + if (OVL_NAME (binding) == name) + return binding; + + return NULL_TREE; +} + +// forked from gcc/cp/name-lookup.cc member_vec_binary_search + +/* Binary search of (ordered) MEMBER_VEC for NAME. */ + +static tree +member_vec_binary_search (vec<tree, va_gc> *member_vec, tree name) +{ + for (unsigned lo = 0, hi = member_vec->length (); lo < hi;) + { + unsigned mid = (lo + hi) / 2; + tree binding = (*member_vec)[mid]; + tree binding_name = OVL_NAME (binding); + + if (binding_name > name) + hi = mid; + else if (binding_name < name) + lo = mid + 1; + else + return binding; + } + + return NULL_TREE; +} + +// forked from gcc/cp/tree.cc is_overloaded_fn + +/* Returns nonzero if X is an expression for a (possibly overloaded) + function. If "f" is a function or function template, "f", "c->f", + "c.f", "C::f", and "f<int>" will all be considered possibly + overloaded functions. Returns 2 if the function is actually + overloaded, i.e., if it is impossible to know the type of the + function without performing overload resolution. 
*/ + +int +is_overloaded_fn (tree x) +{ + STRIP_ANY_LOCATION_WRAPPER (x); + + if (TREE_CODE (x) == COMPONENT_REF) + x = TREE_OPERAND (x, 1); + + return OVL_P (x); +} + +// forked from gcc/cp/tree.cc ovl_make + +/* Make a raw overload node containing FN. */ + +tree +ovl_make (tree fn, tree next) +{ + tree result = make_node (OVERLOAD); + + if (TREE_CODE (fn) == OVERLOAD) + OVL_NESTED_P (result) = true; + + TREE_TYPE (result) = (next ? unknown_type_node : TREE_TYPE (fn)); + if (next && TREE_CODE (next) == OVERLOAD && OVL_DEDUP_P (next)) + OVL_DEDUP_P (result) = true; + OVL_FUNCTION (result) = fn; + OVL_CHAIN (result) = next; + return result; +} + +// forked from gcc/cp/name-lookup.cc lookup_add + +/* Add a set of new FNS into a lookup. */ + +tree +lookup_add (tree fns, tree lookup) +{ + if (fns == error_mark_node || lookup == error_mark_node) + return error_mark_node; + + lookup = fns; + + return lookup; +} + +// forked from gcc/cp/typeck.cc type_memfn_quals + +/* Returns the function-cv-quals for TYPE, which must be a FUNCTION_TYPE or + METHOD_TYPE. */ + +int +type_memfn_quals (const_tree type) +{ + if (TREE_CODE (type) == FUNCTION_TYPE) + return TYPE_QUALS (type); + else if (TREE_CODE (type) == METHOD_TYPE) + return rs_type_quals (class_of_this_parm (type)); + else + gcc_unreachable (); +} + +// forked from gcc/cp/pt.cc find_parameter_pack_data + +/* Structure used to track the progress of find_parameter_packs_r. */ +struct find_parameter_pack_data +{ + /* TREE_LIST that will contain all of the parameter packs found by + the traversal. */ + tree *parameter_packs; + + /* Set of AST nodes that have been visited by the traversal. */ + hash_set<tree> *visited; + + /* True iff we're making a type pack expansion. */ + bool type_pack_expansion_p; + + /* True iff we found a subtree that has the extra args mechanism. */ + bool found_extra_args_tree_p = false; +}; + +// forked from gcc/cp/lex.cc conv_type_hasher + +/* Hasher for the conversion operator name hash table. */ +struct conv_type_hasher : ggc_ptr_hash<tree_node> +{ + /* Hash NODE, an identifier node in the table. TYPE_UID is + suitable, as we're not concerned about matching canonicalness + here. */ + static hashval_t hash (tree node) + { + return (hashval_t) TYPE_UID (TREE_TYPE (node)); + } + + /* Compare NODE, an identifier node in the table, against TYPE, an + incoming TYPE being looked up. */ + static bool equal (tree node, tree type) { return TREE_TYPE (node) == type; } +}; + +static GTY (()) hash_table<conv_type_hasher> *conv_type_names; + +// forked from gcc/cp/lex.cc make_conv_op_name + +/* Return an identifier for a conversion operator to TYPE. We can get + from the returned identifier to the type. We store TYPE, which is + not necessarily the canonical type, which allows us to report the + form the user used in error messages. All these identifiers are + not in the identifier hash table, and have the same string name. + These IDENTIFIERS are not in the identifier hash table, and all + have the same IDENTIFIER_STRING. */ + +tree +make_conv_op_name (tree type) +{ + if (type == error_mark_node) + return error_mark_node; + + if (conv_type_names == NULL) + conv_type_names = hash_table<conv_type_hasher>::create_ggc (31); + + tree *slot + = conv_type_names->find_slot_with_hash (type, (hashval_t) TYPE_UID (type), + INSERT); + tree identifier = *slot; + if (!identifier) + { + /* Create a raw IDENTIFIER outside of the identifier hash + table. */ + identifier = copy_node (conv_op_identifier); + + /* Just in case something managed to bind. 
*/ + IDENTIFIER_BINDING (identifier) = NULL; + + /* Hang TYPE off the identifier so it can be found easily later + when performing conversions. */ + TREE_TYPE (identifier) = type; + + *slot = identifier; + } + + return identifier; +} + +// forked from gcc/cp/pt.cc builtin_pack_fn_p + +/* True iff FN is a function representing a built-in variadic parameter + pack. */ + +bool +builtin_pack_fn_p (tree fn) +{ + if (!fn || TREE_CODE (fn) != FUNCTION_DECL + || !DECL_IS_UNDECLARED_BUILTIN (fn)) + return false; + + if (id_equal (DECL_NAME (fn), "__integer_pack")) + return true; + + return false; +} + +// forked from gcc/cp/pt.cc builtin_pack_call_p + +/* True iff CALL is a call to a function representing a built-in variadic + parameter pack. */ + +static bool +builtin_pack_call_p (tree call) +{ + if (TREE_CODE (call) != CALL_EXPR) + return false; + return builtin_pack_fn_p (CALL_EXPR_FN (call)); +} + +//// forked from gcc/cp/pt.cc has_extra_args_mechanism_p +// +///* Return true if the tree T has the extra args mechanism for +// avoiding partial instantiation. */ +// +// static bool +// has_extra_args_mechanism_p (const_tree t) +//{ +// return false; +//} + +// forked from gcc/cp/pt.cc find_parameter_packs_r + +/* Identifies all of the argument packs that occur in a template + argument and appends them to the TREE_LIST inside DATA, which is a + find_parameter_pack_data structure. This is a subroutine of + make_pack_expansion and uses_parameter_packs. */ +static tree +find_parameter_packs_r (tree *tp, int *walk_subtrees, void *data) +{ + tree t = *tp; + struct find_parameter_pack_data *ppd + = (struct find_parameter_pack_data *) data; + bool parameter_pack_p = false; + +#define WALK_SUBTREE(NODE) \ + rs_walk_tree (&(NODE), &find_parameter_packs_r, ppd, ppd->visited) + + /* Don't look through typedefs; we are interested in whether a + parameter pack is actually written in the expression/type we're + looking at, not the target type. */ + if (TYPE_P (t) && typedef_variant_p (t)) + { + *walk_subtrees = 0; + return NULL_TREE; + } + + /* Identify whether this is a parameter pack or not. */ + switch (TREE_CODE (t)) + { + case FIELD_DECL: + case PARM_DECL: + break; + + case VAR_DECL: + break; + + case CALL_EXPR: + if (builtin_pack_call_p (t)) + parameter_pack_p = true; + break; + + case BASES: + parameter_pack_p = true; + break; + default: + /* Not a parameter pack. */ + break; + } + + if (parameter_pack_p) + { + /* Add this parameter pack to the list. */ + *ppd->parameter_packs = tree_cons (NULL_TREE, t, *ppd->parameter_packs); + } + + if (TYPE_P (t)) + rs_walk_tree (&TYPE_CONTEXT (t), &find_parameter_packs_r, ppd, + ppd->visited); + + /* This switch statement will return immediately if we don't find a + parameter pack. ??? Should some of these be in cp_walk_subtrees? */ + switch (TREE_CODE (t)) + { + case DECL_EXPR: { + tree decl = DECL_EXPR_DECL (t); + if (is_typedef_decl (decl)) + /* Since we stop at typedefs above, we need to look through them at + the point of the DECL_EXPR. 
*/ + rs_walk_tree (&DECL_ORIGINAL_TYPE (decl), &find_parameter_packs_r, + ppd, ppd->visited); + return NULL_TREE; + } + + case INTEGER_TYPE: + rs_walk_tree (&TYPE_MAX_VALUE (t), &find_parameter_packs_r, ppd, + ppd->visited); + *walk_subtrees = 0; + return NULL_TREE; + + case IDENTIFIER_NODE: + rs_walk_tree (&TREE_TYPE (t), &find_parameter_packs_r, ppd, ppd->visited); + *walk_subtrees = 0; + return NULL_TREE; + + case DECLTYPE_TYPE: { + /* When traversing a DECLTYPE_TYPE_EXPR, we need to set + type_pack_expansion_p to false so that any placeholders + within the expression don't get marked as parameter packs. */ + bool type_pack_expansion_p = ppd->type_pack_expansion_p; + ppd->type_pack_expansion_p = false; + rs_walk_tree (&DECLTYPE_TYPE_EXPR (t), &find_parameter_packs_r, ppd, + ppd->visited); + ppd->type_pack_expansion_p = type_pack_expansion_p; + *walk_subtrees = 0; + return NULL_TREE; + } + + case IF_STMT: + rs_walk_tree (&IF_COND (t), &find_parameter_packs_r, ppd, ppd->visited); + rs_walk_tree (&THEN_CLAUSE (t), &find_parameter_packs_r, ppd, + ppd->visited); + rs_walk_tree (&ELSE_CLAUSE (t), &find_parameter_packs_r, ppd, + ppd->visited); + /* Don't walk into IF_STMT_EXTRA_ARGS. */ + *walk_subtrees = 0; + return NULL_TREE; + + case FUNCTION_TYPE: + case METHOD_TYPE: + WALK_SUBTREE (TYPE_RAISES_EXCEPTIONS (t)); + break; + + default: + return NULL_TREE; + } + +#undef WALK_SUBTREE + + return NULL_TREE; +} + +// forked from gcc/cp/typeck.cc type_memfn_rqual + +/* Returns the function-ref-qualifier for TYPE */ + +rs_ref_qualifier +type_memfn_rqual (const_tree type) +{ + gcc_assert (FUNC_OR_METHOD_TYPE_P (type)); + + if (!FUNCTION_REF_QUALIFIED (type)) + return REF_QUAL_NONE; + else if (FUNCTION_RVALUE_QUALIFIED (type)) + return REF_QUAL_RVALUE; + else + return REF_QUAL_LVALUE; +} + +// forked from gcc/cp/lex.cc maybe_add_lang_type_raw + +/* Add a raw lang_type to T, a type, should it need one. */ + +bool +maybe_add_lang_type_raw (tree t) +{ + if (!RECORD_OR_UNION_CODE_P (TREE_CODE (t))) + return false; + + auto *lt = (struct lang_type *) (ggc_internal_cleared_alloc ( + sizeof (struct lang_type))); + TYPE_LANG_SPECIFIC (t) = lt; + + if (GATHER_STATISTICS) + { + tree_node_counts[(int) lang_type] += 1; + tree_node_sizes[(int) lang_type] += sizeof (struct lang_type); + } + + return true; +} + +// forked from gcc/c-family/c-lex.cc get_fileinfo + +static splay_tree file_info_tree; + +struct c_fileinfo * +get_fileinfo (const char *name) +{ + splay_tree_node n; + struct c_fileinfo *fi; + + if (!file_info_tree) + file_info_tree = splay_tree_new (splay_tree_compare_strings, 0, + splay_tree_delete_pointers); + + n = splay_tree_lookup (file_info_tree, (splay_tree_key) name); + if (n) + return (struct c_fileinfo *) n->value; + + fi = XNEW (struct c_fileinfo); + fi->time = 0; + fi->interface_only = 0; + fi->interface_unknown = 1; + splay_tree_insert (file_info_tree, (splay_tree_key) name, + (splay_tree_value) fi); + return fi; +} + +// forked from gcc/cp/lex.cc cxx_make_type + +tree +cxx_make_type (enum tree_code code MEM_STAT_DECL) +{ + tree t = make_node (code PASS_MEM_STAT); + + if (maybe_add_lang_type_raw (t)) + { + /* Set up some flags that give proper default behavior. 
*/ + struct c_fileinfo *finfo = get_fileinfo (LOCATION_FILE (input_location)); + SET_CLASSTYPE_INTERFACE_UNKNOWN_X (t, finfo->interface_unknown); + CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only; + } + + if (code == RECORD_TYPE || code == UNION_TYPE) + TYPE_CXX_ODR_P (t) = 1; + + return t; +} + +// forked from gcc/cp/tree.cc build_min_array_type + +/* Build an ARRAY_TYPE without laying it out. */ + +static tree +build_min_array_type (tree elt_type, tree index_type) +{ + tree t = cxx_make_type (ARRAY_TYPE); + TREE_TYPE (t) = elt_type; + TYPE_DOMAIN (t) = index_type; + return t; +} + +// forked from gcc/cp/name-lookup.cc fields_linear_search + +/* Linear search of (partially ordered) fields of KLASS for NAME. */ + +static tree +fields_linear_search (tree klass, tree name, bool want_type) +{ + for (tree fields = TYPE_FIELDS (klass); fields; fields = DECL_CHAIN (fields)) + { + tree decl = fields; + + if (DECL_NAME (decl) != name) + continue; + + if (DECL_DECLARES_FUNCTION_P (decl)) + /* Functions are found separately. */ + continue; + + if (!want_type || DECL_DECLARES_TYPE_P (decl)) + return decl; + } + + return NULL_TREE; +} + +// forked from gcc/cp/except.cc canonnothrow_spec_pical_eh_spec + +/* Return true iff SPEC is throw() or noexcept(true). */ + +bool +nothrow_spec_p (const_tree spec) +{ + if (spec == empty_except_spec || spec == noexcept_true_spec) + return true; + + gcc_assert (!spec || TREE_VALUE (spec) || spec == noexcept_false_spec + || TREE_PURPOSE (spec) == error_mark_node); + + return false; +} + +// forked from gcc/cp/tree.cc may_get_fns + +/* Get the overload set FROM refers to. Returns NULL if it's not an + overload set. */ + +tree +maybe_get_fns (tree from) +{ + STRIP_ANY_LOCATION_WRAPPER (from); + + /* A baselink is also considered an overloaded function. */ + if (TREE_CODE (from) == COMPONENT_REF) + from = TREE_OPERAND (from, 1); + + if (OVL_P (from)) + return from; + + return NULL; +} + +// forked from gcc/cp/tree.cc get_fns + +/* FROM refers to an overload set. Return that set (or die). */ + +tree +get_fns (tree from) +{ + tree res = maybe_get_fns (from); + + gcc_assert (res); + return res; +} + +// forked from gcc/cp/tree.cc get_first_fn + +/* Return the first function of the overload set FROM refers to. */ + +tree +get_first_fn (tree from) +{ + return OVL_FIRST (get_fns (from)); +} + +// forked from gcc/cp/tree.cc dependent_name + +/* X is the CALL_EXPR_FN of a CALL_EXPR. If X represents a dependent name + (14.6.2), return the IDENTIFIER_NODE for that name. Otherwise, return + NULL_TREE. */ + +tree +dependent_name (tree x) +{ + /* FIXME a dependent name must be unqualified, but this function doesn't + distinguish between qualified and unqualified identifiers. */ + if (identifier_p (x)) + return x; + + if (OVL_P (x)) + return OVL_NAME (x); + return NULL_TREE; +} + +// forked from gcc/cp/tree.cc called_fns_equal + +/* Subroutine of rs_tree_equal: t1 and t2 are the CALL_EXPR_FNs of two + CALL_EXPRS. Return whether they are equivalent. */ + +static bool +called_fns_equal (tree t1, tree t2) +{ + /* Core 1321: dependent names are equivalent even if the overload sets + are different. But do compare explicit template arguments. */ + tree name1 = dependent_name (t1); + tree name2 = dependent_name (t2); + if (name1 || name2) + { + tree targs1 = NULL_TREE, targs2 = NULL_TREE; + + if (name1 != name2) + return false; + + /* FIXME dependent_name currently returns an unqualified name regardless + of whether the function was named with a qualified- or unqualified-id. 
+ Until that's fixed, check that we aren't looking at overload sets from + different scopes. */ + if (is_overloaded_fn (t1) && is_overloaded_fn (t2) + && (DECL_CONTEXT (get_first_fn (t1)) + != DECL_CONTEXT (get_first_fn (t2)))) + return false; + + return rs_tree_equal (targs1, targs2); + } + else + return rs_tree_equal (t1, t2); +} + +// forked from gcc/cp/tree.cc canonical_eh_spec + +/* Return the canonical version of exception-specification RAISES for a C++17 + function type, for use in type comparison and building TYPE_CANONICAL. */ + +tree +canonical_eh_spec (tree raises) +{ + if (raises == NULL_TREE) + return raises; + else if (nothrow_spec_p (raises)) + /* throw() -> noexcept. */ + return noexcept_true_spec; + else + /* For C++17 type matching, anything else -> nothing. */ + return NULL_TREE; +} + +/* Like cp_tree_operand_length, but takes a tree_code CODE. */ + +int +rs_tree_code_length (enum tree_code code) +{ + gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); + + switch (code) + { + case PREINCREMENT_EXPR: + case PREDECREMENT_EXPR: + case POSTINCREMENT_EXPR: + case POSTDECREMENT_EXPR: + return 1; + + case ARRAY_REF: + return 2; + + default: + return TREE_CODE_LENGTH (code); + } +} + +// forked from gcc/cp/tree.cc rs_tree_operand_length + +/* Return the number of operands in T that we care about for things like + mangling. */ + +int +rs_tree_operand_length (const_tree t) +{ + enum tree_code code = TREE_CODE (t); + + if (TREE_CODE_CLASS (code) == tcc_vl_exp) + return VL_EXP_OPERAND_LENGTH (t); + + return rs_tree_code_length (code); +} + +// forked from gcc/cp/tree.cc cp_tree_equal + +/* Return truthvalue of whether T1 is the same tree structure as T2. + Return 1 if they are the same. Return 0 if they are different. */ + +bool +rs_tree_equal (tree t1, tree t2) +{ + enum tree_code code1, code2; + + if (t1 == t2) + return true; + if (!t1 || !t2) + return false; + + code1 = TREE_CODE (t1); + code2 = TREE_CODE (t2); + + if (code1 != code2) + return false; + + if (CONSTANT_CLASS_P (t1) && !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + + switch (code1) + { + case VOID_CST: + /* There's only a single VOID_CST node, so we should never reach + here. */ + gcc_unreachable (); + + case INTEGER_CST: + return tree_int_cst_equal (t1, t2); + + case REAL_CST: + return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); + + case STRING_CST: + return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) + && !memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), + TREE_STRING_LENGTH (t1)); + + case FIXED_CST: + return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); + + case COMPLEX_CST: + return rs_tree_equal (TREE_REALPART (t1), TREE_REALPART (t2)) + && rs_tree_equal (TREE_IMAGPART (t1), TREE_IMAGPART (t2)); + + case VECTOR_CST: + return operand_equal_p (t1, t2, OEP_ONLY_CONST); + + case CONSTRUCTOR: + /* We need to do this when determining whether or not two + non-type pointer to member function template arguments + are the same. 
*/ + if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)) + || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2)) + return false; + { + tree field, value; + unsigned int i; + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value) + { + constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i); + if (!rs_tree_equal (field, elt2->index) + || !rs_tree_equal (value, elt2->value)) + return false; + } + } + return true; + + case TREE_LIST: + if (!rs_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))) + return false; + if (!rs_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2))) + return false; + return rs_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2)); + + case SAVE_EXPR: + return rs_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); + + case CALL_EXPR: { + if (KOENIG_LOOKUP_P (t1) != KOENIG_LOOKUP_P (t2)) + return false; + + if (!called_fns_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2))) + return false; + + call_expr_arg_iterator iter1, iter2; + init_call_expr_arg_iterator (t1, &iter1); + init_call_expr_arg_iterator (t2, &iter2); + if (iter1.n != iter2.n) + return false; + + while (more_call_expr_args_p (&iter1)) + { + tree arg1 = next_call_expr_arg (&iter1); + tree arg2 = next_call_expr_arg (&iter2); + + gcc_checking_assert (arg1 && arg2); + if (!rs_tree_equal (arg1, arg2)) + return false; + } + + return true; + } + + case TARGET_EXPR: { + tree o1 = TREE_OPERAND (t1, 0); + tree o2 = TREE_OPERAND (t2, 0); + + /* Special case: if either target is an unallocated VAR_DECL, + it means that it's going to be unified with whatever the + TARGET_EXPR is really supposed to initialize, so treat it + as being equivalent to anything. */ + if (VAR_P (o1) && DECL_NAME (o1) == NULL_TREE && !DECL_RTL_SET_P (o1)) + /*Nop*/; + else if (VAR_P (o2) && DECL_NAME (o2) == NULL_TREE + && !DECL_RTL_SET_P (o2)) + /*Nop*/; + else if (!rs_tree_equal (o1, o2)) + return false; + + return rs_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); + } + + case PARM_DECL: + /* For comparing uses of parameters in late-specified return types + with an out-of-class definition of the function, but can also come + up for expressions that involve 'this' in a member function + template. */ + + if (same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + { + if (DECL_ARTIFICIAL (t1) ^ DECL_ARTIFICIAL (t2)) + return false; + if (CONSTRAINT_VAR_P (t1) ^ CONSTRAINT_VAR_P (t2)) + return false; + if (DECL_ARTIFICIAL (t1) + || (DECL_PARM_LEVEL (t1) == DECL_PARM_LEVEL (t2) + && DECL_PARM_INDEX (t1) == DECL_PARM_INDEX (t2))) + return true; + } + return false; + + case VAR_DECL: + case CONST_DECL: + case FIELD_DECL: + case FUNCTION_DECL: + case IDENTIFIER_NODE: + case SSA_NAME: + return false; + + case TREE_VEC: + return true; + + case NON_LVALUE_EXPR: + case VIEW_CONVERT_EXPR: + /* Used for location wrappers with possibly NULL types. 
*/ + if (!TREE_TYPE (t1) || !TREE_TYPE (t2)) + { + if (TREE_TYPE (t1) || TREE_TYPE (t2)) + return false; + break; + } + + default: + break; + } + + switch (TREE_CODE_CLASS (code1)) + { + case tcc_unary: + case tcc_binary: + case tcc_comparison: + case tcc_expression: + case tcc_vl_exp: + case tcc_reference: + case tcc_statement: { + int n = rs_tree_operand_length (t1); + if (TREE_CODE_CLASS (code1) == tcc_vl_exp + && n != TREE_OPERAND_LENGTH (t2)) + return false; + + for (int i = 0; i < n; ++i) + if (!rs_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i))) + return false; + + return true; + } + + case tcc_type: + return same_type_p (t1, t2); + + default: + gcc_unreachable (); + } + + /* We can get here with --disable-checking. */ + return false; +} + +// forked from gcc/cp/class.cc publicly_uniquely_derived_p + +/* TRUE iff TYPE is publicly & uniquely derived from PARENT. */ + +bool +publicly_uniquely_derived_p (tree parent, tree type) +{ + return false; +} + +// forked from gcc/cp/typeck.cc comp_except_types + +/* Compare two exception specifier types for exactness or subsetness, if + allowed. Returns false for mismatch, true for match (same, or + derived and !exact). + + [except.spec] "If a class X ... objects of class X or any class publicly + and unambiguously derived from X. Similarly, if a pointer type Y * ... + exceptions of type Y * or that are pointers to any type publicly and + unambiguously derived from Y. Otherwise a function only allows exceptions + that have the same type ..." + This does not mention cv qualifiers and is different to what throw + [except.throw] and catch [except.catch] will do. They will ignore the + top level cv qualifiers, and allow qualifiers in the pointer to class + example. + + We implement the letter of the standard. */ + +static bool +comp_except_types (tree a, tree b, bool exact) +{ + if (same_type_p (a, b)) + return true; + else if (!exact) + { + if (rs_type_quals (a) || rs_type_quals (b)) + return false; + + if (TYPE_PTR_P (a) && TYPE_PTR_P (b)) + { + a = TREE_TYPE (a); + b = TREE_TYPE (b); + if (rs_type_quals (a) || rs_type_quals (b)) + return false; + } + + if (TREE_CODE (a) != RECORD_TYPE || TREE_CODE (b) != RECORD_TYPE) + return false; + + if (publicly_uniquely_derived_p (a, b)) + return true; + } + return false; +} + +// forked from gcc/cp/typeck.cc comp_except_specs + +/* Return true if TYPE1 and TYPE2 are equivalent exception specifiers. + If EXACT is ce_derived, T2 can be stricter than T1 (according to 15.4/5). + If EXACT is ce_type, the C++17 type compatibility rules apply. + If EXACT is ce_normal, the compatibility rules in 15.4/3 apply. + If EXACT is ce_exact, the specs must be exactly the same. Exception lists + are unordered, but we've already filtered out duplicates. Most lists will + be in order, we should try to make use of that. */ + +bool +comp_except_specs (const_tree t1, const_tree t2, int exact) +{ + const_tree probe; + const_tree base; + int length = 0; + + if (t1 == t2) + return true; + + /* First handle noexcept. */ + if (exact < ce_exact) + { + if (exact == ce_type + && (canonical_eh_spec (CONST_CAST_TREE (t1)) + == canonical_eh_spec (CONST_CAST_TREE (t2)))) + return true; + + /* noexcept(false) is compatible with no exception-specification, + and less strict than any spec. */ + if (t1 == noexcept_false_spec) + return t2 == NULL_TREE || exact == ce_derived; + /* Even a derived noexcept(false) is compatible with no + exception-specification. 
*/ + if (t2 == noexcept_false_spec) + return t1 == NULL_TREE; + + /* Otherwise, if we aren't looking for an exact match, noexcept is + equivalent to throw(). */ + if (t1 == noexcept_true_spec) + t1 = empty_except_spec; + if (t2 == noexcept_true_spec) + t2 = empty_except_spec; + } + + /* If any noexcept is left, it is only comparable to itself; + either we're looking for an exact match or we're redeclaring a + template with dependent noexcept. */ + if ((t1 && TREE_PURPOSE (t1)) || (t2 && TREE_PURPOSE (t2))) + return (t1 && t2 && rs_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))); + + if (t1 == NULL_TREE) /* T1 is ... */ + return t2 == NULL_TREE || exact == ce_derived; + if (!TREE_VALUE (t1)) /* t1 is EMPTY */ + return t2 != NULL_TREE && !TREE_VALUE (t2); + if (t2 == NULL_TREE) /* T2 is ... */ + return false; + if (TREE_VALUE (t1) && !TREE_VALUE (t2)) /* T2 is EMPTY, T1 is not */ + return exact == ce_derived; + + /* Neither set is ... or EMPTY, make sure each part of T2 is in T1. + Count how many we find, to determine exactness. For exact matching and + ordered T1, T2, this is an O(n) operation, otherwise its worst case is + O(nm). */ + for (base = t1; t2 != NULL_TREE; t2 = TREE_CHAIN (t2)) + { + for (probe = base; probe != NULL_TREE; probe = TREE_CHAIN (probe)) + { + tree a = TREE_VALUE (probe); + tree b = TREE_VALUE (t2); + + if (comp_except_types (a, b, exact)) + { + if (probe == base && exact > ce_derived) + base = TREE_CHAIN (probe); + length++; + break; + } + } + if (probe == NULL_TREE) + return false; + } + return exact == ce_derived || base == NULL_TREE || length == list_length (t1); +} + +// forked from gcc/cp/typeck.cc compparms + +/* Subroutines of `comptypes'. */ + +/* Return true if two parameter type lists PARMS1 and PARMS2 are + equivalent in the sense that functions with those parameter types + can have equivalent types. The two lists must be equivalent, + element by element. */ + +bool +compparms (const_tree parms1, const_tree parms2) +{ + const_tree t1, t2; + + /* An unspecified parmlist matches any specified parmlist + whose argument types don't need default promotions. */ + + for (t1 = parms1, t2 = parms2; t1 || t2; + t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2)) + { + /* If one parmlist is shorter than the other, + they fail to match. */ + if (!t1 || !t2) + return false; + if (!same_type_p (TREE_VALUE (t1), TREE_VALUE (t2))) + return false; + } + return true; +} + +/* Set TYPE_CANONICAL like build_array_type_1, but using + build_cplus_array_type. */ + +static void +set_array_type_canon (tree t, tree elt_type, tree index_type, bool dep) +{ + /* Set the canonical type for this new node. */ + if (TYPE_STRUCTURAL_EQUALITY_P (elt_type) + || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type))) + SET_TYPE_STRUCTURAL_EQUALITY (t); + else if (TYPE_CANONICAL (elt_type) != elt_type + || (index_type && TYPE_CANONICAL (index_type) != index_type)) + TYPE_CANONICAL (t) + = build_cplus_array_type (TYPE_CANONICAL (elt_type), + index_type ? TYPE_CANONICAL (index_type) + : index_type, + dep); + else + TYPE_CANONICAL (t) = t; +} + +// forked from gcc/cp/tree.cc cplus_array_info + +struct cplus_array_info +{ + tree type; + tree domain; +}; + +// forked from gcc/cp/tree.cc cplus_array_hasher + +struct cplus_array_hasher : ggc_ptr_hash<tree_node> +{ + typedef cplus_array_info *compare_type; + + static hashval_t hash (tree t); + static bool equal (tree, cplus_array_info *); +}; + +/* Hash an ARRAY_TYPE. K is really of type `tree'. 
*/ + +hashval_t +cplus_array_hasher::hash (tree t) +{ + hashval_t hash; + + hash = TYPE_UID (TREE_TYPE (t)); + if (TYPE_DOMAIN (t)) + hash ^= TYPE_UID (TYPE_DOMAIN (t)); + return hash; +} + +/* Compare two ARRAY_TYPEs. K1 is really of type `tree', K2 is really + of type `cplus_array_info*'. */ + +bool +cplus_array_hasher::equal (tree t1, cplus_array_info *t2) +{ + return (TREE_TYPE (t1) == t2->type && TYPE_DOMAIN (t1) == t2->domain); +} + +// forked from gcc/cp/tree.cc cplus_array_htab + +/* Hash table containing dependent array types, which are unsuitable for + the language-independent type hash table. */ +static GTY (()) hash_table<cplus_array_hasher> *cplus_array_htab; + +// forked from gcc/cp/tree.cc is_byte_access_type + +/* Returns true if TYPE is char, unsigned char, or std::byte. */ + +bool +is_byte_access_type (tree type) +{ + type = TYPE_MAIN_VARIANT (type); + if (type == char_type_node || type == unsigned_char_type_node) + return true; + + return (TREE_CODE (type) == ENUMERAL_TYPE && TYPE_CONTEXT (type) == std_node + && !strcmp ("byte", TYPE_NAME_STRING (type))); +} + +// forked from gcc/cp/tree.cc build_cplus_array_type + +/* Like build_array_type, but handle special C++ semantics: an array of a + variant element type is a variant of the array of the main variant of + the element type. IS_DEPENDENT is -ve if we should determine the + dependency. Otherwise its bool value indicates dependency. */ + +tree +build_cplus_array_type (tree elt_type, tree index_type, int dependent) +{ + tree t; + + if (elt_type == error_mark_node || index_type == error_mark_node) + return error_mark_node; + + if (dependent < 0) + dependent = 0; + + if (elt_type != TYPE_MAIN_VARIANT (elt_type)) + /* Start with an array of the TYPE_MAIN_VARIANT. */ + t = build_cplus_array_type (TYPE_MAIN_VARIANT (elt_type), index_type, + dependent); + else if (dependent) + { + /* Since type_hash_canon calls layout_type, we need to use our own + hash table. */ + cplus_array_info cai; + hashval_t hash; + + if (cplus_array_htab == NULL) + cplus_array_htab = hash_table<cplus_array_hasher>::create_ggc (61); + + hash = TYPE_UID (elt_type); + if (index_type) + hash ^= TYPE_UID (index_type); + cai.type = elt_type; + cai.domain = index_type; + + tree *e = cplus_array_htab->find_slot_with_hash (&cai, hash, INSERT); + if (*e) + /* We have found the type: we're done. */ + return (tree) *e; + else + { + /* Build a new array type. */ + t = build_min_array_type (elt_type, index_type); + + /* Store it in the hash table. */ + *e = t; + + /* Set the canonical type for this new node. */ + set_array_type_canon (t, elt_type, index_type, dependent); + + /* Mark it as dependent now, this saves time later. */ + TYPE_DEPENDENT_P_VALID (t) = true; + TYPE_DEPENDENT_P (t) = true; + } + } + else + { + bool typeless_storage = is_byte_access_type (elt_type); + t = build_array_type (elt_type, index_type, typeless_storage); + + /* Mark as non-dependenty now, this will save time later. */ + TYPE_DEPENDENT_P_VALID (t) = true; + } + + /* Now check whether we already have this array variant. */ + if (elt_type != TYPE_MAIN_VARIANT (elt_type)) + { + tree m = t; + for (t = m; t; t = TYPE_NEXT_VARIANT (t)) + if (TREE_TYPE (t) == elt_type && TYPE_NAME (t) == NULL_TREE + && TYPE_ATTRIBUTES (t) == NULL_TREE) + break; + if (!t) + { + t = build_min_array_type (elt_type, index_type); + /* Mark dependency now, this saves time later. 
*/ + TYPE_DEPENDENT_P_VALID (t) = true; + TYPE_DEPENDENT_P (t) = dependent; + set_array_type_canon (t, elt_type, index_type, dependent); + if (!dependent) + { + layout_type (t); + /* Make sure sizes are shared with the main variant. + layout_type can't be called after setting TYPE_NEXT_VARIANT, + as it will overwrite alignment etc. of all variants. */ + TYPE_SIZE (t) = TYPE_SIZE (m); + TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (m); + TYPE_TYPELESS_STORAGE (t) = TYPE_TYPELESS_STORAGE (m); + } + + TYPE_MAIN_VARIANT (t) = m; + TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m); + TYPE_NEXT_VARIANT (m) = t; + } + } + + /* Avoid spurious warnings with VLAs (c++/54583). */ + if (TYPE_SIZE (t) && EXPR_P (TYPE_SIZE (t))) + suppress_warning (TYPE_SIZE (t), OPT_Wunused); + + /* Push these needs up to the ARRAY_TYPE so that initialization takes + place more easily. */ + bool needs_ctor + = (TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (elt_type)); + bool needs_dtor = (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) + = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (elt_type)); + + if (!dependent && t == TYPE_MAIN_VARIANT (t) && !COMPLETE_TYPE_P (t) + && COMPLETE_TYPE_P (elt_type)) + { + /* The element type has been completed since the last time we saw + this array type; update the layout and 'tor flags for any variants + that need it. */ + layout_type (t); + for (tree v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v)) + { + TYPE_NEEDS_CONSTRUCTING (v) = needs_ctor; + TYPE_HAS_NONTRIVIAL_DESTRUCTOR (v) = needs_dtor; + } + } + + return t; +} + +// forked from gcc/cp/tree.cc cp_build_qualified_type_real + +/* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles + arrays correctly. In particular, if TYPE is an array of T's, and + TYPE_QUALS is non-empty, returns an array of qualified T's. + + FLAGS determines how to deal with ill-formed qualifications. If + tf_ignore_bad_quals is set, then bad qualifications are dropped + (this is permitted if TYPE was introduced via a typedef or template + type parameter). If bad qualifications are dropped and tf_warning + is set, then a warning is issued for non-const qualifications. If + tf_ignore_bad_quals is not set and tf_error is not set, we + return error_mark_node. Otherwise, we issue an error, and ignore + the qualifications. + + Qualification of a reference type is valid when the reference came + via a typedef or template type argument. [dcl.ref] No such + dispensation is provided for qualifying a function type. [dcl.fct] + DR 295 queries this and the proposed resolution brings it into line + with qualifying a reference. We implement the DR. We also behave + in a similar manner for restricting non-pointer types. */ + +tree +rs_build_qualified_type_real (tree type, int type_quals, + tsubst_flags_t complain) +{ + tree result; + int bad_quals = TYPE_UNQUALIFIED; + + if (type == error_mark_node) + return type; + + if (type_quals == rs_type_quals (type)) + return type; + + if (TREE_CODE (type) == ARRAY_TYPE) + { + /* In C++, the qualification really applies to the array element + type. Obtain the appropriately qualified element type. */ + tree t; + tree element_type + = rs_build_qualified_type_real (TREE_TYPE (type), type_quals, complain); + + if (element_type == error_mark_node) + return error_mark_node; + + /* See if we already have an identically qualified type. Tests + should be equivalent to those in check_qualified_type. 
*/ + for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) + if (TREE_TYPE (t) == element_type && TYPE_NAME (t) == TYPE_NAME (type) + && TYPE_CONTEXT (t) == TYPE_CONTEXT (type) + && attribute_list_equal (TYPE_ATTRIBUTES (t), + TYPE_ATTRIBUTES (type))) + break; + + if (!t) + { + /* If we already know the dependentness, tell the array type + constructor. This is important for module streaming, as we cannot + dynamically determine that on read in. */ + t = build_cplus_array_type (element_type, TYPE_DOMAIN (type), + TYPE_DEPENDENT_P_VALID (type) + ? int (TYPE_DEPENDENT_P (type)) + : -1); + + /* Keep the typedef name. */ + if (TYPE_NAME (t) != TYPE_NAME (type)) + { + t = build_variant_type_copy (t); + TYPE_NAME (t) = TYPE_NAME (type); + SET_TYPE_ALIGN (t, TYPE_ALIGN (type)); + TYPE_USER_ALIGN (t) = TYPE_USER_ALIGN (type); + } + } + + /* Even if we already had this variant, we update + TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case + they changed since the variant was originally created. + + This seems hokey; if there is some way to use a previous + variant *without* coming through here, + TYPE_NEEDS_CONSTRUCTING will never be updated. */ + TYPE_NEEDS_CONSTRUCTING (t) + = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type)); + TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) + = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type)); + return t; + } + + /* A reference or method type shall not be cv-qualified. + [dcl.ref], [dcl.fct]. This used to be an error, but as of DR 295 + (in CD1) we always ignore extra cv-quals on functions. */ + + /* [dcl.ref/1] Cv-qualified references are ill-formed except when + the cv-qualifiers are introduced through the use of a typedef-name + ([dcl.typedef], [temp.param]) or decltype-specifier + ([dcl.type.decltype]),in which case the cv-qualifiers are + ignored. */ + if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE) + && (TYPE_REF_P (type) || FUNC_OR_METHOD_TYPE_P (type))) + { + if (TYPE_REF_P (type) + && (!typedef_variant_p (type) || FUNC_OR_METHOD_TYPE_P (type))) + bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE); + type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE); + } + + /* But preserve any function-cv-quals on a FUNCTION_TYPE. */ + if (TREE_CODE (type) == FUNCTION_TYPE) + type_quals |= type_memfn_quals (type); + + /* A restrict-qualified type must be a pointer (or reference) + to object or incomplete type. */ + if ((type_quals & TYPE_QUAL_RESTRICT) && TREE_CODE (type) != TYPENAME_TYPE + && !INDIRECT_TYPE_P (type)) + { + bad_quals |= TYPE_QUAL_RESTRICT; + type_quals &= ~TYPE_QUAL_RESTRICT; + } + + if (bad_quals == TYPE_UNQUALIFIED || (complain & tf_ignore_bad_quals)) + /*OK*/; + else if (!(complain & tf_error)) + return error_mark_node; + else + { + tree bad_type = build_qualified_type (ptr_type_node, bad_quals); + error ("%qV qualifiers cannot be applied to %qT", bad_type, type); + } + + /* Retrieve (or create) the appropriately qualified variant. */ + result = build_qualified_type (type, type_quals); + + return result; +} + +// forked from gcc/cp/c-common.cc vector_targets_convertible_p + +/* vector_targets_convertible_p is used for vector pointer types. The + callers perform various checks that the qualifiers are satisfactory, + while OTOH vector_targets_convertible_p ignores the number of elements + in the vectors. 
That's fine with vector pointers as we can consider, + say, a vector of 8 elements as two consecutive vectors of 4 elements, + and that does not require and conversion of the pointer values. + In contrast, vector_types_convertible_p and + vector_types_compatible_elements_p are used for vector value types. */ +/* True if pointers to distinct types T1 and T2 can be converted to + each other without an explicit cast. Only returns true for opaque + vector types. */ +bool +vector_targets_convertible_p (const_tree t1, const_tree t2) +{ + if (VECTOR_TYPE_P (t1) && VECTOR_TYPE_P (t2) + && (TYPE_VECTOR_OPAQUE (t1) || TYPE_VECTOR_OPAQUE (t2)) + && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2))) + return true; + + return false; +} + +// forked from gcc/cp/typeck.cc comp_array_types + +/* Compare the array types T1 and T2. CB says how we should behave when + comparing array bounds: bounds_none doesn't allow dimensionless arrays, + bounds_either says than any array can be [], bounds_first means that + onlt T1 can be an array with unknown bounds. STRICT is true if + qualifiers must match when comparing the types of the array elements. */ + +static bool +comp_array_types (const_tree t1, const_tree t2, compare_bounds_t cb, + bool strict) +{ + tree d1; + tree d2; + tree max1, max2; + + if (t1 == t2) + return true; + + /* The type of the array elements must be the same. */ + if (strict ? !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)) + : !similar_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + + d1 = TYPE_DOMAIN (t1); + d2 = TYPE_DOMAIN (t2); + + if (d1 == d2) + return true; + + /* If one of the arrays is dimensionless, and the other has a + dimension, they are of different types. However, it is valid to + write: + + extern int a[]; + int a[3]; + + by [basic.link]: + + declarations for an array object can specify + array types that differ by the presence or absence of a major + array bound (_dcl.array_). */ + if (!d1 && d2) + return cb >= bounds_either; + else if (d1 && !d2) + return cb == bounds_either; + + /* Check that the dimensions are the same. */ + + if (!rs_tree_equal (TYPE_MIN_VALUE (d1), TYPE_MIN_VALUE (d2))) + return false; + max1 = TYPE_MAX_VALUE (d1); + max2 = TYPE_MAX_VALUE (d2); + + if (!rs_tree_equal (max1, max2)) + return false; + + return true; +} + +// forked from gcc/cp/typeck.cc same_type_ignoring_top_level_qualifiers_p + +/* Returns nonzero iff TYPE1 and TYPE2 are the same type, ignoring + top-level qualifiers. */ + +bool +same_type_ignoring_top_level_qualifiers_p (tree type1, tree type2) +{ + if (type1 == error_mark_node || type2 == error_mark_node) + return false; + if (type1 == type2) + return true; + + type1 = rs_build_qualified_type (type1, TYPE_UNQUALIFIED); + type2 = rs_build_qualified_type (type2, TYPE_UNQUALIFIED); + return same_type_p (type1, type2); +} + +// forked from gcc/cp/typeck.cc comp_ptr_ttypes_const + +/* Return true if TO and FROM (both of which are POINTER_TYPEs or + pointer-to-member types) are the same, ignoring cv-qualification at + all levels. CB says how we should behave when comparing array bounds. 
*/ + +bool +comp_ptr_ttypes_const (tree to, tree from, compare_bounds_t cb) +{ + bool is_opaque_pointer = false; + + for (;; to = TREE_TYPE (to), from = TREE_TYPE (from)) + { + if (TREE_CODE (to) != TREE_CODE (from)) + return false; + + if (TREE_CODE (from) == OFFSET_TYPE + && same_type_p (TYPE_OFFSET_BASETYPE (from), + TYPE_OFFSET_BASETYPE (to))) + continue; + + if (VECTOR_TYPE_P (to)) + is_opaque_pointer = vector_targets_convertible_p (to, from); + + if (TREE_CODE (to) == ARRAY_TYPE + /* Ignore cv-qualification, but if we see e.g. int[3] and int[4], + we must fail. */ + && !comp_array_types (to, from, cb, /*strict=*/false)) + return false; + + /* CWG 330 says we need to look through arrays. */ + if (!TYPE_PTR_P (to) && TREE_CODE (to) != ARRAY_TYPE) + return (is_opaque_pointer + || same_type_ignoring_top_level_qualifiers_p (to, from)); + } +} + +// forked from gcc/cp/typeck.cc similar_type_p + +/* Returns nonzero iff TYPE1 and TYPE2 are similar, as per [conv.qual]. */ + +bool +similar_type_p (tree type1, tree type2) +{ + if (type1 == error_mark_node || type2 == error_mark_node) + return false; + + /* Informally, two types are similar if, ignoring top-level cv-qualification: + * they are the same type; or + * they are both pointers, and the pointed-to types are similar; or + * they are both pointers to member of the same class, and the types of + the pointed-to members are similar; or + * they are both arrays of the same size or both arrays of unknown bound, + and the array element types are similar. */ + + if (same_type_ignoring_top_level_qualifiers_p (type1, type2)) + return true; + + if ((TYPE_PTR_P (type1) && TYPE_PTR_P (type2)) + || (TYPE_PTRDATAMEM_P (type1) && TYPE_PTRDATAMEM_P (type2)) + || (TREE_CODE (type1) == ARRAY_TYPE && TREE_CODE (type2) == ARRAY_TYPE)) + return comp_ptr_ttypes_const (type1, type2, bounds_either); + + return false; +} + +// forked from gcc/cp/typeck.cc structural_comptypes +// note: this fork only handles strict == COMPARE_STRICT +// if you pass in any other value for strict i.e. COMPARE_BASE, +// COMPARE_DERIVED, COMPARE_REDECLARATION or COMPARE_STRUCTURAL +// see the original function in gcc/cp/typeck.cc and port the required bits +// specifically under case UNION_TYPE. + +/* Subroutine in comptypes. */ + +static bool +structural_comptypes (tree t1, tree t2, int strict) +{ + /* Both should be types that are not obviously the same. */ + gcc_checking_assert (t1 != t2 && TYPE_P (t1) && TYPE_P (t2)); + + if (TYPE_PTRMEMFUNC_P (t1)) + t1 = TYPE_PTRMEMFUNC_FN_TYPE (t1); + if (TYPE_PTRMEMFUNC_P (t2)) + t2 = TYPE_PTRMEMFUNC_FN_TYPE (t2); + + /* Different classes of types can't be compatible. */ + if (TREE_CODE (t1) != TREE_CODE (t2)) + return false; + + /* Qualifiers must match. For array types, we will check when we + recur on the array element types. */ + if (TREE_CODE (t1) != ARRAY_TYPE && rs_type_quals (t1) != rs_type_quals (t2)) + return false; + if (TREE_CODE (t1) == FUNCTION_TYPE + && type_memfn_quals (t1) != type_memfn_quals (t2)) + return false; + /* Need to check this before TYPE_MAIN_VARIANT. + FIXME function qualifiers should really change the main variant. */ + if (FUNC_OR_METHOD_TYPE_P (t1)) + { + if (type_memfn_rqual (t1) != type_memfn_rqual (t2)) + return false; + if (/* cxx_dialect >= cxx17 && */ + !comp_except_specs (TYPE_RAISES_EXCEPTIONS (t1), + TYPE_RAISES_EXCEPTIONS (t2), ce_type)) + return false; + } + + /* Allow for two different type nodes which have essentially the same + definition. 
Note that we already checked for equality of the type + qualifiers (just above). */ + if (TREE_CODE (t1) != ARRAY_TYPE + && TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2)) + return true; + + /* Compare the types. Return false on known not-same. Break on not + known. Never return true from this switch -- you'll break + specialization comparison. */ + switch (TREE_CODE (t1)) + { + case VOID_TYPE: + case BOOLEAN_TYPE: + /* All void and bool types are the same. */ + break; + + case OPAQUE_TYPE: + case INTEGER_TYPE: + case FIXED_POINT_TYPE: + case REAL_TYPE: + /* With these nodes, we can't determine type equivalence by + looking at what is stored in the nodes themselves, because + two nodes might have different TYPE_MAIN_VARIANTs but still + represent the same type. For example, wchar_t and int could + have the same properties (TYPE_PRECISION, TYPE_MIN_VALUE, + TYPE_MAX_VALUE, etc.), but have different TYPE_MAIN_VARIANTs + and are distinct types. On the other hand, int and the + following typedef + + typedef int INT __attribute((may_alias)); + + have identical properties, different TYPE_MAIN_VARIANTs, but + represent the same type. The canonical type system keeps + track of equivalence in this case, so we fall back on it. */ + if (TYPE_CANONICAL (t1) != TYPE_CANONICAL (t2)) + return false; + + /* We don't need or want the attribute comparison. */ + return true; + + case RECORD_TYPE: + case UNION_TYPE: + return false; + + case OFFSET_TYPE: + if (!comptypes (TYPE_OFFSET_BASETYPE (t1), TYPE_OFFSET_BASETYPE (t2), + strict & ~COMPARE_REDECLARATION)) + return false; + if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + break; + + case REFERENCE_TYPE: + if (TYPE_REF_IS_RVALUE (t1) != TYPE_REF_IS_RVALUE (t2)) + return false; + /* fall through to checks for pointer types */ + gcc_fallthrough (); + + case POINTER_TYPE: + if (TYPE_MODE (t1) != TYPE_MODE (t2) + || !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + break; + + case METHOD_TYPE: + case FUNCTION_TYPE: + /* Exception specs and memfn_rquals were checked above. */ + if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + if (!compparms (TYPE_ARG_TYPES (t1), TYPE_ARG_TYPES (t2))) + return false; + break; + + case ARRAY_TYPE: + /* Target types must match incl. qualifiers. */ + if (!comp_array_types (t1, t2, + ((strict & COMPARE_REDECLARATION) ? bounds_either + : bounds_none), + /*strict=*/true)) + return false; + break; + + case COMPLEX_TYPE: + if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + break; + + case VECTOR_TYPE: + if (gnu_vector_type_p (t1) != gnu_vector_type_p (t2) + || maybe_ne (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2)) + || !same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))) + return false; + break; + + default: + return false; + } + + /* If we get here, we know that from a target independent POV the + types are the same. Make sure the target attributes are also + the same. */ + if (!comp_type_attributes (t1, t2)) + return false; + + return true; +} + +// forked from gcc/cp/typeck.cc comptypes + +/* Return true if T1 and T2 are related as allowed by STRICT. STRICT + is a bitwise-or of the COMPARE_* flags. */ + +bool +comptypes (tree t1, tree t2, int strict) +{ + gcc_checking_assert (t1 && t2); + + /* TYPE_ARGUMENT_PACKS are not really types. */ + gcc_checking_assert (TREE_CODE (t1) != TYPE_ARGUMENT_PACK + && TREE_CODE (t2) != TYPE_ARGUMENT_PACK); + + if (t1 == t2) + return true; + + /* Suppress errors caused by previously reported errors. 
*/ + if (t1 == error_mark_node || t2 == error_mark_node) + return false; + + if (strict == COMPARE_STRICT) + { + if (TYPE_STRUCTURAL_EQUALITY_P (t1) || TYPE_STRUCTURAL_EQUALITY_P (t2)) + /* At least one of the types requires structural equality, so + perform a deep check. */ + return structural_comptypes (t1, t2, strict); + + if (flag_checking && param_use_canonical_types) + { + bool result = structural_comptypes (t1, t2, strict); + + if (result && TYPE_CANONICAL (t1) != TYPE_CANONICAL (t2)) + /* The two types are structurally equivalent, but their + canonical types were different. This is a failure of the + canonical type propagation code.*/ + internal_error ( + "canonical types differ for identical types %qT and %qT", t1, t2); + else if (!result && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)) + /* Two types are structurally different, but the canonical + types are the same. This means we were over-eager in + assigning canonical types. */ + internal_error ( + "same canonical type node for different types %qT and %qT", t1, + t2); + + return result; + } + if (!flag_checking && param_use_canonical_types) + return TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2); + else + return structural_comptypes (t1, t2, strict); + } + else if (strict == COMPARE_STRUCTURAL) + return structural_comptypes (t1, t2, COMPARE_STRICT); + else + return structural_comptypes (t1, t2, strict); +} + +// forked from gcc/cp/decl.cc next_initializable_field + +/* FIELD is an element of TYPE_FIELDS or NULL. In the former case, the value + returned is the next FIELD_DECL (possibly FIELD itself) that can be + initialized. If there are no more such fields, the return value + will be NULL. */ + +tree +next_initializable_field (tree field) +{ + while (field + && (TREE_CODE (field) != FIELD_DECL || DECL_UNNAMED_BIT_FIELD (field) + || (DECL_ARTIFICIAL (field) + /* Don't skip vptr fields. We might see them when we're + called from reduced_constant_expression_p. */ + && !DECL_VIRTUAL_P (field)))) + field = DECL_CHAIN (field); + + return field; +} + +// forked from gcc/cp/call.cc sufficient_parms_p + +/* Returns nonzero if PARMLIST consists of only default parms, + ellipsis, and/or undeduced parameter packs. */ + +bool +sufficient_parms_p (const_tree parmlist) +{ + for (; parmlist && parmlist != void_list_node; + parmlist = TREE_CHAIN (parmlist)) + if (!TREE_PURPOSE (parmlist)) + return false; + return true; +} + +// forked from gcc/cp/class.cc default_ctor_p + +/* Returns true if FN is a default constructor. */ + +bool +default_ctor_p (const_tree fn) +{ + return (DECL_CONSTRUCTOR_P (fn) + && sufficient_parms_p (FUNCTION_FIRST_USER_PARMTYPE (fn))); +} + +// forked from gcc/cp/class.cc user_provided_p + +/* Returns true iff FN is a user-provided function, i.e. user-declared + and not defaulted at its first declaration. */ + +bool +user_provided_p (tree fn) +{ + return (!DECL_ARTIFICIAL (fn) + && !(DECL_INITIALIZED_IN_CLASS_P (fn) + && (DECL_DEFAULTED_FN (fn) || DECL_DELETED_FN (fn)))); +} + +// forked from gcc/cp/class.cc type_has_non_user_provided_default_constructor + +/* Returns true iff class T has a non-user-provided (i.e. implicitly + declared or explicitly defaulted in the class body) default + constructor. 
*/ + +bool +type_has_non_user_provided_default_constructor (tree t) +{ + if (!TYPE_HAS_DEFAULT_CONSTRUCTOR (t)) + return false; + if (CLASSTYPE_LAZY_DEFAULT_CTOR (t)) + return true; + + for (ovl_iterator iter (CLASSTYPE_CONSTRUCTORS (t)); iter; ++iter) + { + tree fn = *iter; + if (TREE_CODE (fn) == FUNCTION_DECL && default_ctor_p (fn) + && !user_provided_p (fn)) + return true; + } + + return false; +} + +// forked from gcc/cp/class.cc default_init_uninitialized_part + +/* If default-initialization leaves part of TYPE uninitialized, returns + a DECL for the field or TYPE itself (DR 253). */ + +tree +default_init_uninitialized_part (tree type) +{ + tree t, r, binfo; + int i; + + type = strip_array_types (type); + if (!CLASS_TYPE_P (type)) + return type; + if (!type_has_non_user_provided_default_constructor (type)) + return NULL_TREE; + for (binfo = TYPE_BINFO (type), i = 0; BINFO_BASE_ITERATE (binfo, i, t); ++i) + { + r = default_init_uninitialized_part (BINFO_TYPE (t)); + if (r) + return r; + } + for (t = next_initializable_field (TYPE_FIELDS (type)); t; + t = next_initializable_field (DECL_CHAIN (t))) + if (!DECL_INITIAL (t) && !DECL_ARTIFICIAL (t)) + { + r = default_init_uninitialized_part (TREE_TYPE (t)); + if (r) + return DECL_P (r) ? r : t; + } + + return NULL_TREE; +} + +// forked from gcc/cp/name-lookup.cc extract_conversion_operator + +/* FNS is an overload set of conversion functions. Return the + overloads converting to TYPE. */ + +static tree +extract_conversion_operator (tree fns, tree type) +{ + tree convs = NULL_TREE; + tree tpls = NULL_TREE; + + for (ovl_iterator iter (fns); iter; ++iter) + { + if (same_type_p (DECL_CONV_FN_TYPE (*iter), type)) + convs = lookup_add (*iter, convs); + } + + if (!convs) + convs = tpls; + + return convs; +} + +// forked from gcc/cp/name-lookup.cc + +/* Look for NAME as an immediate member of KLASS (including + anon-members or unscoped enum member). TYPE_OR_FNS is zero for + regular search. >0 to get a type binding (if there is one) and <0 + if you want (just) the member function binding. + + Use this if you do not want lazy member creation. */ + +tree +get_class_binding_direct (tree klass, tree name, bool want_type) +{ + gcc_checking_assert (RECORD_OR_UNION_TYPE_P (klass)); + + /* Conversion operators can only be found by the marker conversion + operator name. */ + bool conv_op = IDENTIFIER_CONV_OP_P (name); + tree lookup = conv_op ? conv_op_identifier : name; + tree val = NULL_TREE; + vec<tree, va_gc> *member_vec = CLASSTYPE_MEMBER_VEC (klass); + + if (COMPLETE_TYPE_P (klass) && member_vec) + { + val = member_vec_binary_search (member_vec, lookup); + if (!val) + ; + else if (STAT_HACK_P (val)) + val = want_type ? STAT_TYPE (val) : STAT_DECL (val); + else if (want_type && !DECL_DECLARES_TYPE_P (val)) + val = NULL_TREE; + } + else + { + if (member_vec && !want_type) + val = member_vec_linear_search (member_vec, lookup); + + if (!val || (TREE_CODE (val) == OVERLOAD && OVL_DEDUP_P (val))) + /* Dependent using declarations are a 'field', make sure we + return that even if we saw an overload already. */ + if (tree field_val = fields_linear_search (klass, lookup, want_type)) + { + if (!val) + val = field_val; + else if (TREE_CODE (field_val) == USING_DECL) + val = ovl_make (field_val, val); + } + } + + /* Extract the conversion operators asked for, unless the general + conversion operator was requested. 
*/ + if (val && conv_op) + { + gcc_checking_assert (OVL_FUNCTION (val) == conv_op_marker); + val = OVL_CHAIN (val); + if (tree type = TREE_TYPE (name)) + val = extract_conversion_operator (val, type); + } + + return val; +} + +#if defined ENABLE_TREE_CHECKING + +// forked from gcc/cp/tree.cc lang_check_failed + +/* Complain that some language-specific thing hanging off a tree + node has been accessed improperly. */ + +void +lang_check_failed (const char *file, int line, const char *function) +{ + internal_error ("%<lang_*%> check: failed in %s, at %s:%d", function, + trim_filename (file), line); +} +#endif /* ENABLE_TREE_CHECKING */ + +// forked from gcc/cp/tree.cc skip_artificial_parms_for + +/* Given a FUNCTION_DECL FN and a chain LIST, skip as many elements of LIST + as there are artificial parms in FN. */ + +tree +skip_artificial_parms_for (const_tree fn, tree list) +{ + if (DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)) + list = TREE_CHAIN (list); + else + return list; + + if (DECL_HAS_IN_CHARGE_PARM_P (fn)) + list = TREE_CHAIN (list); + if (DECL_HAS_VTT_PARM_P (fn)) + list = TREE_CHAIN (list); + return list; +} + +// forked from gcc/cp/class.cc in_class_defaulted_default_constructor + +/* Returns the defaulted constructor if T has one. Otherwise, returns + NULL_TREE. */ + +tree +in_class_defaulted_default_constructor (tree t) +{ + if (!TYPE_HAS_USER_CONSTRUCTOR (t)) + return NULL_TREE; + + for (ovl_iterator iter (CLASSTYPE_CONSTRUCTORS (t)); iter; ++iter) + { + tree fn = *iter; + + if (DECL_DEFAULTED_IN_CLASS_P (fn) && default_ctor_p (fn)) + return fn; + } + + return NULL_TREE; +} + +// forked from gcc/cp/constexpr.cc + +/* Returns true iff FUN is an instantiation of a constexpr function + template or a defaulted constexpr function. */ + +bool +is_instantiation_of_constexpr (tree fun) +{ + return ((DECL_DEFAULTED_FN (fun) && DECL_DECLARED_CONSTEXPR_P (fun))); +} + +// forked from gcc/cp/decl.cc check_for_uninitialized_const_var + +/* Issue an error message if DECL is an uninitialized const variable. + CONSTEXPR_CONTEXT_P is true when the function is called in a constexpr + context from potential_constant_expression. Returns true if all is well, + false otherwise. */ + +bool +check_for_uninitialized_const_var (tree decl, bool constexpr_context_p, + tsubst_flags_t complain) +{ + tree type = strip_array_types (TREE_TYPE (decl)); + + /* ``Unless explicitly declared extern, a const object does not have + external linkage and must be initialized. 
($8.4; $12.1)'' ARM + 7.1.6 */ + if (VAR_P (decl) && !TYPE_REF_P (type) && (RS_TYPE_CONST_P (type)) + && !DECL_NONTRIVIALLY_INITIALIZED_P (decl)) + { + tree field = default_init_uninitialized_part (type); + if (!field) + return true; + + bool show_notes = true; + + if (!constexpr_context_p) + { + if (RS_TYPE_CONST_P (type)) + { + if (complain & tf_error) + show_notes = permerror (DECL_SOURCE_LOCATION (decl), + "uninitialized %<const %D%>", decl); + } + else + { + if (!is_instantiation_of_constexpr (current_function_decl) + && (complain & tf_error)) + error_at (DECL_SOURCE_LOCATION (decl), + "uninitialized variable %qD in %<constexpr%> " + "function", + decl); + else + show_notes = false; + } + } + else if (complain & tf_error) + error_at (DECL_SOURCE_LOCATION (decl), + "uninitialized variable %qD in %<constexpr%> context", decl); + + if (show_notes && CLASS_TYPE_P (type) && (complain & tf_error)) + { + // tree defaulted_ctor; + + // inform (DECL_SOURCE_LOCATION (TYPE_MAIN_DECL (type)), + // "%q#T has no user-provided default constructor", type); + // defaulted_ctor = in_class_defaulted_default_constructor (type); + // if (defaulted_ctor) + // inform (DECL_SOURCE_LOCATION (defaulted_ctor), + // "constructor is not user-provided because it is " + // "explicitly defaulted in the class body"); + // inform (DECL_SOURCE_LOCATION (field), + // "and the implicitly-defined constructor does not " + // "initialize %q#D", + // field); + } + + return false; + } + + return true; +} + +// forked from gcc/cp/tree.cc cv_unqualified + +/* Return TYPE with const and volatile removed. */ + +tree +cv_unqualified (tree type) +{ + int quals; + + if (type == error_mark_node) + return type; + + quals = rs_type_quals (type); + quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE); + return rs_build_qualified_type (type, quals); +} + +/* The C and C++ parsers both use vectors to hold function arguments. + For efficiency, we keep a cache of unused vectors. This is the + cache. */ + +typedef vec<tree, va_gc> *tree_gc_vec; +static GTY ((deletable)) vec<tree_gc_vec, va_gc> *tree_vector_cache; + +// forked from gcc/c-family/c-common.c make_tree_vector + +/* Return a new vector from the cache. If the cache is empty, + allocate a new vector. These vectors are GC'ed, so it is OK if the + pointer is not released.. */ + +vec<tree, va_gc> * +make_tree_vector (void) +{ + if (tree_vector_cache && !tree_vector_cache->is_empty ()) + return tree_vector_cache->pop (); + else + { + /* Passing 0 to vec::alloc returns NULL, and our callers require + that we always return a non-NULL value. The vector code uses + 4 when growing a NULL vector, so we do too. */ + vec<tree, va_gc> *v; + vec_alloc (v, 4); + return v; + } +} + +// forked from gcc/c-family/c-common.c release_tree_vector + +/* Release a vector of trees back to the cache. */ + +void +release_tree_vector (vec<tree, va_gc> *vec) +{ + if (vec != NULL) + { + if (vec->allocated () >= 16) + /* Don't cache vecs that have expanded more than once. On a p64 + target, vecs double in alloc size with each power of 2 elements, e.g + at 16 elements the alloc increases from 128 to 256 bytes. */ + vec_free (vec); + else + { + vec->truncate (0); + vec_safe_push (tree_vector_cache, vec); + } + } +} + +// forked from gcc/cp/cvt.cc instantiation_dependent_expression_p + +/* As above, but also check value-dependence of the expression as a whole. 
*/ + +bool +instantiation_dependent_expression_p (tree expression) +{ + return false; +} + +// forked from gcc/cp/cvt.cc cp_get_callee + +/* If CALL is a call, return the callee; otherwise null. */ + +tree +cp_get_callee (tree call) +{ + if (call == NULL_TREE) + return call; + else if (TREE_CODE (call) == CALL_EXPR) + return CALL_EXPR_FN (call); + return NULL_TREE; +} + +// forked from gcc/cp/typeck.cc build_nop + +/* Return a NOP_EXPR converting EXPR to TYPE. */ + +tree +build_nop (tree type, tree expr) +{ + if (type == error_mark_node || error_operand_p (expr)) + return expr; + return build1_loc (EXPR_LOCATION (expr), NOP_EXPR, type, expr); +} + +// forked from gcc/cp/tree.cc scalarish_type_p + +/* Returns 1 iff type T is something we want to treat as a scalar type for + the purpose of deciding whether it is trivial/POD/standard-layout. */ + +bool +scalarish_type_p (const_tree t) +{ + if (t == error_mark_node) + return 1; + + return (SCALAR_TYPE_P (t) || VECTOR_TYPE_P (t)); +} + +// forked from gcc/cp/tree.cc type_has_nontrivial_copy_init + +/* Returns true iff copying an object of type T (including via move + constructor) is non-trivial. That is, T has no non-trivial copy + constructors and no non-trivial move constructors, and not all copy/move + constructors are deleted. This function implements the ABI notion of + non-trivial copy, which has diverged from the one in the standard. */ + +bool +type_has_nontrivial_copy_init (const_tree type) +{ + return false; +} + +// forked from gcc/cp/tree.cc build_local_temp + +/* Return an undeclared local temporary of type TYPE for use in building a + TARGET_EXPR. */ + +tree +build_local_temp (tree type) +{ + tree slot = build_decl (input_location, VAR_DECL, NULL_TREE, type); + DECL_ARTIFICIAL (slot) = 1; + DECL_IGNORED_P (slot) = 1; + DECL_CONTEXT (slot) = current_function_decl; + layout_decl (slot, 0); + return slot; +} + +// forked from gcc/cp/lambda.cc is_normal_capture_proxy + +/* Returns true iff DECL is a capture proxy for a normal capture + (i.e. without explicit initializer). */ + +bool +is_normal_capture_proxy (tree decl) +{ + return false; +} + +// forked from gcc/cp/c-common.cc reject_gcc_builtin + +/* For an EXPR of a FUNCTION_TYPE that references a GCC built-in function + with no library fallback or for an ADDR_EXPR whose operand is such type + issues an error pointing to the location LOC. + Returns true when the expression has been diagnosed and false + otherwise. */ + +bool +reject_gcc_builtin (const_tree expr, location_t loc /* = UNKNOWN_LOCATION */) +{ + if (TREE_CODE (expr) == ADDR_EXPR) + expr = TREE_OPERAND (expr, 0); + + STRIP_ANY_LOCATION_WRAPPER (expr); + + if (TREE_TYPE (expr) && TREE_CODE (TREE_TYPE (expr)) == FUNCTION_TYPE + && TREE_CODE (expr) == FUNCTION_DECL + /* The intersection of DECL_BUILT_IN and DECL_IS_UNDECLARED_BUILTIN avoids + false positives for user-declared built-ins such as abs or + strlen, and for C++ operators new and delete. + The c_decl_implicit() test avoids false positives for implicitly + declared built-ins with library fallbacks (such as abs). */ + && fndecl_built_in_p (expr) && DECL_IS_UNDECLARED_BUILTIN (expr) + && !DECL_ASSEMBLER_NAME_SET_P (expr)) + { + if (loc == UNKNOWN_LOCATION) + loc = EXPR_LOC_OR_LOC (expr, input_location); + + /* Reject arguments that are built-in functions with + no library fallback. 
*/ + error_at (loc, "built-in function %qE must be directly called", expr); + + return true; + } + + return false; +} + +// forked from gcc/cp/typeck.cc is_bitfield_expr_with_lowered_type + +/* If EXP is a reference to a bit-field, and the type of EXP does not + match the declared type of the bit-field, return the declared type + of the bit-field. Otherwise, return NULL_TREE. */ + +tree +is_bitfield_expr_with_lowered_type (const_tree exp) +{ + switch (TREE_CODE (exp)) + { + case COND_EXPR: + if (!is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 1) + ? TREE_OPERAND (exp, 1) + : TREE_OPERAND (exp, 0))) + return NULL_TREE; + return is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 2)); + + case COMPOUND_EXPR: + return is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 1)); + + case MODIFY_EXPR: + case SAVE_EXPR: + case UNARY_PLUS_EXPR: + case PREDECREMENT_EXPR: + case PREINCREMENT_EXPR: + case POSTDECREMENT_EXPR: + case POSTINCREMENT_EXPR: + case NEGATE_EXPR: + case NON_LVALUE_EXPR: + case BIT_NOT_EXPR: + return is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 0)); + + case COMPONENT_REF: { + tree field; + + field = TREE_OPERAND (exp, 1); + if (TREE_CODE (field) != FIELD_DECL || !DECL_BIT_FIELD_TYPE (field)) + return NULL_TREE; + if (same_type_ignoring_top_level_qualifiers_p ( + TREE_TYPE (exp), DECL_BIT_FIELD_TYPE (field))) + return NULL_TREE; + return DECL_BIT_FIELD_TYPE (field); + } + + case VAR_DECL: + if (DECL_HAS_VALUE_EXPR_P (exp)) + return is_bitfield_expr_with_lowered_type ( + DECL_VALUE_EXPR (CONST_CAST_TREE (exp))); + return NULL_TREE; + + case VIEW_CONVERT_EXPR: + if (location_wrapper_p (exp)) + return is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 0)); + else + return NULL_TREE; + + default: + return NULL_TREE; + } +} + +// forked from gcc/cp/semantics.cc maybe_undo_parenthesized_ref + +/* If T is an id-expression obfuscated by force_paren_expr, undo the + obfuscation and return the underlying id-expression. Otherwise + return T. */ + +tree +maybe_undo_parenthesized_ref (tree t) +{ + if ((TREE_CODE (t) == PAREN_EXPR || TREE_CODE (t) == VIEW_CONVERT_EXPR) + && REF_PARENTHESIZED_P (t)) + t = TREE_OPERAND (t, 0); + + return t; +} + +// forked from gcc/c-family/c-common.cc fold_offsetof + +/* Fold an offsetof-like expression. EXPR is a nested sequence of component + references with an INDIRECT_REF of a constant at the bottom; much like the + traditional rendering of offsetof as a macro. TYPE is the desired type of + the whole expression. Return the folded result. 
*/ + +tree +fold_offsetof (tree expr, tree type, enum tree_code ctx) +{ + tree base, off, t; + tree_code code = TREE_CODE (expr); + switch (code) + { + case ERROR_MARK: + return expr; + + case VAR_DECL: + error ("cannot apply %<offsetof%> to static data member %qD", expr); + return error_mark_node; + + case CALL_EXPR: + case TARGET_EXPR: + error ("cannot apply %<offsetof%> when %<operator[]%> is overloaded"); + return error_mark_node; + + case NOP_EXPR: + case INDIRECT_REF: + if (!TREE_CONSTANT (TREE_OPERAND (expr, 0))) + { + error ("cannot apply %<offsetof%> to a non constant address"); + return error_mark_node; + } + return convert (type, TREE_OPERAND (expr, 0)); + + case COMPONENT_REF: + base = fold_offsetof (TREE_OPERAND (expr, 0), type, code); + if (base == error_mark_node) + return base; + + t = TREE_OPERAND (expr, 1); + if (DECL_C_BIT_FIELD (t)) + { + error ("attempt to take address of bit-field structure " + "member %qD", + t); + return error_mark_node; + } + off = size_binop_loc (input_location, PLUS_EXPR, DECL_FIELD_OFFSET (t), + size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t)) + / BITS_PER_UNIT)); + break; + + case ARRAY_REF: + base = fold_offsetof (TREE_OPERAND (expr, 0), type, code); + if (base == error_mark_node) + return base; + + t = TREE_OPERAND (expr, 1); + STRIP_ANY_LOCATION_WRAPPER (t); + + /* Check if the offset goes beyond the upper bound of the array. */ + if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) >= 0) + { + tree upbound = array_ref_up_bound (expr); + if (upbound != NULL_TREE && TREE_CODE (upbound) == INTEGER_CST + && !tree_int_cst_equal (upbound, + TYPE_MAX_VALUE (TREE_TYPE (upbound)))) + { + if (ctx != ARRAY_REF && ctx != COMPONENT_REF) + upbound = size_binop (PLUS_EXPR, upbound, + build_int_cst (TREE_TYPE (upbound), 1)); + if (tree_int_cst_lt (upbound, t)) + { + tree v; + + for (v = TREE_OPERAND (expr, 0); + TREE_CODE (v) == COMPONENT_REF; v = TREE_OPERAND (v, 0)) + if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0))) + == RECORD_TYPE) + { + tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1)); + for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain)) + if (TREE_CODE (fld_chain) == FIELD_DECL) + break; + + if (fld_chain) + break; + } + /* Don't warn if the array might be considered a poor + man's flexible array member with a very permissive + definition thereof. */ + if (TREE_CODE (v) == ARRAY_REF + || TREE_CODE (v) == COMPONENT_REF) + warning (OPT_Warray_bounds_, + "index %E denotes an offset " + "greater than size of %qT", + t, TREE_TYPE (TREE_OPERAND (expr, 0))); + } + } + } + + t = convert (sizetype, t); + off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t); + break; + + case COMPOUND_EXPR: + /* Handle static members of volatile structs. */ + t = TREE_OPERAND (expr, 1); + gcc_checking_assert (VAR_P (get_base_address (t))); + return fold_offsetof (t, type); + + default: + gcc_unreachable (); + } + + if (!POINTER_TYPE_P (type)) + return size_binop (PLUS_EXPR, base, convert (type, off)); + return fold_build_pointer_plus (base, off); +} + +// forked from gcc/cp/tree.cc char_type_p + +/* Returns nonzero if TYPE is a character type, including wchar_t. 
*/ + +int +char_type_p (tree type) +{ + return (same_type_p (type, char_type_node) + || same_type_p (type, unsigned_char_type_node) + || same_type_p (type, signed_char_type_node) + || same_type_p (type, char8_type_node) + || same_type_p (type, char16_type_node) + || same_type_p (type, char32_type_node) + || same_type_p (type, wchar_type_node)); +} + +// forked from gcc/cp/pt.cc resolve_nondeduced_context + +/* Core DR 115: In contexts where deduction is done and fails, or in + contexts where deduction is not done, if a template argument list is + specified and it, along with any default template arguments, identifies + a single function template specialization, then the template-id is an + lvalue for the function template specialization. */ + +tree +resolve_nondeduced_context (tree orig_expr, tsubst_flags_t complain) +{ + return orig_expr; +} + +// forked from gcc/cp/pt.cc instantiate_non_dependent_or_null + +/* Like instantiate_non_dependent_expr, but return NULL_TREE rather than + an uninstantiated expression. */ + +tree +instantiate_non_dependent_or_null (tree expr) +{ + if (expr == NULL_TREE) + return NULL_TREE; + + return expr; +} + +// forked from gcc/cp/pt.cc resolve_nondeduced_context_or_error + +/* As above, but error out if the expression remains overloaded. */ + +tree +resolve_nondeduced_context_or_error (tree exp, tsubst_flags_t complain) +{ + exp = resolve_nondeduced_context (exp, complain); + if (type_unknown_p (exp)) + { + if (complain & tf_error) + cxx_incomplete_type_error (exp, TREE_TYPE (exp)); + return error_mark_node; + } + return exp; +} + +// forked from gcc/cp/tree.cc really_overloaded_fn + +/* Returns true iff X is an expression for an overloaded function + whose type cannot be known without performing overload + resolution. */ + +bool +really_overloaded_fn (tree x) +{ + return is_overloaded_fn (x) == 2; +} + +// forked from gcc/cp/typeck..cc invalid_nonstatic_memfn_p + +/* EXPR is being used in a context that is not a function call. + Enforce: + + [expr.ref] + + The expression can be used only as the left-hand operand of a + member function call. + + [expr.mptr.operator] + + If the result of .* or ->* is a function, then that result can be + used only as the operand for the function call operator (). + + by issuing an error message if appropriate. Returns true iff EXPR + violates these rules. */ + +bool +invalid_nonstatic_memfn_p (location_t loc, tree expr, tsubst_flags_t complain) +{ + if (expr == NULL_TREE) + return false; + /* Don't enforce this in MS mode. */ + if (flag_ms_extensions) + return false; + if (is_overloaded_fn (expr) && !really_overloaded_fn (expr)) + expr = get_first_fn (expr); + if (DECL_NONSTATIC_MEMBER_FUNCTION_P (expr)) + { + if (complain & tf_error) + { + if (DECL_P (expr)) + { + error_at (loc, "invalid use of non-static member function %qD", + expr); + inform (DECL_SOURCE_LOCATION (expr), "declared here"); + } + else + error_at (loc, + "invalid use of non-static member function of " + "type %qT", + TREE_TYPE (expr)); + } + return true; + } + return false; +} + +// forked from gcc/cp/call.cc strip_top_quals + +tree +strip_top_quals (tree t) +{ + if (TREE_CODE (t) == ARRAY_TYPE) + return t; + return rs_build_qualified_type (t, 0); +} + +// forked from gcc/cp/typeck2.cc cxx_incomplete_type_inform + +/* Print an inform about the declaration of the incomplete type TYPE. 
*/ + +// void +// cxx_incomplete_type_inform (const_tree type) +// { +// if (!TYPE_MAIN_DECL (type)) +// return; + +// location_t loc = DECL_SOURCE_LOCATION (TYPE_MAIN_DECL (type)); +// tree ptype = strip_top_quals (CONST_CAST_TREE (type)); + +// if (current_class_type && TYPE_BEING_DEFINED (current_class_type) +// && same_type_p (ptype, current_class_type)) +// inform (loc, +// "definition of %q#T is not complete until " +// "the closing brace", +// ptype); +// else +// inform (loc, "forward declaration of %q#T", ptype); +// } + +// forked from gcc/cp/typeck2.cc cxx_incomplete_type_diagnostic + +/* Print an error message for invalid use of an incomplete type. + VALUE is the expression that was used (or 0 if that isn't known) + and TYPE is the type that was invalid. DIAG_KIND indicates the + type of diagnostic (see diagnostic.def). */ + +void +cxx_incomplete_type_diagnostic (location_t loc, const_tree value, + const_tree type, diagnostic_t diag_kind) +{ + // bool is_decl = false, complained = false; + + gcc_assert (diag_kind == DK_WARNING || diag_kind == DK_PEDWARN + || diag_kind == DK_ERROR); + + /* Avoid duplicate error message. */ + if (TREE_CODE (type) == ERROR_MARK) + return; + + if (value) + { + STRIP_ANY_LOCATION_WRAPPER (value); + + if (VAR_P (value) || TREE_CODE (value) == PARM_DECL + || TREE_CODE (value) == FIELD_DECL) + { + // complained = emit_diagnostic (diag_kind, DECL_SOURCE_LOCATION + // (value), + // 0, "%qD has incomplete type", value); + // is_decl = true; + } + } +retry: + /* We must print an error message. Be clever about what it says. */ + + switch (TREE_CODE (type)) + { + // case RECORD_TYPE: + // case UNION_TYPE: + // case ENUMERAL_TYPE: + // if (!is_decl) + // complained + // = emit_diagnostic (diag_kind, loc, 0, + // "invalid use of incomplete type %q#T", type); + // if (complained) + // cxx_incomplete_type_inform (type); + // break; + + case VOID_TYPE: + emit_diagnostic (diag_kind, loc, 0, "invalid use of %qT", type); + break; + + case ARRAY_TYPE: + if (TYPE_DOMAIN (type)) + { + type = TREE_TYPE (type); + goto retry; + } + emit_diagnostic (diag_kind, loc, 0, + "invalid use of array with unspecified bounds"); + break; + + case OFFSET_TYPE: + bad_member : { + tree member = TREE_OPERAND (value, 1); + if (is_overloaded_fn (member)) + member = get_first_fn (member); + + if (DECL_FUNCTION_MEMBER_P (member) && !flag_ms_extensions) + { + gcc_rich_location richloc (loc); + /* If "member" has no arguments (other than "this"), then + add a fix-it hint. 
*/ + if (type_num_arguments (TREE_TYPE (member)) == 1) + richloc.add_fixit_insert_after ("()"); + emit_diagnostic (diag_kind, &richloc, 0, + "invalid use of member function %qD " + "(did you forget the %<()%> ?)", + member); + } + else + emit_diagnostic (diag_kind, loc, 0, + "invalid use of member %qD " + "(did you forget the %<&%> ?)", + member); + } + break; + + case LANG_TYPE: + if (type == init_list_type_node) + { + emit_diagnostic (diag_kind, loc, 0, + "invalid use of brace-enclosed initializer list"); + break; + } + gcc_assert (type == unknown_type_node); + if (value && TREE_CODE (value) == COMPONENT_REF) + goto bad_member; + else if (value && TREE_CODE (value) == ADDR_EXPR) + emit_diagnostic (diag_kind, loc, 0, + "address of overloaded function with no contextual " + "type information"); + else if (value && TREE_CODE (value) == OVERLOAD) + emit_diagnostic ( + diag_kind, loc, 0, + "overloaded function with no contextual type information"); + else + emit_diagnostic ( + diag_kind, loc, 0, + "insufficient contextual information to determine type"); + break; + + default: + gcc_unreachable (); + } +} + +// forked from gcc/cp/decl2.cc decl_constant_var_p + +/* Nonzero for a VAR_DECL whose value can be used in a constant expression. + + [expr.const] + + An integral constant-expression can only involve ... const + variables of integral or enumeration types initialized with + constant expressions ... + + C++0x also allows constexpr variables and temporaries initialized + with constant expressions. We handle the former here, but the latter + are just folded away in cxx_eval_constant_expression. + + The standard does not require that the expression be non-volatile. + G++ implements the proposed correction in DR 457. */ + +bool +decl_constant_var_p (tree decl) +{ + if (!decl_maybe_constant_var_p (decl)) + return false; + + return DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl); +} + +// forked from gcc/cp/decl.cc undeduced_auto_decl + +/* Returns true iff DECL is a variable or function declared with an auto type + that has not yet been deduced to a real type. */ + +bool +undeduced_auto_decl (tree decl) +{ + return false; +} + +// forked from gcc/cp/decl.cc require_deduced_type + +/* Complain if DECL has an undeduced return type. */ + +bool +require_deduced_type (tree decl, tsubst_flags_t complain) +{ + return true; +} + +/* Return the location of a tree passed to %+ formats. */ + +location_t +location_of (tree t) +{ + if (TYPE_P (t)) + { + t = TYPE_MAIN_DECL (t); + if (t == NULL_TREE) + return input_location; + } + else if (TREE_CODE (t) == OVERLOAD) + t = OVL_FIRST (t); + + if (DECL_P (t)) + return DECL_SOURCE_LOCATION (t); + + return EXPR_LOCATION (t); +} + +/* For element type ELT_TYPE, return the appropriate type of the heap object + containing such element(s). COOKIE_SIZE is NULL or the size of cookie + in bytes. FULL_SIZE is NULL if it is unknown how big the heap allocation + will be, otherwise size of the heap object. If COOKIE_SIZE is NULL, + return array type ELT_TYPE[FULL_SIZE / sizeof(ELT_TYPE)], otherwise return + struct { size_t[COOKIE_SIZE/sizeof(size_t)]; ELT_TYPE[N]; } + where N is nothing (flexible array member) if FULL_SIZE is NULL, otherwise + it is computed such that the size of the struct fits into FULL_SIZE. 
*/ + +tree +build_new_constexpr_heap_type (tree elt_type, tree cookie_size, tree full_size) +{ + gcc_assert (cookie_size == NULL_TREE || tree_fits_uhwi_p (cookie_size)); + gcc_assert (full_size == NULL_TREE || tree_fits_uhwi_p (full_size)); + unsigned HOST_WIDE_INT csz = cookie_size ? tree_to_uhwi (cookie_size) : 0; + tree itype2 = NULL_TREE; + if (full_size) + { + unsigned HOST_WIDE_INT fsz = tree_to_uhwi (full_size); + gcc_assert (fsz >= csz); + fsz -= csz; + fsz /= int_size_in_bytes (elt_type); + itype2 = build_index_type (size_int (fsz - 1)); + if (!cookie_size) + return build_cplus_array_type (elt_type, itype2); + } + else + gcc_assert (cookie_size); + csz /= int_size_in_bytes (sizetype); + tree itype1 = build_index_type (size_int (csz - 1)); + tree atype1 = build_cplus_array_type (sizetype, itype1); + tree atype2 = build_cplus_array_type (elt_type, itype2); + tree rtype = cxx_make_type (RECORD_TYPE); + TYPE_NAME (rtype) = heap_identifier; + tree fld1 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype1); + tree fld2 = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, atype2); + DECL_FIELD_CONTEXT (fld1) = rtype; + DECL_FIELD_CONTEXT (fld2) = rtype; + DECL_ARTIFICIAL (fld1) = true; + DECL_ARTIFICIAL (fld2) = true; + TYPE_FIELDS (rtype) = fld1; + DECL_CHAIN (fld1) = fld2; + layout_type (rtype); + return rtype; +} + +// forked from gcc/cp/class.cc field_poverlapping_p + +/* Return true iff FIELD_DECL DECL is potentially overlapping. */ + +static bool +field_poverlapping_p (tree decl) +{ + return lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (decl)); +} + +// forked from gcc/cp/class.cc is_empty_field + +/* Return true iff DECL is an empty field, either for an empty base or a + [[no_unique_address]] data member. */ + +bool +is_empty_field (tree decl) +{ + if (!decl || TREE_CODE (decl) != FIELD_DECL) + return false; + + bool r = (is_empty_class (TREE_TYPE (decl)) && (field_poverlapping_p (decl))); + + /* Empty fields should have size zero. */ + gcc_checking_assert (!r || integer_zerop (DECL_SIZE (decl))); + + return r; +} + +// forked from gcc/cp/call.cc in_immediate_context + +/* Return true if in an immediate function context, or an unevaluated operand, + or a subexpression of an immediate invocation. */ + +bool +in_immediate_context () +{ + return false; +} + +// forked from gcc/cp/cvt.cc cp_get_fndecl_from_callee + +/* FN is the callee of a CALL_EXPR or AGGR_INIT_EXPR; return the FUNCTION_DECL + if we can. */ + +tree +rs_get_fndecl_from_callee (tree fn, bool fold /* = true */) +{ + if (fn == NULL_TREE) + return fn; + if (TREE_CODE (fn) == FUNCTION_DECL) + return fn; + tree type = TREE_TYPE (fn); + if (type == NULL_TREE || !INDIRECT_TYPE_P (type)) + return NULL_TREE; + if (fold) + fn = Compile::maybe_constant_init (fn); + STRIP_NOPS (fn); + if (TREE_CODE (fn) == ADDR_EXPR || TREE_CODE (fn) == FDESC_EXPR) + fn = TREE_OPERAND (fn, 0); + if (TREE_CODE (fn) == FUNCTION_DECL) + return fn; + return NULL_TREE; +} + +// forked from gcc/cp/cvt.cc cp_get_callee_fndecl_nofold +tree +rs_get_callee_fndecl_nofold (tree call) +{ + return rs_get_fndecl_from_callee (cp_get_callee (call), false); +} + +// forked from gcc/cp/init.cc is_class_type + +/* Report an error if TYPE is not a user-defined, class type. If + OR_ELSE is nonzero, give an error message. 
*/ + +int +is_class_type (tree type, int or_else) +{ + if (type == error_mark_node) + return 0; + + if (!CLASS_TYPE_P (type)) + { + if (or_else) + error ("%qT is not a class type", type); + return 0; + } + return 1; +} + +// forked from gcc/cp/decl.cc lookup_enumerator + +/* Look for an enumerator with the given NAME within the enumeration + type ENUMTYPE. This routine is used primarily for qualified name + lookup into an enumerator in C++0x, e.g., + + enum class Color { Red, Green, Blue }; + + Color color = Color::Red; + + Returns the value corresponding to the enumerator, or + NULL_TREE if no such enumerator was found. */ +tree +lookup_enumerator (tree enumtype, tree name) +{ + tree e; + gcc_assert (enumtype && TREE_CODE (enumtype) == ENUMERAL_TYPE); + + e = purpose_member (name, TYPE_VALUES (enumtype)); + return e ? TREE_VALUE (e) : NULL_TREE; +} + +// forked from gcc/cp/init.cc constant_value_1 +// commented out mark_used + +/* If DECL is a scalar enumeration constant or variable with a + constant initializer, return the initializer (or, its initializers, + recursively); otherwise, return DECL. If STRICT_P, the + initializer is only returned if DECL is a + constant-expression. If RETURN_AGGREGATE_CST_OK_P, it is ok to + return an aggregate constant. If UNSHARE_P, return an unshared + copy of the initializer. */ + +static tree +constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p, + bool unshare_p) +{ + while (TREE_CODE (decl) == CONST_DECL || decl_constant_var_p (decl) + || (!strict_p && VAR_P (decl) + && RS_TYPE_CONST_NON_VOLATILE_P (TREE_TYPE (decl)))) + { + tree init; + /* If DECL is a static data member in a template + specialization, we must instantiate it here. The + initializer for the static data member is not processed + until needed; we need it now. */ + // mark_used (decl, tf_none); + init = DECL_INITIAL (decl); + if (init == error_mark_node) + { + if (TREE_CODE (decl) == CONST_DECL + || DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl)) + /* Treat the error as a constant to avoid cascading errors on + excessively recursive template instantiation (c++/9335). */ + return init; + else + return decl; + } + + /* Instantiate a non-dependent initializer for user variables. We + mustn't do this for the temporary for an array compound literal; + trying to instatiate the initializer will keep creating new + temporaries until we crash. Probably it's not useful to do it for + other artificial variables, either. */ + if (!DECL_ARTIFICIAL (decl)) + init = instantiate_non_dependent_or_null (init); + if (!init || !TREE_TYPE (init) || !TREE_CONSTANT (init) + || (!return_aggregate_cst_ok_p + /* Unless RETURN_AGGREGATE_CST_OK_P is true, do not + return an aggregate constant (of which string + literals are a special case), as we do not want + to make inadvertent copies of such entities, and + we must be sure that their addresses are the + same everywhere. */ + && (TREE_CODE (init) == CONSTRUCTOR + || TREE_CODE (init) == STRING_CST))) + break; + /* Don't return a CONSTRUCTOR for a variable with partial run-time + initialization, since it doesn't represent the entire value. + Similarly for VECTOR_CSTs created by cp_folding those + CONSTRUCTORs. */ + if ((TREE_CODE (init) == CONSTRUCTOR || TREE_CODE (init) == VECTOR_CST) + && !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl)) + break; + /* If the variable has a dynamic initializer, don't use its + DECL_INITIAL which doesn't reflect the real value. 
*/ + if (VAR_P (decl) && TREE_STATIC (decl) + && !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) + && DECL_NONTRIVIALLY_INITIALIZED_P (decl)) + break; + decl = init; + } + return unshare_p ? unshare_expr (decl) : decl; +} + +// forked from gcc/cp/init.cc decl_constant_value + +/* A more relaxed version of decl_really_constant_value, used by the + common C/C++ code. */ + +tree +decl_constant_value (tree decl, bool unshare_p) +{ + return constant_value_1 (decl, /*strict_p=*/false, + /*return_aggregate_cst_ok_p=*/true, + /*unshare_p=*/unshare_p); +} + +// Below is forked from gcc/cp/init.cc decl_constant_value + +tree +decl_constant_value (tree decl) +{ + return decl_constant_value (decl, /*unshare_p=*/true); +} + +// Below is forked from gcc/cp/cp-gimplify.cc + +/* Type for source_location_table hash_set. */ +struct GTY ((for_user)) source_location_table_entry +{ + location_t loc; + unsigned uid; + tree var; +}; + +/* Traits class for function start hash maps below. */ + +struct source_location_table_entry_hash + : ggc_remove<source_location_table_entry> +{ + typedef source_location_table_entry value_type; + typedef source_location_table_entry compare_type; + + static hashval_t hash (const source_location_table_entry &ref) + { + inchash::hash hstate (0); + hstate.add_int (ref.loc); + hstate.add_int (ref.uid); + return hstate.end (); + } + + static bool equal (const source_location_table_entry &ref1, + const source_location_table_entry &ref2) + { + return ref1.loc == ref2.loc && ref1.uid == ref2.uid; + } + + static void mark_deleted (source_location_table_entry &ref) + { + ref.loc = UNKNOWN_LOCATION; + ref.uid = -1U; + ref.var = NULL_TREE; + } + + static const bool empty_zero_p = true; + + static void mark_empty (source_location_table_entry &ref) + { + ref.loc = UNKNOWN_LOCATION; + ref.uid = 0; + ref.var = NULL_TREE; + } + + static bool is_deleted (const source_location_table_entry &ref) + { + return (ref.loc == UNKNOWN_LOCATION && ref.uid == -1U + && ref.var == NULL_TREE); + } + + static bool is_empty (const source_location_table_entry &ref) + { + return (ref.loc == UNKNOWN_LOCATION && ref.uid == 0 + && ref.var == NULL_TREE); + } + + static void pch_nx (source_location_table_entry &p) + { + extern void gt_pch_nx (source_location_table_entry &); + gt_pch_nx (p); + } + + static void pch_nx (source_location_table_entry &p, gt_pointer_operator op, + void *cookie) + { + extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator, + void *); + gt_pch_nx (&p, op, cookie); + } +}; + +static GTY (()) + hash_table<source_location_table_entry_hash> *source_location_table; +static GTY (()) unsigned int source_location_id; + +// Above is forked from gcc/cp/cp-gimplify.cc + +// forked from gcc/cp/tree.cc lvalue_kind + +/* If REF is an lvalue, returns the kind of lvalue that REF is. + Otherwise, returns clk_none. */ + +cp_lvalue_kind +lvalue_kind (const_tree ref) +{ + cp_lvalue_kind op1_lvalue_kind = clk_none; + cp_lvalue_kind op2_lvalue_kind = clk_none; + + /* Expressions of reference type are sometimes wrapped in + INDIRECT_REFs. INDIRECT_REFs are just internal compiler + representation, not part of the language, so we have to look + through them. 
*/ + if (REFERENCE_REF_P (ref)) + return lvalue_kind (TREE_OPERAND (ref, 0)); + + if (TREE_TYPE (ref) && TYPE_REF_P (TREE_TYPE (ref))) + { + /* unnamed rvalue references are rvalues */ + if (TYPE_REF_IS_RVALUE (TREE_TYPE (ref)) && TREE_CODE (ref) != PARM_DECL + && !VAR_P (ref) + && TREE_CODE (ref) != COMPONENT_REF + /* Functions are always lvalues. */ + && TREE_CODE (TREE_TYPE (TREE_TYPE (ref))) != FUNCTION_TYPE) + { + op1_lvalue_kind = clk_rvalueref; + if (implicit_rvalue_p (ref)) + op1_lvalue_kind |= clk_implicit_rval; + return op1_lvalue_kind; + } + + /* lvalue references and named rvalue references are lvalues. */ + return clk_ordinary; + } + + if (ref == current_class_ptr) + return clk_none; + + /* Expressions with cv void type are prvalues. */ + if (TREE_TYPE (ref) && VOID_TYPE_P (TREE_TYPE (ref))) + return clk_none; + + switch (TREE_CODE (ref)) + { + case SAVE_EXPR: + return clk_none; + + /* preincrements and predecrements are valid lvals, provided + what they refer to are valid lvals. */ + case PREINCREMENT_EXPR: + case PREDECREMENT_EXPR: + case TRY_CATCH_EXPR: + case REALPART_EXPR: + case IMAGPART_EXPR: + case VIEW_CONVERT_EXPR: + return lvalue_kind (TREE_OPERAND (ref, 0)); + + case ARRAY_REF: { + tree op1 = TREE_OPERAND (ref, 0); + if (TREE_CODE (TREE_TYPE (op1)) == ARRAY_TYPE) + { + op1_lvalue_kind = lvalue_kind (op1); + if (op1_lvalue_kind == clk_class) + /* in the case of an array operand, the result is an lvalue if + that operand is an lvalue and an xvalue otherwise */ + op1_lvalue_kind = clk_rvalueref; + return op1_lvalue_kind; + } + else + return clk_ordinary; + } + + case MEMBER_REF: + case DOTSTAR_EXPR: + if (TREE_CODE (ref) == MEMBER_REF) + op1_lvalue_kind = clk_ordinary; + else + op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); + if (TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (ref, 1)))) + op1_lvalue_kind = clk_none; + else if (op1_lvalue_kind == clk_class) + /* The result of a .* expression whose second operand is a pointer to a + data member is an lvalue if the first operand is an lvalue and an + xvalue otherwise. */ + op1_lvalue_kind = clk_rvalueref; + return op1_lvalue_kind; + + case COMPONENT_REF: + op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); + if (op1_lvalue_kind == clk_class) + /* If E1 is an lvalue, then E1.E2 is an lvalue; + otherwise E1.E2 is an xvalue. */ + op1_lvalue_kind = clk_rvalueref; + + /* Look at the member designator. */ + if (!op1_lvalue_kind) + ; + else if (is_overloaded_fn (TREE_OPERAND (ref, 1))) + /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some + situations. If we're seeing a COMPONENT_REF, it's a non-static + member, so it isn't an lvalue. */ + op1_lvalue_kind = clk_none; + else if (TREE_CODE (TREE_OPERAND (ref, 1)) != FIELD_DECL) + /* This can be IDENTIFIER_NODE in a template. */; + else if (DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1))) + { + /* Clear the ordinary bit. If this object was a class + rvalue we want to preserve that information. */ + op1_lvalue_kind &= ~clk_ordinary; + /* The lvalue is for a bitfield. */ + op1_lvalue_kind |= clk_bitfield; + } + else if (DECL_PACKED (TREE_OPERAND (ref, 1))) + op1_lvalue_kind |= clk_packed; + + return op1_lvalue_kind; + + case STRING_CST: + case COMPOUND_LITERAL_EXPR: + return clk_ordinary; + + case CONST_DECL: + /* CONST_DECL without TREE_STATIC are enumeration values and + thus not lvalues. With TREE_STATIC they are used by ObjC++ + in objc_build_string_object and need to be considered as + lvalues. 
*/ + if (!TREE_STATIC (ref)) + return clk_none; + /* FALLTHRU */ + case VAR_DECL: + if (VAR_P (ref) && DECL_HAS_VALUE_EXPR_P (ref)) + return lvalue_kind (DECL_VALUE_EXPR (CONST_CAST_TREE (ref))); + + if (TREE_READONLY (ref) && !TREE_STATIC (ref) && DECL_LANG_SPECIFIC (ref) + && DECL_IN_AGGR_P (ref)) + return clk_none; + /* FALLTHRU */ + case INDIRECT_REF: + case ARROW_EXPR: + case PARM_DECL: + case RESULT_DECL: + case PLACEHOLDER_EXPR: + return clk_ordinary; + + case MAX_EXPR: + case MIN_EXPR: + /* Disallow <? and >? as lvalues if either argument side-effects. */ + if (TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 0)) + || TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 1))) + return clk_none; + op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0)); + op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1)); + break; + + case COND_EXPR: { + tree op1 = TREE_OPERAND (ref, 1); + if (!op1) + op1 = TREE_OPERAND (ref, 0); + tree op2 = TREE_OPERAND (ref, 2); + op1_lvalue_kind = lvalue_kind (op1); + op2_lvalue_kind = lvalue_kind (op2); + if (!op1_lvalue_kind != !op2_lvalue_kind) + { + /* The second or the third operand (but not both) is a + throw-expression; the result is of the type + and value category of the other. */ + if (op1_lvalue_kind && TREE_CODE (op2) == THROW_EXPR) + op2_lvalue_kind = op1_lvalue_kind; + else if (op2_lvalue_kind && TREE_CODE (op1) == THROW_EXPR) + op1_lvalue_kind = op2_lvalue_kind; + } + } + break; + + case MODIFY_EXPR: + case TYPEID_EXPR: + return clk_ordinary; + + case COMPOUND_EXPR: + return lvalue_kind (TREE_OPERAND (ref, 1)); + + case TARGET_EXPR: + return clk_class; + + case VA_ARG_EXPR: + return (CLASS_TYPE_P (TREE_TYPE (ref)) ? clk_class : clk_none); + + case CALL_EXPR: + /* We can see calls outside of TARGET_EXPR in templates. */ + if (CLASS_TYPE_P (TREE_TYPE (ref))) + return clk_class; + return clk_none; + + case FUNCTION_DECL: + /* All functions (except non-static-member functions) are + lvalues. */ + return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref) ? clk_none : clk_ordinary); + + case NON_DEPENDENT_EXPR: + case PAREN_EXPR: + return lvalue_kind (TREE_OPERAND (ref, 0)); + + case TEMPLATE_PARM_INDEX: + if (CLASS_TYPE_P (TREE_TYPE (ref))) + /* A template parameter object is an lvalue. */ + return clk_ordinary; + return clk_none; + + default: + if (!TREE_TYPE (ref)) + return clk_none; + if (CLASS_TYPE_P (TREE_TYPE (ref)) + || TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE) + return clk_class; + return clk_none; + } + + /* If one operand is not an lvalue at all, then this expression is + not an lvalue. */ + if (!op1_lvalue_kind || !op2_lvalue_kind) + return clk_none; + + /* Otherwise, it's an lvalue, and it has all the odd properties + contributed by either operand. */ + op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind; + /* It's not an ordinary lvalue if it involves any other kind. */ + if ((op1_lvalue_kind & ~clk_ordinary) != clk_none) + op1_lvalue_kind &= ~clk_ordinary; + /* It can't be both a pseudo-lvalue and a non-addressable lvalue. + A COND_EXPR of those should be wrapped in a TARGET_EXPR. */ + if ((op1_lvalue_kind & (clk_rvalueref | clk_class)) + && (op1_lvalue_kind & (clk_bitfield | clk_packed))) + op1_lvalue_kind = clk_none; + return op1_lvalue_kind; +} + +// forked from gcc/cp/tree.cc glvalue_p + +/* This differs from lvalue_p in that xvalues are included. 
*/ + +bool +glvalue_p (const_tree ref) +{ + cp_lvalue_kind kind = lvalue_kind (ref); + if (kind & clk_class) + return false; + else + return (kind != clk_none); +} + +// forked from gcc/cp/init.cc cv_qualified_p + +/* Returns nonzero if TYPE is const or volatile. */ + +bool +cv_qualified_p (const_tree type) +{ + int quals = rs_type_quals (type); + return (quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)) != 0; +} + +// forked from gcc/cp/tree.cc rvalue + +/* EXPR is being used in an rvalue context. Return a version of EXPR + that is marked as an rvalue. */ + +tree +rvalue (tree expr) +{ + tree type; + + if (error_operand_p (expr)) + return expr; + + expr = mark_rvalue_use (expr); + + /* [basic.lval] + + Non-class rvalues always have cv-unqualified types. */ + type = TREE_TYPE (expr); + if (!CLASS_TYPE_P (type) && cv_qualified_p (type)) + type = cv_unqualified (type); + + /* We need to do this for rvalue refs as well to get the right answer + from decltype; see c++/36628. */ + if (glvalue_p (expr)) + { + /* But don't use this function for class lvalues; use move (to treat an + lvalue as an xvalue) or force_rvalue (to make a prvalue copy). */ + gcc_checking_assert (!CLASS_TYPE_P (type)); + expr = build1 (NON_LVALUE_EXPR, type, expr); + } + else if (type != TREE_TYPE (expr)) + expr = build_nop (type, expr); + + return expr; +} + +// forked from gcc/cp/tree.cc bitfield_p + +/* True if REF is a bit-field. */ + +bool +bitfield_p (const_tree ref) +{ + return (lvalue_kind (ref) & clk_bitfield); +} + +// forked from gcc/cp/typeck.cc cxx_mark_addressable + +/* Mark EXP saying that we need to be able to take the + address of it; it should not be allocated in a register. + Value is true if successful. ARRAY_REF_P is true if this + is for ARRAY_REF construction - in that case we don't want + to look through VIEW_CONVERT_EXPR from VECTOR_TYPE to ARRAY_TYPE, + it is fine to use ARRAY_REFs for vector subscripts on vector + register variables. + + C++: we do not allow `current_class_ptr' to be addressable. */ + +bool +cxx_mark_addressable (tree exp, bool array_ref_p) +{ + tree x = exp; + + while (1) + switch (TREE_CODE (x)) + { + case VIEW_CONVERT_EXPR: + if (array_ref_p && TREE_CODE (TREE_TYPE (x)) == ARRAY_TYPE + && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0)))) + return true; + x = TREE_OPERAND (x, 0); + break; + + case COMPONENT_REF: + if (bitfield_p (x)) + error ("attempt to take address of bit-field"); + /* FALLTHRU */ + case ADDR_EXPR: + case ARRAY_REF: + case REALPART_EXPR: + case IMAGPART_EXPR: + x = TREE_OPERAND (x, 0); + break; + + case PARM_DECL: + if (x == current_class_ptr) + { + error ("cannot take the address of %<this%>, which is an rvalue " + "expression"); + TREE_ADDRESSABLE (x) = 1; /* so compiler doesn't die later. */ + return true; + } + /* Fall through. */ + + case VAR_DECL: + /* Caller should not be trying to mark initialized + constant fields addressable. */ + gcc_assert (DECL_LANG_SPECIFIC (x) == 0 || DECL_IN_AGGR_P (x) == 0 + || TREE_STATIC (x) || DECL_EXTERNAL (x)); + /* Fall through. 
*/ + + case RESULT_DECL: + if (DECL_REGISTER (x) && !TREE_ADDRESSABLE (x) && !DECL_ARTIFICIAL (x)) + { + if (VAR_P (x) && DECL_HARD_REGISTER (x)) + { + error ("address of explicit register variable %qD requested", + x); + return false; + } + else if (extra_warnings) + warning ( + OPT_Wextra, + "address requested for %qD, which is declared %<register%>", x); + } + TREE_ADDRESSABLE (x) = 1; + return true; + + case CONST_DECL: + case FUNCTION_DECL: + TREE_ADDRESSABLE (x) = 1; + return true; + + case CONSTRUCTOR: + TREE_ADDRESSABLE (x) = 1; + return true; + + case TARGET_EXPR: + TREE_ADDRESSABLE (x) = 1; + cxx_mark_addressable (TREE_OPERAND (x, 0)); + return true; + + default: + return true; + } +} + +// forked from gcc/cp/typeck.cc build_address + +/* Returns the address of T. This function will fold away + ADDR_EXPR of INDIRECT_REF. This is only for low-level usage; + most places should use cp_build_addr_expr instead. */ + +tree +build_address (tree t) +{ + if (error_operand_p (t) || !cxx_mark_addressable (t)) + return error_mark_node; + gcc_checking_assert (TREE_CODE (t) != CONSTRUCTOR); + t = build_fold_addr_expr_loc (EXPR_LOCATION (t), t); + if (TREE_CODE (t) != ADDR_EXPR) + t = rvalue (t); + return t; +} + +// forked from gcc/cp/gp-gimplify.cc fold_builtin_source_location + +/* Fold __builtin_source_location () call. LOC is the location + of the call. */ + +tree +fold_builtin_source_location (location_t loc) +{ + // if (source_location_impl == NULL_TREE) + // { + // auto_diagnostic_group d; + // source_location_impl = get_source_location_impl_type (loc); + // if (source_location_impl == error_mark_node) + // inform (loc, "evaluating %qs", "__builtin_source_location"); + // } + if (source_location_impl == error_mark_node) + return build_zero_cst (const_ptr_type_node); + if (source_location_table == NULL) + source_location_table + = hash_table<source_location_table_entry_hash>::create_ggc (64); + const line_map_ordinary *map; + source_location_table_entry entry; + entry.loc = linemap_resolve_location (line_table, loc, + LRK_MACRO_EXPANSION_POINT, &map); + entry.uid = current_function_decl ? 
DECL_UID (current_function_decl) : -1; + entry.var = error_mark_node; + source_location_table_entry *entryp + = source_location_table->find_slot (entry, INSERT); + tree var; + if (entryp->var) + var = entryp->var; + else + { + char tmp_name[32]; + ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++); + var = build_decl (loc, VAR_DECL, get_identifier (tmp_name), + source_location_impl); + TREE_STATIC (var) = 1; + TREE_PUBLIC (var) = 0; + DECL_ARTIFICIAL (var) = 1; + DECL_IGNORED_P (var) = 1; + DECL_EXTERNAL (var) = 0; + DECL_DECLARED_CONSTEXPR_P (var) = 1; + DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1; + layout_decl (var, 0); + + vec<constructor_elt, va_gc> *v = NULL; + vec_alloc (v, 4); + for (tree field = TYPE_FIELDS (source_location_impl); + (field = next_initializable_field (field)) != NULL_TREE; + field = DECL_CHAIN (field)) + { + const char *n = IDENTIFIER_POINTER (DECL_NAME (field)); + tree val = NULL_TREE; + if (strcmp (n, "_M_file_name") == 0) + { + if (const char *fname = LOCATION_FILE (loc)) + { + fname = remap_macro_filename (fname); + val = build_string_literal (strlen (fname) + 1, fname); + } + else + val = build_string_literal (1, ""); + } + else if (strcmp (n, "_M_function_name") == 0) + { + const char *name = "todo: add funciton name here"; + + // if (current_function_decl) + // name = cxx_printable_name (current_function_decl, 2); + + val = build_string_literal (strlen (name) + 1, name); + } + else if (strcmp (n, "_M_line") == 0) + val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc)); + else if (strcmp (n, "_M_column") == 0) + val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc)); + else + gcc_unreachable (); + CONSTRUCTOR_APPEND_ELT (v, field, val); + } + + tree ctor = build_constructor (source_location_impl, v); + TREE_CONSTANT (ctor) = 1; + TREE_STATIC (ctor) = 1; + DECL_INITIAL (var) = ctor; + varpool_node::finalize_decl (var); + *entryp = entry; + entryp->var = var; + } + + return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node); +} + +// forked from gcc/c-family/c-common.cc braced_lists_to_strings + +/* Attempt to convert a braced array initializer list CTOR for array + TYPE into a STRING_CST for convenience and efficiency. Return + the converted string on success or the original ctor on failure. */ + +static tree +braced_list_to_string (tree type, tree ctor, bool member) +{ + /* Ignore non-members with unknown size like arrays with unspecified + bound. */ + tree typesize = TYPE_SIZE_UNIT (type); + if (!member && !tree_fits_uhwi_p (typesize)) + return ctor; + + /* If the target char size differes from the host char size, we'd risk + loosing data and getting object sizes wrong by converting to + host chars. */ + if (TYPE_PRECISION (char_type_node) != CHAR_BIT) + return ctor; + + /* If the array has an explicit bound, use it to constrain the size + of the string. If it doesn't, be sure to create a string that's + as long as implied by the index of the last zero specified via + a designator, as in: + const char a[] = { [7] = 0 }; */ + unsigned HOST_WIDE_INT maxelts; + if (typesize) + { + maxelts = tree_to_uhwi (typesize); + maxelts /= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (type))); + } + else + maxelts = HOST_WIDE_INT_M1U; + + /* Avoid converting initializers for zero-length arrays (but do + create them for flexible array members). 
*/ + if (!maxelts) + return ctor; + + unsigned HOST_WIDE_INT nelts = CONSTRUCTOR_NELTS (ctor); + + auto_vec<char> str; + str.reserve (nelts + 1); + + unsigned HOST_WIDE_INT i; + tree index, value; + + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), i, index, value) + { + unsigned HOST_WIDE_INT idx = i; + if (index) + { + if (!tree_fits_uhwi_p (index)) + return ctor; + idx = tree_to_uhwi (index); + } + + /* auto_vec is limited to UINT_MAX elements. */ + if (idx > UINT_MAX) + return ctor; + + /* Avoid non-constant initializers. */ + if (!tree_fits_shwi_p (value)) + return ctor; + + /* Skip over embedded nuls except the last one (initializer + elements are in ascending order of indices). */ + HOST_WIDE_INT val = tree_to_shwi (value); + if (!val && i + 1 < nelts) + continue; + + if (idx < str.length ()) + return ctor; + + /* Bail if the CTOR has a block of more than 256 embedded nuls + due to implicitly initialized elements. */ + unsigned nchars = (idx - str.length ()) + 1; + if (nchars > 256) + return ctor; + + if (nchars > 1) + { + str.reserve (idx); + str.quick_grow_cleared (idx); + } + + if (idx >= maxelts) + return ctor; + + str.safe_insert (idx, val); + } + + /* Append a nul string termination. */ + if (maxelts != HOST_WIDE_INT_M1U && str.length () < maxelts) + str.safe_push (0); + + /* Build a STRING_CST with the same type as the array. */ + tree res = build_string (str.length (), str.begin ()); + TREE_TYPE (res) = type; + return res; +} + +// forked from gcc/c-family/c-common.cc braced_lists_to_strings + +/* Implementation of the two-argument braced_lists_to_string withe + the same arguments plus MEMBER which is set for struct members + to allow initializers for flexible member arrays. */ + +static tree +braced_lists_to_strings (tree type, tree ctor, bool member) +{ + if (TREE_CODE (ctor) != CONSTRUCTOR) + return ctor; + + tree_code code = TREE_CODE (type); + + tree ttp; + if (code == ARRAY_TYPE) + ttp = TREE_TYPE (type); + else if (code == RECORD_TYPE) + { + ttp = TREE_TYPE (ctor); + if (TREE_CODE (ttp) == ARRAY_TYPE) + { + type = ttp; + ttp = TREE_TYPE (ttp); + } + } + else + return ctor; + + if ((TREE_CODE (ttp) == ARRAY_TYPE || TREE_CODE (ttp) == INTEGER_TYPE) + && TYPE_STRING_FLAG (ttp)) + return braced_list_to_string (type, ctor, member); + + code = TREE_CODE (ttp); + if (code == ARRAY_TYPE || RECORD_OR_UNION_TYPE_P (ttp)) + { + bool rec = RECORD_OR_UNION_TYPE_P (ttp); + + /* Handle array of arrays or struct member initializers. */ + tree val; + unsigned HOST_WIDE_INT idx; + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), idx, val) + { + val = braced_lists_to_strings (ttp, val, rec); + CONSTRUCTOR_ELT (ctor, idx)->value = val; + } + } + + return ctor; +} + +// forked from gcc/c-family/c-common.cc braced_lists_to_strings + +/* Attempt to convert a CTOR containing braced array initializer lists + for array TYPE into one containing STRING_CSTs, for convenience and + efficiency. Recurse for arrays of arrays and member initializers. + Return the converted CTOR or STRING_CST on success or the original + CTOR otherwise. */ + +tree +braced_lists_to_strings (tree type, tree ctor) +{ + return braced_lists_to_strings (type, ctor, false); +} + +/*--------------------------------------------------------------------------- + Constraint satisfaction +---------------------------------------------------------------------------*/ + +// forked from gcc/cp/constraint.cc satisfying_constraint + +/* True if we are currently satisfying a failed_type_completions. 
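+   That is, constraint satisfaction is in progress; the flag is checked by
+   note_failed_type_completion_for_satisfaction below.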
*/ + +static bool satisfying_constraint; + +// forked from gcc/cp/constraint.cc satisfying_constraint + +/* A vector of incomplete types (and of declarations with undeduced return + type), appended to by note_failed_type_completion_for_satisfaction. The + satisfaction caches use this in order to keep track of "potentially unstable" + satisfaction results. + + Since references to entries in this vector are stored only in the + GC-deletable sat_cache, it's safe to make this deletable as well. */ + +static GTY ((deletable)) vec<tree, va_gc> *failed_type_completions; + +// forked from gcc/cp/constraint.cc note_failed_type_completion_for_satisfaction + +/* Called whenever a type completion (or return type deduction) failure occurs + that definitely affects the meaning of the program, by e.g. inducing + substitution failure. */ + +void +note_failed_type_completion_for_satisfaction (tree t) +{ + if (satisfying_constraint) + { + gcc_checking_assert ((TYPE_P (t) && !COMPLETE_TYPE_P (t)) + || (DECL_P (t) && undeduced_auto_decl (t))); + vec_safe_push (failed_type_completions, t); + } +} + +// forked from gcc/cp/typeck.cc complete_type + +/* Try to complete TYPE, if it is incomplete. For example, if TYPE is + a template instantiation, do the instantiation. Returns TYPE, + whether or not it could be completed, unless something goes + horribly wrong, in which case the error_mark_node is returned. */ + +tree +complete_type (tree type) +{ + if (type == NULL_TREE) + /* Rather than crash, we return something sure to cause an error + at some point. */ + return error_mark_node; + + if (type == error_mark_node || COMPLETE_TYPE_P (type)) + ; + else if (TREE_CODE (type) == ARRAY_TYPE) + { + tree t = complete_type (TREE_TYPE (type)); + unsigned int needs_constructing, has_nontrivial_dtor; + if (COMPLETE_TYPE_P (t)) + layout_type (type); + needs_constructing = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (t)); + has_nontrivial_dtor + = TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TYPE_MAIN_VARIANT (t)); + for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) + { + TYPE_NEEDS_CONSTRUCTING (t) = needs_constructing; + TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) = has_nontrivial_dtor; + } + } + + return type; +} + +// forked from gcc/cp/typeck.cc complete_type_or_maybe_complain + +/* Like complete_type, but issue an error if the TYPE cannot be completed. + VALUE is used for informative diagnostics. + Returns NULL_TREE if the type cannot be made complete. */ + +tree +complete_type_or_maybe_complain (tree type, tree value, tsubst_flags_t complain) +{ + type = complete_type (type); + if (type == error_mark_node) + /* We already issued an error. */ + return NULL_TREE; + else if (!COMPLETE_TYPE_P (type)) + { + if (complain & tf_error) + cxx_incomplete_type_diagnostic (value, type, DK_ERROR); + note_failed_type_completion_for_satisfaction (type); + return NULL_TREE; + } + else + return type; +} + +// forked from gcc/cp/typeck.cc complete_type_or_else + +tree +complete_type_or_else (tree type, tree value) +{ + return complete_type_or_maybe_complain (type, value, tf_warning_or_error); +} + +// forked from gcc/cp/tree.cc std_layout_type_p + +/* Returns true iff T is a standard-layout type, as defined in + [basic.types]. 
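+   Arrays are stripped to their element type; class types consult the
+   CLASSTYPE_NON_STD_LAYOUT flag, and all other (scalar-like) types qualify.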
*/ + +bool +std_layout_type_p (const_tree t) +{ + t = strip_array_types (CONST_CAST_TREE (t)); + + if (CLASS_TYPE_P (t)) + return !CLASSTYPE_NON_STD_LAYOUT (t); + else + return scalarish_type_p (t); +} + +// forked from /gcc/cp/semantics.cc first_nonstatic_data_member_p + +/* Helper function for fold_builtin_is_pointer_inverconvertible_with_class, + return true if MEMBERTYPE is the type of the first non-static data member + of TYPE or for unions of any members. */ +static bool +first_nonstatic_data_member_p (tree type, tree membertype) +{ + for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) + { + if (TREE_CODE (field) != FIELD_DECL) + continue; + if (DECL_FIELD_IS_BASE (field) && is_empty_field (field)) + continue; + if (DECL_FIELD_IS_BASE (field)) + return first_nonstatic_data_member_p (TREE_TYPE (field), membertype); + if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) + { + if ((TREE_CODE (TREE_TYPE (field)) == UNION_TYPE + || std_layout_type_p (TREE_TYPE (field))) + && first_nonstatic_data_member_p (TREE_TYPE (field), membertype)) + return true; + } + else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), + membertype)) + return true; + if (TREE_CODE (type) != UNION_TYPE) + return false; + } + return false; +} + +// forked from gcc/cp/semantics.cc +// fold_builtin_is_pointer_inverconvertible_with_class + +/* Fold __builtin_is_pointer_interconvertible_with_class call. */ + +tree +fold_builtin_is_pointer_inverconvertible_with_class (location_t loc, int nargs, + tree *args) +{ + /* Unless users call the builtin directly, the following 3 checks should be + ensured from std::is_pointer_interconvertible_with_class function + template. */ + if (nargs != 1) + { + error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " + "needs a single argument"); + return boolean_false_node; + } + tree arg = args[0]; + if (error_operand_p (arg)) + return boolean_false_node; + if (!TYPE_PTRMEM_P (TREE_TYPE (arg))) + { + error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> " + "argument is not pointer to member"); + return boolean_false_node; + } + + if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg))) + return boolean_false_node; + + tree membertype = TREE_TYPE (TREE_TYPE (arg)); + tree basetype = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg)); + if (!complete_type_or_else (basetype, NULL_TREE)) + return boolean_false_node; + + if (TREE_CODE (basetype) != UNION_TYPE && !std_layout_type_p (basetype)) + return boolean_false_node; + + if (!first_nonstatic_data_member_p (basetype, membertype)) + return boolean_false_node; + + if (integer_nonzerop (arg)) + return boolean_false_node; + if (integer_zerop (arg)) + return boolean_true_node; + + return fold_build2 (EQ_EXPR, boolean_type_node, arg, + build_zero_cst (TREE_TYPE (arg))); +} + +// forked from gcc/c-family/c-common.cc registered_builtin_types + +/* Used for communication between c_common_type_for_mode and + c_register_builtin_type. */ +tree registered_builtin_types; + +/* Return a data type that has machine mode MODE. + If the mode is an integer, + then UNSIGNEDP selects between signed and unsigned types. + If the mode is a fixed-point mode, + then UNSIGNEDP selects between saturating and nonsaturating types. */ + +// forked from gcc/c-family/c-common.cc c_common_type_for_mode + +tree +c_common_type_for_mode (machine_mode mode, int unsignedp) +{ + tree t; + int i; + + if (mode == TYPE_MODE (integer_type_node)) + return unsignedp ? 
unsigned_type_node : integer_type_node; + + if (mode == TYPE_MODE (signed_char_type_node)) + return unsignedp ? unsigned_char_type_node : signed_char_type_node; + + if (mode == TYPE_MODE (short_integer_type_node)) + return unsignedp ? short_unsigned_type_node : short_integer_type_node; + + if (mode == TYPE_MODE (long_integer_type_node)) + return unsignedp ? long_unsigned_type_node : long_integer_type_node; + + if (mode == TYPE_MODE (long_long_integer_type_node)) + return unsignedp ? long_long_unsigned_type_node + : long_long_integer_type_node; + + for (i = 0; i < NUM_INT_N_ENTS; i++) + if (int_n_enabled_p[i] && mode == int_n_data[i].m) + return (unsignedp ? int_n_trees[i].unsigned_type + : int_n_trees[i].signed_type); + + if (mode == QImode) + return unsignedp ? unsigned_intQI_type_node : intQI_type_node; + + if (mode == HImode) + return unsignedp ? unsigned_intHI_type_node : intHI_type_node; + + if (mode == SImode) + return unsignedp ? unsigned_intSI_type_node : intSI_type_node; + + if (mode == DImode) + return unsignedp ? unsigned_intDI_type_node : intDI_type_node; + +#if HOST_BITS_PER_WIDE_INT >= 64 + if (mode == TYPE_MODE (intTI_type_node)) + return unsignedp ? unsigned_intTI_type_node : intTI_type_node; +#endif + + if (mode == TYPE_MODE (float_type_node)) + return float_type_node; + + if (mode == TYPE_MODE (double_type_node)) + return double_type_node; + + if (mode == TYPE_MODE (long_double_type_node)) + return long_double_type_node; + + for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) + if (FLOATN_NX_TYPE_NODE (i) != NULL_TREE + && mode == TYPE_MODE (FLOATN_NX_TYPE_NODE (i))) + return FLOATN_NX_TYPE_NODE (i); + + if (mode == TYPE_MODE (void_type_node)) + return void_type_node; + + if (mode == TYPE_MODE (build_pointer_type (char_type_node)) + || mode == TYPE_MODE (build_pointer_type (integer_type_node))) + { + unsigned int precision + = GET_MODE_PRECISION (as_a<scalar_int_mode> (mode)); + return (unsignedp ? 
make_unsigned_type (precision) + : make_signed_type (precision)); + } + + if (COMPLEX_MODE_P (mode)) + { + machine_mode inner_mode; + tree inner_type; + + if (mode == TYPE_MODE (complex_float_type_node)) + return complex_float_type_node; + if (mode == TYPE_MODE (complex_double_type_node)) + return complex_double_type_node; + if (mode == TYPE_MODE (complex_long_double_type_node)) + return complex_long_double_type_node; + + for (i = 0; i < NUM_FLOATN_NX_TYPES; i++) + if (COMPLEX_FLOATN_NX_TYPE_NODE (i) != NULL_TREE + && mode == TYPE_MODE (COMPLEX_FLOATN_NX_TYPE_NODE (i))) + return COMPLEX_FLOATN_NX_TYPE_NODE (i); + + if (mode == TYPE_MODE (complex_integer_type_node) && !unsignedp) + return complex_integer_type_node; + + inner_mode = GET_MODE_INNER (mode); + inner_type = c_common_type_for_mode (inner_mode, unsignedp); + if (inner_type != NULL_TREE) + return build_complex_type (inner_type); + } + else if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL + && valid_vector_subparts_p (GET_MODE_NUNITS (mode))) + { + unsigned int elem_bits + = vector_element_size (GET_MODE_BITSIZE (mode), GET_MODE_NUNITS (mode)); + tree bool_type = build_nonstandard_boolean_type (elem_bits); + return build_vector_type_for_mode (bool_type, mode); + } + else if (VECTOR_MODE_P (mode) + && valid_vector_subparts_p (GET_MODE_NUNITS (mode))) + { + machine_mode inner_mode = GET_MODE_INNER (mode); + tree inner_type = c_common_type_for_mode (inner_mode, unsignedp); + if (inner_type != NULL_TREE) + return build_vector_type_for_mode (inner_type, mode); + } + + if (dfloat32_type_node != NULL_TREE && mode == TYPE_MODE (dfloat32_type_node)) + return dfloat32_type_node; + if (dfloat64_type_node != NULL_TREE && mode == TYPE_MODE (dfloat64_type_node)) + return dfloat64_type_node; + if (dfloat128_type_node != NULL_TREE + && mode == TYPE_MODE (dfloat128_type_node)) + return dfloat128_type_node; + + if (ALL_SCALAR_FIXED_POINT_MODE_P (mode)) + { + if (mode == TYPE_MODE (short_fract_type_node)) + return unsignedp ? sat_short_fract_type_node : short_fract_type_node; + if (mode == TYPE_MODE (fract_type_node)) + return unsignedp ? sat_fract_type_node : fract_type_node; + if (mode == TYPE_MODE (long_fract_type_node)) + return unsignedp ? sat_long_fract_type_node : long_fract_type_node; + if (mode == TYPE_MODE (long_long_fract_type_node)) + return unsignedp ? sat_long_long_fract_type_node + : long_long_fract_type_node; + + if (mode == TYPE_MODE (unsigned_short_fract_type_node)) + return unsignedp ? sat_unsigned_short_fract_type_node + : unsigned_short_fract_type_node; + if (mode == TYPE_MODE (unsigned_fract_type_node)) + return unsignedp ? sat_unsigned_fract_type_node + : unsigned_fract_type_node; + if (mode == TYPE_MODE (unsigned_long_fract_type_node)) + return unsignedp ? sat_unsigned_long_fract_type_node + : unsigned_long_fract_type_node; + if (mode == TYPE_MODE (unsigned_long_long_fract_type_node)) + return unsignedp ? sat_unsigned_long_long_fract_type_node + : unsigned_long_long_fract_type_node; + + if (mode == TYPE_MODE (short_accum_type_node)) + return unsignedp ? sat_short_accum_type_node : short_accum_type_node; + if (mode == TYPE_MODE (accum_type_node)) + return unsignedp ? sat_accum_type_node : accum_type_node; + if (mode == TYPE_MODE (long_accum_type_node)) + return unsignedp ? sat_long_accum_type_node : long_accum_type_node; + if (mode == TYPE_MODE (long_long_accum_type_node)) + return unsignedp ? 
sat_long_long_accum_type_node + : long_long_accum_type_node; + + if (mode == TYPE_MODE (unsigned_short_accum_type_node)) + return unsignedp ? sat_unsigned_short_accum_type_node + : unsigned_short_accum_type_node; + if (mode == TYPE_MODE (unsigned_accum_type_node)) + return unsignedp ? sat_unsigned_accum_type_node + : unsigned_accum_type_node; + if (mode == TYPE_MODE (unsigned_long_accum_type_node)) + return unsignedp ? sat_unsigned_long_accum_type_node + : unsigned_long_accum_type_node; + if (mode == TYPE_MODE (unsigned_long_long_accum_type_node)) + return unsignedp ? sat_unsigned_long_long_accum_type_node + : unsigned_long_long_accum_type_node; + + if (mode == QQmode) + return unsignedp ? sat_qq_type_node : qq_type_node; + if (mode == HQmode) + return unsignedp ? sat_hq_type_node : hq_type_node; + if (mode == SQmode) + return unsignedp ? sat_sq_type_node : sq_type_node; + if (mode == DQmode) + return unsignedp ? sat_dq_type_node : dq_type_node; + if (mode == TQmode) + return unsignedp ? sat_tq_type_node : tq_type_node; + + if (mode == UQQmode) + return unsignedp ? sat_uqq_type_node : uqq_type_node; + if (mode == UHQmode) + return unsignedp ? sat_uhq_type_node : uhq_type_node; + if (mode == USQmode) + return unsignedp ? sat_usq_type_node : usq_type_node; + if (mode == UDQmode) + return unsignedp ? sat_udq_type_node : udq_type_node; + if (mode == UTQmode) + return unsignedp ? sat_utq_type_node : utq_type_node; + + if (mode == HAmode) + return unsignedp ? sat_ha_type_node : ha_type_node; + if (mode == SAmode) + return unsignedp ? sat_sa_type_node : sa_type_node; + if (mode == DAmode) + return unsignedp ? sat_da_type_node : da_type_node; + if (mode == TAmode) + return unsignedp ? sat_ta_type_node : ta_type_node; + + if (mode == UHAmode) + return unsignedp ? sat_uha_type_node : uha_type_node; + if (mode == USAmode) + return unsignedp ? sat_usa_type_node : usa_type_node; + if (mode == UDAmode) + return unsignedp ? sat_uda_type_node : uda_type_node; + if (mode == UTAmode) + return unsignedp ? sat_uta_type_node : uta_type_node; + } + + for (t = registered_builtin_types; t; t = TREE_CHAIN (t)) + { + tree type = TREE_VALUE (t); + if (TYPE_MODE (type) == mode + && VECTOR_TYPE_P (type) == VECTOR_MODE_P (mode) + && !!unsignedp == !!TYPE_UNSIGNED (type)) + return type; + } + return NULL_TREE; +} + +// forked from gcc/cp/semantics.cc finish_underlying_type + +/* Implement the __underlying_type keyword: Return the underlying + type of TYPE, suitable for use as a type-specifier. */ + +tree +finish_underlying_type (tree type) +{ + tree underlying_type; + + if (!complete_type_or_else (type, NULL_TREE)) + return error_mark_node; + + if (TREE_CODE (type) != ENUMERAL_TYPE) + { + error ("%qT is not an enumeration type", type); + return error_mark_node; + } + + underlying_type = ENUM_UNDERLYING_TYPE (type); + + /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE + includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information. + See finish_enum_value_list for details. */ + if (!ENUM_FIXED_UNDERLYING_TYPE_P (type)) + underlying_type = c_common_type_for_mode (TYPE_MODE (underlying_type), + TYPE_UNSIGNED (underlying_type)); + + return underlying_type; +} + +// forked from gcc/cp/typeck.cc layout_compatible_type_p + +/* Return true if TYPE1 and TYPE2 are layout-compatible types. 
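+   Enumerations must agree in size, alignment and underlying type;
+   standard-layout classes are compared member-wise via their common
+   initial sequence.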
*/ + +bool +layout_compatible_type_p (tree type1, tree type2) +{ + if (type1 == error_mark_node || type2 == error_mark_node) + return false; + if (type1 == type2) + return true; + if (TREE_CODE (type1) != TREE_CODE (type2)) + return false; + + type1 = rs_build_qualified_type (type1, TYPE_UNQUALIFIED); + type2 = rs_build_qualified_type (type2, TYPE_UNQUALIFIED); + + if (TREE_CODE (type1) == ENUMERAL_TYPE) + return (TYPE_ALIGN (type1) == TYPE_ALIGN (type2) + && tree_int_cst_equal (TYPE_SIZE (type1), TYPE_SIZE (type2)) + && same_type_p (finish_underlying_type (type1), + finish_underlying_type (type2))); + + if (CLASS_TYPE_P (type1) && std_layout_type_p (type1) + && std_layout_type_p (type2) && TYPE_ALIGN (type1) == TYPE_ALIGN (type2) + && tree_int_cst_equal (TYPE_SIZE (type1), TYPE_SIZE (type2))) + { + tree field1 = TYPE_FIELDS (type1); + tree field2 = TYPE_FIELDS (type2); + if (TREE_CODE (type1) == RECORD_TYPE) + { + while (1) + { + if (!next_common_initial_seqence (field1, field2)) + return false; + if (field1 == NULL_TREE) + return true; + field1 = DECL_CHAIN (field1); + field2 = DECL_CHAIN (field2); + } + } + /* Otherwise both types must be union types. + The standard says: + "Two standard-layout unions are layout-compatible if they have + the same number of non-static data members and corresponding + non-static data members (in any order) have layout-compatible + types." + but the code anticipates that bitfield vs. non-bitfield, + different bitfield widths or presence/absence of + [[no_unique_address]] should be checked as well. */ + auto_vec<tree, 16> vec; + unsigned int count = 0; + for (; field1; field1 = DECL_CHAIN (field1)) + if (TREE_CODE (field1) == FIELD_DECL) + count++; + for (; field2; field2 = DECL_CHAIN (field2)) + if (TREE_CODE (field2) == FIELD_DECL) + vec.safe_push (field2); + /* Discussions on core lean towards treating multiple union fields + of the same type as the same field, so this might need changing + in the future. */ + if (count != vec.length ()) + return false; + for (field1 = TYPE_FIELDS (type1); field1; field1 = DECL_CHAIN (field1)) + { + if (TREE_CODE (field1) != FIELD_DECL) + continue; + unsigned int j; + tree t1 = DECL_BIT_FIELD_TYPE (field1); + if (t1 == NULL_TREE) + t1 = TREE_TYPE (field1); + FOR_EACH_VEC_ELT (vec, j, field2) + { + tree t2 = DECL_BIT_FIELD_TYPE (field2); + if (t2 == NULL_TREE) + t2 = TREE_TYPE (field2); + if (DECL_BIT_FIELD_TYPE (field1)) + { + if (!DECL_BIT_FIELD_TYPE (field2)) + continue; + if (TYPE_PRECISION (TREE_TYPE (field1)) + != TYPE_PRECISION (TREE_TYPE (field2))) + continue; + } + else if (DECL_BIT_FIELD_TYPE (field2)) + continue; + if (!layout_compatible_type_p (t1, t2)) + continue; + if ((!lookup_attribute ("no_unique_address", + DECL_ATTRIBUTES (field1))) + != !lookup_attribute ("no_unique_address", + DECL_ATTRIBUTES (field2))) + continue; + break; + } + if (j == vec.length ()) + return false; + vec.unordered_remove (j); + } + return true; + } + + return same_type_p (type1, type2); +} + +// forked from gcc/cp/semnatics.cc is_corresponding_member_union + +/* Helper function for is_corresponding_member_aggr. Return true if + MEMBERTYPE pointer-to-data-member ARG can be found in anonymous + union or structure BASETYPE. 
*/ + +static bool +is_corresponding_member_union (tree basetype, tree membertype, tree arg) +{ + for (tree field = TYPE_FIELDS (basetype); field; field = DECL_CHAIN (field)) + if (TREE_CODE (field) != FIELD_DECL || DECL_BIT_FIELD_TYPE (field)) + continue; + else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field), + membertype)) + { + if (TREE_CODE (arg) != INTEGER_CST + || tree_int_cst_equal (arg, byte_position (field))) + return true; + } + else if (ANON_AGGR_TYPE_P (TREE_TYPE (field))) + { + tree narg = arg; + if (TREE_CODE (basetype) != UNION_TYPE + && TREE_CODE (narg) == INTEGER_CST) + narg = size_binop (MINUS_EXPR, arg, byte_position (field)); + if (is_corresponding_member_union (TREE_TYPE (field), membertype, narg)) + return true; + } + return false; +} + +// forked from gcc/cp/typeck.cc next_common_initial_seqence + +/* Helper function for layout_compatible_type_p and + is_corresponding_member_aggr. Advance to next members (NULL if + no further ones) and return true if those members are still part of + the common initial sequence. */ + +bool +next_common_initial_seqence (tree &memb1, tree &memb2) +{ + while (memb1) + { + if (TREE_CODE (memb1) != FIELD_DECL + || (DECL_FIELD_IS_BASE (memb1) && is_empty_field (memb1))) + { + memb1 = DECL_CHAIN (memb1); + continue; + } + if (DECL_FIELD_IS_BASE (memb1)) + { + memb1 = TYPE_FIELDS (TREE_TYPE (memb1)); + continue; + } + break; + } + while (memb2) + { + if (TREE_CODE (memb2) != FIELD_DECL + || (DECL_FIELD_IS_BASE (memb2) && is_empty_field (memb2))) + { + memb2 = DECL_CHAIN (memb2); + continue; + } + if (DECL_FIELD_IS_BASE (memb2)) + { + memb2 = TYPE_FIELDS (TREE_TYPE (memb2)); + continue; + } + break; + } + if (memb1 == NULL_TREE && memb2 == NULL_TREE) + return true; + if (memb1 == NULL_TREE || memb2 == NULL_TREE) + return false; + if (DECL_BIT_FIELD_TYPE (memb1)) + { + if (!DECL_BIT_FIELD_TYPE (memb2)) + return false; + if (!layout_compatible_type_p (DECL_BIT_FIELD_TYPE (memb1), + DECL_BIT_FIELD_TYPE (memb2))) + return false; + if (TYPE_PRECISION (TREE_TYPE (memb1)) + != TYPE_PRECISION (TREE_TYPE (memb2))) + return false; + } + else if (DECL_BIT_FIELD_TYPE (memb2)) + return false; + else if (!layout_compatible_type_p (TREE_TYPE (memb1), TREE_TYPE (memb2))) + return false; + if ((!lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (memb1))) + != !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (memb2))) + return false; + if (!tree_int_cst_equal (bit_position (memb1), bit_position (memb2))) + return false; + return true; +} + +// forked from gcc/cp/semantics.cc is_corresponding_member_aggr + +/* Helper function for fold_builtin_is_corresponding_member call. + Return boolean_false_node if MEMBERTYPE1 BASETYPE1::*ARG1 and + MEMBERTYPE2 BASETYPE2::*ARG2 aren't corresponding members, + boolean_true_node if they are corresponding members, or for + non-constant ARG2 the highest member offset for corresponding + members. 
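+   When constant, ARG1 and ARG2 are the byte offsets stored in the
+   pointer-to-data-member values.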
*/ + +static tree +is_corresponding_member_aggr (location_t loc, tree basetype1, tree membertype1, + tree arg1, tree basetype2, tree membertype2, + tree arg2) +{ + tree field1 = TYPE_FIELDS (basetype1); + tree field2 = TYPE_FIELDS (basetype2); + tree ret = boolean_false_node; + while (1) + { + bool r = next_common_initial_seqence (field1, field2); + if (field1 == NULL_TREE || field2 == NULL_TREE) + break; + if (r + && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field1), + membertype1) + && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field2), + membertype2)) + { + tree pos = byte_position (field1); + if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_equal (arg1, pos)) + { + if (TREE_CODE (arg2) == INTEGER_CST) + return boolean_true_node; + return pos; + } + else if (TREE_CODE (arg1) != INTEGER_CST) + ret = pos; + } + else if (ANON_AGGR_TYPE_P (TREE_TYPE (field1)) + && ANON_AGGR_TYPE_P (TREE_TYPE (field2))) + { + if ((!lookup_attribute ("no_unique_address", + DECL_ATTRIBUTES (field1))) + != !lookup_attribute ("no_unique_address", + DECL_ATTRIBUTES (field2))) + break; + if (!tree_int_cst_equal (bit_position (field1), + bit_position (field2))) + break; + bool overlap = true; + tree pos = byte_position (field1); + if (TREE_CODE (arg1) == INTEGER_CST) + { + tree off1 = fold_convert (sizetype, arg1); + tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (field1)); + if (tree_int_cst_lt (off1, pos) + || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz1), off1)) + overlap = false; + } + if (TREE_CODE (arg2) == INTEGER_CST) + { + tree off2 = fold_convert (sizetype, arg2); + tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (field2)); + if (tree_int_cst_lt (off2, pos) + || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz2), off2)) + overlap = false; + } + if (overlap && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field1)) + && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field2))) + { + tree narg1 = arg1; + if (TREE_CODE (arg1) == INTEGER_CST) + narg1 + = size_binop (MINUS_EXPR, fold_convert (sizetype, arg1), pos); + tree narg2 = arg2; + if (TREE_CODE (arg2) == INTEGER_CST) + narg2 + = size_binop (MINUS_EXPR, fold_convert (sizetype, arg2), pos); + tree t1 = TREE_TYPE (field1); + tree t2 = TREE_TYPE (field2); + tree nret + = is_corresponding_member_aggr (loc, t1, membertype1, narg1, t2, + membertype2, narg2); + if (nret != boolean_false_node) + { + if (nret == boolean_true_node) + return nret; + if (TREE_CODE (arg1) == INTEGER_CST) + return size_binop (PLUS_EXPR, nret, pos); + ret = size_binop (PLUS_EXPR, nret, pos); + } + } + else if (overlap && TREE_CODE (TREE_TYPE (field1)) == UNION_TYPE + && TREE_CODE (TREE_TYPE (field2)) == UNION_TYPE) + { + tree narg1 = arg1; + if (TREE_CODE (arg1) == INTEGER_CST) + narg1 + = size_binop (MINUS_EXPR, fold_convert (sizetype, arg1), pos); + tree narg2 = arg2; + if (TREE_CODE (arg2) == INTEGER_CST) + narg2 + = size_binop (MINUS_EXPR, fold_convert (sizetype, arg2), pos); + if (is_corresponding_member_union (TREE_TYPE (field1), + membertype1, narg1) + && is_corresponding_member_union (TREE_TYPE (field2), + membertype2, narg2)) + { + sorry_at (loc, "%<__builtin_is_corresponding_member%> " + "not well defined for anonymous unions"); + return boolean_false_node; + } + } + } + if (!r) + break; + field1 = DECL_CHAIN (field1); + field2 = DECL_CHAIN (field2); + } + return ret; +} + +// forked from gcc/cp/call.cc null_member_pointer_value_p + +/* Returns true iff T is a null member pointer value (4.11). 
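+   A null pointer to member function is a CONSTRUCTOR whose first element
+   is zero; a null pointer to data member is represented as all ones.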
*/ + +bool +null_member_pointer_value_p (tree t) +{ + tree type = TREE_TYPE (t); + if (!type) + return false; + else if (TYPE_PTRMEMFUNC_P (type)) + return (TREE_CODE (t) == CONSTRUCTOR && CONSTRUCTOR_NELTS (t) + && integer_zerop (CONSTRUCTOR_ELT (t, 0)->value)); + else if (TYPE_PTRDATAMEM_P (type)) + return integer_all_onesp (t); + else + return false; +} + +// forked from gcc/cp/semantics.cc fold_builtin_is_corresponding_member + +/* Fold __builtin_is_corresponding_member call. */ + +tree +fold_builtin_is_corresponding_member (location_t loc, int nargs, tree *args) +{ + /* Unless users call the builtin directly, the following 3 checks should be + ensured from std::is_corresponding_member function template. */ + if (nargs != 2) + { + error_at (loc, "%<__builtin_is_corresponding_member%> " + "needs two arguments"); + return boolean_false_node; + } + tree arg1 = args[0]; + tree arg2 = args[1]; + if (error_operand_p (arg1) || error_operand_p (arg2)) + return boolean_false_node; + if (!TYPE_PTRMEM_P (TREE_TYPE (arg1)) || !TYPE_PTRMEM_P (TREE_TYPE (arg2))) + { + error_at (loc, "%<__builtin_is_corresponding_member%> " + "argument is not pointer to member"); + return boolean_false_node; + } + + if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg1)) + || !TYPE_PTRDATAMEM_P (TREE_TYPE (arg2))) + return boolean_false_node; + + tree membertype1 = TREE_TYPE (TREE_TYPE (arg1)); + tree basetype1 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg1)); + if (!complete_type_or_else (basetype1, NULL_TREE)) + return boolean_false_node; + + tree membertype2 = TREE_TYPE (TREE_TYPE (arg2)); + tree basetype2 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg2)); + if (!complete_type_or_else (basetype2, NULL_TREE)) + return boolean_false_node; + + if (!NON_UNION_CLASS_TYPE_P (basetype1) || !NON_UNION_CLASS_TYPE_P (basetype2) + || !std_layout_type_p (basetype1) || !std_layout_type_p (basetype2)) + return boolean_false_node; + + /* If the member types aren't layout compatible, then they + can't be corresponding members. */ + if (!layout_compatible_type_p (membertype1, membertype2)) + return boolean_false_node; + + if (null_member_pointer_value_p (arg1) || null_member_pointer_value_p (arg2)) + return boolean_false_node; + + if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST + && !tree_int_cst_equal (arg1, arg2)) + return boolean_false_node; + + if (TREE_CODE (arg2) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST) + { + std::swap (arg1, arg2); + std::swap (membertype1, membertype2); + std::swap (basetype1, basetype2); + } + + tree ret = is_corresponding_member_aggr (loc, basetype1, membertype1, arg1, + basetype2, membertype2, arg2); + if (TREE_TYPE (ret) == boolean_type_node) + return ret; + /* If both arg1 and arg2 are INTEGER_CSTs, is_corresponding_member_aggr + already returns boolean_{true,false}_node whether those particular + members are corresponding members or not. Otherwise, if only + one of them is INTEGER_CST (canonicalized to first being INTEGER_CST + above), it returns boolean_false_node if it is certainly not a + corresponding member and otherwise we need to do a runtime check that + those two OFFSET_TYPE offsets are equal. + If neither of the operands is INTEGER_CST, is_corresponding_member_aggr + returns the largest offset at which the members would be corresponding + members, so perform arg1 <= ret && arg1 == arg2 runtime check. 
*/ + gcc_assert (TREE_CODE (arg2) != INTEGER_CST); + if (TREE_CODE (arg1) == INTEGER_CST) + return fold_build2 (EQ_EXPR, boolean_type_node, arg1, + fold_convert (TREE_TYPE (arg1), arg2)); + ret = fold_build2 (LE_EXPR, boolean_type_node, + fold_convert (pointer_sized_int_node, arg1), + fold_convert (pointer_sized_int_node, ret)); + return fold_build2 (TRUTH_AND_EXPR, boolean_type_node, ret, + fold_build2 (EQ_EXPR, boolean_type_node, arg1, + fold_convert (TREE_TYPE (arg1), arg2))); +} + +// forked from gcc/cp/tree.cc lvalue_type + +/* The type of ARG when used as an lvalue. */ + +tree +lvalue_type (tree arg) +{ + tree type = TREE_TYPE (arg); + return type; +} + +// forked from gcc/c-family/c-warn.cc lvalue_error + +/* Print an error message for an invalid lvalue. USE says + how the lvalue is being used and so selects the error message. LOC + is the location for the error. */ + +void +lvalue_error (location_t loc, enum lvalue_use use) +{ + switch (use) + { + case lv_assign: + error_at (loc, "lvalue required as left operand of assignment"); + break; + case lv_increment: + error_at (loc, "lvalue required as increment operand"); + break; + case lv_decrement: + error_at (loc, "lvalue required as decrement operand"); + break; + case lv_addressof: + error_at (loc, "lvalue required as unary %<&%> operand"); + break; + case lv_asm: + error_at (loc, "lvalue required in %<asm%> statement"); + break; + default: + gcc_unreachable (); + } +} + +// forked from gcc/cp/cp--gimplify.cc cp_fold_maybe_rvalue + +/* Fold expression X which is used as an rvalue if RVAL is true. */ + +tree +cp_fold_maybe_rvalue (tree x, bool rval) +{ + while (true) + { + x = fold (x); + if (rval) + x = mark_rvalue_use (x); + if (rval && DECL_P (x) && !TYPE_REF_P (TREE_TYPE (x))) + { + tree v = decl_constant_value (x); + if (v != x && v != error_mark_node) + { + x = v; + continue; + } + } + break; + } + return x; +} + +// forked from gcc/cp/cp--gimplify.cc cp_fold_rvalue + +/* Fold expression X which is used as an rvalue. */ + +tree +cp_fold_rvalue (tree x) +{ + return cp_fold_maybe_rvalue (x, true); +} + +/* Returns true iff class T has a constexpr destructor or has an + implicitly declared destructor that we can't tell if it's constexpr + without forcing a lazy declaration (which might cause undesired + instantiations). */ + +static bool +type_maybe_constexpr_destructor (tree t) +{ + /* Until C++20, only trivial destruction is constexpr. */ + if (TYPE_HAS_TRIVIAL_DESTRUCTOR (t)) + return true; + + if (CLASS_TYPE_P (t) && CLASSTYPE_LAZY_DESTRUCTOR (t)) + /* Assume it's constexpr. */ + return true; + tree fn = CLASSTYPE_DESTRUCTOR (t); + return (fn && Compile::maybe_constexpr_fn (fn)); +} + +/* T is a non-literal type used in a context which requires a constant + expression. Explain why it isn't literal. */ + +void +explain_non_literal_class (tree t) +{ + static hash_set<tree> *diagnosed; + + if (!CLASS_TYPE_P (t)) + return; + t = TYPE_MAIN_VARIANT (t); + + if (diagnosed == NULL) + diagnosed = new hash_set<tree>; + if (diagnosed->add (t)) + /* Already explained. 
*/ + return; + + auto_diagnostic_group d; + inform (UNKNOWN_LOCATION, "%q+T is not literal because:", t); + if (LAMBDA_TYPE_P (t)) + inform (UNKNOWN_LOCATION, + " %qT is a closure type, which is only literal in " + "C++17 and later", + t); + else if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t) + && !type_maybe_constexpr_destructor (t)) + inform (UNKNOWN_LOCATION, " %q+T does not have %<constexpr%> destructor", + t); + else if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (t)) + inform (UNKNOWN_LOCATION, " %q+T has a non-trivial destructor", t); + else if (CLASSTYPE_NON_AGGREGATE (t) && !TYPE_HAS_TRIVIAL_DFLT (t) + && !LAMBDA_TYPE_P (t) && !TYPE_HAS_CONSTEXPR_CTOR (t)) + { + inform (UNKNOWN_LOCATION, + " %q+T is not an aggregate, does not have a trivial " + "default constructor, and has no %<constexpr%> constructor that " + "is not a copy or move constructor", + t); + if (type_has_non_user_provided_default_constructor (t)) + /* Note that we can't simply call locate_ctor because when the + constructor is deleted it just returns NULL_TREE. */ + for (ovl_iterator iter (CLASSTYPE_CONSTRUCTORS (t)); iter; ++iter) + { + tree fn = *iter; + tree parms = TYPE_ARG_TYPES (TREE_TYPE (fn)); + + parms = skip_artificial_parms_for (fn, parms); + + if (sufficient_parms_p (parms)) + { + Compile::explain_invalid_constexpr_fn (fn); + break; + } + } + } + else + { + tree binfo, base_binfo, field; + int i; + for (binfo = TYPE_BINFO (t), i = 0; + BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) + { + tree basetype = TREE_TYPE (base_binfo); + if (!CLASSTYPE_LITERAL_P (basetype)) + { + inform (UNKNOWN_LOCATION, + " base class %qT of %q+T is non-literal", basetype, t); + explain_non_literal_class (basetype); + return; + } + } + for (field = TYPE_FIELDS (t); field; field = TREE_CHAIN (field)) + { + tree ftype; + if (TREE_CODE (field) != FIELD_DECL) + continue; + ftype = TREE_TYPE (field); + if (!Compile::literal_type_p (ftype)) + { + inform (DECL_SOURCE_LOCATION (field), + " non-static data member %qD has non-literal type", + field); + if (CLASS_TYPE_P (ftype)) + explain_non_literal_class (ftype); + } + if (RS_TYPE_VOLATILE_P (ftype)) + inform (DECL_SOURCE_LOCATION (field), + " non-static data member %qD has volatile type", field); + } + } +} + +// forked from gcc/cp/call.cc reference_related_p + +/* Returns nonzero if T1 is reference-related to T2. */ + +bool +reference_related_p (tree t1, tree t2) +{ + if (t1 == error_mark_node || t2 == error_mark_node) + return false; + + t1 = TYPE_MAIN_VARIANT (t1); + t2 = TYPE_MAIN_VARIANT (t2); + + /* [dcl.init.ref] + + Given types "cv1 T1" and "cv2 T2," "cv1 T1" is reference-related + to "cv2 T2" if T1 is similar to T2, or T1 is a base class of T2. */ + return (similar_type_p (t1, t2) + /*|| (CLASS_TYPE_P (t1) && CLASS_TYPE_P (t2) + && DERIVED_FROM_P (t1, t2))*/); +} + +// forked from gcc/cp/typeck2.cc ordinary_char_type_p + +/* True iff TYPE is a C++20 "ordinary" character type. */ + +bool +ordinary_char_type_p (tree type) +{ + type = TYPE_MAIN_VARIANT (type); + return (type == char_type_node || type == signed_char_type_node + || type == unsigned_char_type_node); +} + +// forked from gcc/cp/typeck2.cc array_string_literal_compatible_p + +/* True iff the string literal INIT has a type suitable for initializing array + TYPE. 
*/ + +bool +array_string_literal_compatible_p (tree type, tree init) +{ + tree to_char_type = TYPE_MAIN_VARIANT (TREE_TYPE (type)); + tree from_char_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (init))); + + if (to_char_type == from_char_type) + return true; + /* The array element type does not match the initializing string + literal element type; this is only allowed when both types are + ordinary character type. There are no string literals of + signed or unsigned char type in the language, but we can get + them internally from converting braced-init-lists to + STRING_CST. */ + if (ordinary_char_type_p (to_char_type) + && ordinary_char_type_p (from_char_type)) + return true; + return false; +} + } // namespace Rust diff --git a/gcc/rust/backend/rust-tree.h b/gcc/rust/backend/rust-tree.h index f331112..41dd012b 100644 --- a/gcc/rust/backend/rust-tree.h +++ b/gcc/rust/backend/rust-tree.h @@ -22,6 +22,8 @@ #include "rust-system.h" #include "coretypes.h" #include "tree.h" +#include "cpplib.h" +#include "splay-tree.h" /* Returns true if NODE is a pointer. */ #define TYPE_PTR_P(NODE) (TREE_CODE (NODE) == POINTER_TYPE) @@ -156,6 +158,1454 @@ #define VAR_OR_FUNCTION_DECL_CHECK(NODE) \ TREE_CHECK2 (NODE, VAR_DECL, FUNCTION_DECL) +// forked from gcc/cp/c-common.h c_tree_index + +/* Standard named or nameless data types of the C compiler. */ + +enum c_tree_index +{ + CTI_CHAR8_TYPE, + CTI_CHAR16_TYPE, + CTI_CHAR32_TYPE, + CTI_WCHAR_TYPE, + CTI_UNDERLYING_WCHAR_TYPE, + CTI_WINT_TYPE, + CTI_SIGNED_SIZE_TYPE, /* For format checking only. */ + CTI_UNSIGNED_PTRDIFF_TYPE, /* For format checking only. */ + CTI_INTMAX_TYPE, + CTI_UINTMAX_TYPE, + CTI_WIDEST_INT_LIT_TYPE, + CTI_WIDEST_UINT_LIT_TYPE, + + /* Types for <stdint.h>, that may not be defined on all + targets. */ + CTI_SIG_ATOMIC_TYPE, + CTI_INT8_TYPE, + CTI_INT16_TYPE, + CTI_INT32_TYPE, + CTI_INT64_TYPE, + CTI_UINT8_TYPE, + CTI_UINT16_TYPE, + CTI_UINT32_TYPE, + CTI_UINT64_TYPE, + CTI_INT_LEAST8_TYPE, + CTI_INT_LEAST16_TYPE, + CTI_INT_LEAST32_TYPE, + CTI_INT_LEAST64_TYPE, + CTI_UINT_LEAST8_TYPE, + CTI_UINT_LEAST16_TYPE, + CTI_UINT_LEAST32_TYPE, + CTI_UINT_LEAST64_TYPE, + CTI_INT_FAST8_TYPE, + CTI_INT_FAST16_TYPE, + CTI_INT_FAST32_TYPE, + CTI_INT_FAST64_TYPE, + CTI_UINT_FAST8_TYPE, + CTI_UINT_FAST16_TYPE, + CTI_UINT_FAST32_TYPE, + CTI_UINT_FAST64_TYPE, + CTI_INTPTR_TYPE, + CTI_UINTPTR_TYPE, + + CTI_CHAR_ARRAY_TYPE, + CTI_CHAR8_ARRAY_TYPE, + CTI_CHAR16_ARRAY_TYPE, + CTI_CHAR32_ARRAY_TYPE, + CTI_WCHAR_ARRAY_TYPE, + CTI_STRING_TYPE, + CTI_CONST_STRING_TYPE, + + /* Type for boolean expressions (bool in C++, int in C). */ + CTI_TRUTHVALUE_TYPE, + CTI_TRUTHVALUE_TRUE, + CTI_TRUTHVALUE_FALSE, + + CTI_DEFAULT_FUNCTION_TYPE, + + CTI_NULL, + + /* These are not types, but we have to look them up all the time. */ + CTI_FUNCTION_NAME_DECL, + CTI_PRETTY_FUNCTION_NAME_DECL, + CTI_C99_FUNCTION_NAME_DECL, + + CTI_MODULE_HWM, + /* Below here entities change during compilation. 
*/ + + CTI_SAVED_FUNCTION_NAME_DECLS, + + CTI_MAX +}; + +// forked from gcc/c-family/c-common.h c_tree_index + +extern GTY (()) tree c_global_trees[CTI_MAX]; + +// forked from gcc/cp/cp-tree.h cp_tree_index + +enum cp_tree_index +{ + CPTI_WCHAR_DECL, + CPTI_VTABLE_ENTRY_TYPE, + CPTI_DELTA_TYPE, + CPTI_VTABLE_INDEX_TYPE, + CPTI_CLEANUP_TYPE, + CPTI_VTT_PARM_TYPE, + + CPTI_CLASS_TYPE, + CPTI_UNKNOWN_TYPE, + CPTI_INIT_LIST_TYPE, + CPTI_EXPLICIT_VOID_LIST, + CPTI_VTBL_TYPE, + CPTI_VTBL_PTR_TYPE, + CPTI_GLOBAL, + CPTI_ABORT_FNDECL, + CPTI_AGGR_TAG, + CPTI_CONV_OP_MARKER, + + CPTI_CTOR_IDENTIFIER, + CPTI_COMPLETE_CTOR_IDENTIFIER, + CPTI_BASE_CTOR_IDENTIFIER, + CPTI_DTOR_IDENTIFIER, + CPTI_COMPLETE_DTOR_IDENTIFIER, + CPTI_BASE_DTOR_IDENTIFIER, + CPTI_DELETING_DTOR_IDENTIFIER, + CPTI_CONV_OP_IDENTIFIER, + CPTI_DELTA_IDENTIFIER, + CPTI_IN_CHARGE_IDENTIFIER, + CPTI_VTT_PARM_IDENTIFIER, + CPTI_AS_BASE_IDENTIFIER, + CPTI_THIS_IDENTIFIER, + CPTI_PFN_IDENTIFIER, + CPTI_VPTR_IDENTIFIER, + CPTI_GLOBAL_IDENTIFIER, + CPTI_ANON_IDENTIFIER, + CPTI_AUTO_IDENTIFIER, + CPTI_DECLTYPE_AUTO_IDENTIFIER, + CPTI_INIT_LIST_IDENTIFIER, + CPTI_FOR_RANGE__IDENTIFIER, + CPTI_FOR_BEGIN__IDENTIFIER, + CPTI_FOR_END__IDENTIFIER, + CPTI_FOR_RANGE_IDENTIFIER, + CPTI_FOR_BEGIN_IDENTIFIER, + CPTI_FOR_END_IDENTIFIER, + CPTI_ABI_TAG_IDENTIFIER, + CPTI_ALIGNED_IDENTIFIER, + CPTI_BEGIN_IDENTIFIER, + CPTI_END_IDENTIFIER, + CPTI_GET_IDENTIFIER, + CPTI_GNU_IDENTIFIER, + CPTI_TUPLE_ELEMENT_IDENTIFIER, + CPTI_TUPLE_SIZE_IDENTIFIER, + CPTI_TYPE_IDENTIFIER, + CPTI_VALUE_IDENTIFIER, + CPTI_FUN_IDENTIFIER, + CPTI_CLOSURE_IDENTIFIER, + CPTI_HEAP_UNINIT_IDENTIFIER, + CPTI_HEAP_IDENTIFIER, + CPTI_HEAP_DELETED_IDENTIFIER, + CPTI_HEAP_VEC_UNINIT_IDENTIFIER, + CPTI_HEAP_VEC_IDENTIFIER, + CPTI_OMP_IDENTIFIER, + + CPTI_LANG_NAME_C, + CPTI_LANG_NAME_CPLUSPLUS, + + CPTI_EMPTY_EXCEPT_SPEC, + CPTI_NOEXCEPT_TRUE_SPEC, + CPTI_NOEXCEPT_FALSE_SPEC, + CPTI_NOEXCEPT_DEFERRED_SPEC, + + CPTI_NULLPTR, + CPTI_NULLPTR_TYPE, + + CPTI_ANY_TARG, + + CPTI_MODULE_HWM, + /* Nodes after here change during compilation, or should not be in + the module's global tree table. Such nodes must be locatable + via name lookup or type-construction, as those are the only + cross-TU matching capabilities remaining. */ + + /* We must find these via the global namespace. */ + CPTI_STD, + CPTI_ABI, + + /* These are created at init time, but the library/headers provide + definitions. */ + CPTI_ALIGN_TYPE, + CPTI_TERMINATE_FN, + CPTI_CALL_UNEXPECTED_FN, + + /* These are lazily inited. */ + CPTI_CONST_TYPE_INFO_TYPE, + CPTI_GET_EXCEPTION_PTR_FN, + CPTI_BEGIN_CATCH_FN, + CPTI_END_CATCH_FN, + CPTI_ALLOCATE_EXCEPTION_FN, + CPTI_FREE_EXCEPTION_FN, + CPTI_THROW_FN, + CPTI_RETHROW_FN, + CPTI_ATEXIT_FN_PTR_TYPE, + CPTI_ATEXIT, + CPTI_DSO_HANDLE, + CPTI_DCAST, + + CPTI_SOURCE_LOCATION_IMPL, + + CPTI_FALLBACK_DFLOAT32_TYPE, + CPTI_FALLBACK_DFLOAT64_TYPE, + CPTI_FALLBACK_DFLOAT128_TYPE, + + CPTI_MAX +}; + +// forked from gcc/cp/cp-tree.h cp_global_trees + +extern GTY (()) tree cp_global_trees[CPTI_MAX]; + +#define wchar_decl_node cp_global_trees[CPTI_WCHAR_DECL] +#define vtable_entry_type cp_global_trees[CPTI_VTABLE_ENTRY_TYPE] +/* The type used to represent an offset by which to adjust the `this' + pointer in pointer-to-member types. */ +#define delta_type_node cp_global_trees[CPTI_DELTA_TYPE] +/* The type used to represent an index into the vtable. 
*/ +#define vtable_index_type cp_global_trees[CPTI_VTABLE_INDEX_TYPE] + +#define class_type_node cp_global_trees[CPTI_CLASS_TYPE] +#define unknown_type_node cp_global_trees[CPTI_UNKNOWN_TYPE] +#define init_list_type_node cp_global_trees[CPTI_INIT_LIST_TYPE] +#define explicit_void_list_node cp_global_trees[CPTI_EXPLICIT_VOID_LIST] +#define vtbl_type_node cp_global_trees[CPTI_VTBL_TYPE] +#define vtbl_ptr_type_node cp_global_trees[CPTI_VTBL_PTR_TYPE] +#define std_node cp_global_trees[CPTI_STD] +#define abi_node cp_global_trees[CPTI_ABI] +#define global_namespace cp_global_trees[CPTI_GLOBAL] +#define const_type_info_type_node cp_global_trees[CPTI_CONST_TYPE_INFO_TYPE] +#define conv_op_marker cp_global_trees[CPTI_CONV_OP_MARKER] +#define abort_fndecl cp_global_trees[CPTI_ABORT_FNDECL] +#define current_aggr cp_global_trees[CPTI_AGGR_TAG] +#define nullptr_node cp_global_trees[CPTI_NULLPTR] +#define nullptr_type_node cp_global_trees[CPTI_NULLPTR_TYPE] +/* std::align_val_t */ +#define align_type_node cp_global_trees[CPTI_ALIGN_TYPE] + +#define char8_type_node c_global_trees[CTI_CHAR8_TYPE] +#define char16_type_node c_global_trees[CTI_CHAR16_TYPE] +#define char32_type_node c_global_trees[CTI_CHAR32_TYPE] +#define wchar_type_node c_global_trees[CTI_WCHAR_TYPE] +#define underlying_wchar_type_node c_global_trees[CTI_UNDERLYING_WCHAR_TYPE] +#define wint_type_node c_global_trees[CTI_WINT_TYPE] +#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE] +#define unsigned_ptrdiff_type_node c_global_trees[CTI_UNSIGNED_PTRDIFF_TYPE] +#define intmax_type_node c_global_trees[CTI_INTMAX_TYPE] +#define uintmax_type_node c_global_trees[CTI_UINTMAX_TYPE] +#define widest_integer_literal_type_node c_global_trees[CTI_WIDEST_INT_LIT_TYPE] +#define widest_unsigned_literal_type_node \ + c_global_trees[CTI_WIDEST_UINT_LIT_TYPE] + +#define sig_atomic_type_node c_global_trees[CTI_SIG_ATOMIC_TYPE] +#define int8_type_node c_global_trees[CTI_INT8_TYPE] +#define int16_type_node c_global_trees[CTI_INT16_TYPE] +#define int32_type_node c_global_trees[CTI_INT32_TYPE] +#define int64_type_node c_global_trees[CTI_INT64_TYPE] +#define uint8_type_node c_global_trees[CTI_UINT8_TYPE] +#define c_uint16_type_node c_global_trees[CTI_UINT16_TYPE] +#define c_uint32_type_node c_global_trees[CTI_UINT32_TYPE] +#define c_uint64_type_node c_global_trees[CTI_UINT64_TYPE] +#define int_least8_type_node c_global_trees[CTI_INT_LEAST8_TYPE] +#define int_least16_type_node c_global_trees[CTI_INT_LEAST16_TYPE] +#define int_least32_type_node c_global_trees[CTI_INT_LEAST32_TYPE] +#define int_least64_type_node c_global_trees[CTI_INT_LEAST64_TYPE] +#define uint_least8_type_node c_global_trees[CTI_UINT_LEAST8_TYPE] +#define uint_least16_type_node c_global_trees[CTI_UINT_LEAST16_TYPE] +#define uint_least32_type_node c_global_trees[CTI_UINT_LEAST32_TYPE] +#define uint_least64_type_node c_global_trees[CTI_UINT_LEAST64_TYPE] +#define int_fast8_type_node c_global_trees[CTI_INT_FAST8_TYPE] +#define int_fast16_type_node c_global_trees[CTI_INT_FAST16_TYPE] +#define int_fast32_type_node c_global_trees[CTI_INT_FAST32_TYPE] +#define int_fast64_type_node c_global_trees[CTI_INT_FAST64_TYPE] +#define uint_fast8_type_node c_global_trees[CTI_UINT_FAST8_TYPE] +#define uint_fast16_type_node c_global_trees[CTI_UINT_FAST16_TYPE] +#define uint_fast32_type_node c_global_trees[CTI_UINT_FAST32_TYPE] +#define uint_fast64_type_node c_global_trees[CTI_UINT_FAST64_TYPE] +#define intptr_type_node c_global_trees[CTI_INTPTR_TYPE] +#define uintptr_type_node 
c_global_trees[CTI_UINTPTR_TYPE] + +#define truthvalue_type_node c_global_trees[CTI_TRUTHVALUE_TYPE] +#define truthvalue_true_node c_global_trees[CTI_TRUTHVALUE_TRUE] +#define truthvalue_false_node c_global_trees[CTI_TRUTHVALUE_FALSE] + +#define char_array_type_node c_global_trees[CTI_CHAR_ARRAY_TYPE] +#define char8_array_type_node c_global_trees[CTI_CHAR8_ARRAY_TYPE] +#define char16_array_type_node c_global_trees[CTI_CHAR16_ARRAY_TYPE] +#define char32_array_type_node c_global_trees[CTI_CHAR32_ARRAY_TYPE] +#define wchar_array_type_node c_global_trees[CTI_WCHAR_ARRAY_TYPE] +#define string_type_node c_global_trees[CTI_STRING_TYPE] +#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE] + +#define default_function_type c_global_trees[CTI_DEFAULT_FUNCTION_TYPE] + +#define function_name_decl_node c_global_trees[CTI_FUNCTION_NAME_DECL] +#define pretty_function_name_decl_node \ + c_global_trees[CTI_PRETTY_FUNCTION_NAME_DECL] +#define c99_function_name_decl_node c_global_trees[CTI_C99_FUNCTION_NAME_DECL] +#define saved_function_name_decls c_global_trees[CTI_SAVED_FUNCTION_NAME_DECLS] + +/* The node for C++ `__null'. */ +#define null_node c_global_trees[CTI_NULL] + +/* We cache these tree nodes so as to call get_identifier less frequently. + For identifiers for functions, including special member functions such + as ctors and assignment operators, the nodes can be used (among other + things) to iterate over their overloads defined by/for a type. For + example: + + tree ovlid = assign_op_identifier; + tree overloads = get_class_binding (type, ovlid); + for (ovl_iterator it (overloads); it; ++it) { ... } + + iterates over the set of implicitly and explicitly defined overloads + of the assignment operator for type (including the copy and move + assignment operators, whether deleted or not). */ + +/* The name of a constructor that takes an in-charge parameter to + decide whether or not to construct virtual base classes. */ +#define ctor_identifier cp_global_trees[CPTI_CTOR_IDENTIFIER] +/* The name of a constructor that constructs virtual base classes. */ +#define complete_ctor_identifier cp_global_trees[CPTI_COMPLETE_CTOR_IDENTIFIER] +/* The name of a constructor that does not construct virtual base classes. */ +#define base_ctor_identifier cp_global_trees[CPTI_BASE_CTOR_IDENTIFIER] +/* The name of a destructor that takes an in-charge parameter to + decide whether or not to destroy virtual base classes and whether + or not to delete the object. */ +#define dtor_identifier cp_global_trees[CPTI_DTOR_IDENTIFIER] +/* The name of a destructor that destroys virtual base classes. */ +#define complete_dtor_identifier cp_global_trees[CPTI_COMPLETE_DTOR_IDENTIFIER] +/* The name of a destructor that does not destroy virtual base + classes. */ +#define base_dtor_identifier cp_global_trees[CPTI_BASE_DTOR_IDENTIFIER] +/* The name of a destructor that destroys virtual base classes, and + then deletes the entire object. */ +#define deleting_dtor_identifier cp_global_trees[CPTI_DELETING_DTOR_IDENTIFIER] + +/* The name used for conversion operators -- but note that actual + conversion functions use special identifiers outside the identifier + table. */ +#define conv_op_identifier cp_global_trees[CPTI_CONV_OP_IDENTIFIER] + +#define delta_identifier cp_global_trees[CPTI_DELTA_IDENTIFIER] +#define in_charge_identifier cp_global_trees[CPTI_IN_CHARGE_IDENTIFIER] +/* The name of the parameter that contains a pointer to the VTT to use + for this subobject constructor or destructor. 
*/ +#define vtt_parm_identifier cp_global_trees[CPTI_VTT_PARM_IDENTIFIER] +#define as_base_identifier cp_global_trees[CPTI_AS_BASE_IDENTIFIER] +#define this_identifier cp_global_trees[CPTI_THIS_IDENTIFIER] +#define pfn_identifier cp_global_trees[CPTI_PFN_IDENTIFIER] +#define vptr_identifier cp_global_trees[CPTI_VPTR_IDENTIFIER] +/* The name of the ::, std & anon namespaces. */ +#define global_identifier cp_global_trees[CPTI_GLOBAL_IDENTIFIER] +#define anon_identifier cp_global_trees[CPTI_ANON_IDENTIFIER] +/* auto and declspec(auto) identifiers. */ +#define auto_identifier cp_global_trees[CPTI_AUTO_IDENTIFIER] +#define decltype_auto_identifier cp_global_trees[CPTI_DECLTYPE_AUTO_IDENTIFIER] +#define init_list_identifier cp_global_trees[CPTI_INIT_LIST_IDENTIFIER] +#define for_range__identifier cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER] +#define for_begin__identifier cp_global_trees[CPTI_FOR_BEGIN__IDENTIFIER] +#define for_end__identifier cp_global_trees[CPTI_FOR_END__IDENTIFIER] +#define for_range_identifier cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER] +#define for_begin_identifier cp_global_trees[CPTI_FOR_BEGIN_IDENTIFIER] +#define for_end_identifier cp_global_trees[CPTI_FOR_END_IDENTIFIER] +#define abi_tag_identifier cp_global_trees[CPTI_ABI_TAG_IDENTIFIER] +#define aligned_identifier cp_global_trees[CPTI_ALIGNED_IDENTIFIER] +#define begin_identifier cp_global_trees[CPTI_BEGIN_IDENTIFIER] +#define end_identifier cp_global_trees[CPTI_END_IDENTIFIER] +#define get__identifier cp_global_trees[CPTI_GET_IDENTIFIER] +#define gnu_identifier cp_global_trees[CPTI_GNU_IDENTIFIER] +#define tuple_element_identifier cp_global_trees[CPTI_TUPLE_ELEMENT_IDENTIFIER] +#define tuple_size_identifier cp_global_trees[CPTI_TUPLE_SIZE_IDENTIFIER] +#define type_identifier cp_global_trees[CPTI_TYPE_IDENTIFIER] +#define value_identifier cp_global_trees[CPTI_VALUE_IDENTIFIER] +#define fun_identifier cp_global_trees[CPTI_FUN_IDENTIFIER] +#define closure_identifier cp_global_trees[CPTI_CLOSURE_IDENTIFIER] +#define heap_uninit_identifier cp_global_trees[CPTI_HEAP_UNINIT_IDENTIFIER] +#define heap_identifier cp_global_trees[CPTI_HEAP_IDENTIFIER] +#define heap_deleted_identifier cp_global_trees[CPTI_HEAP_DELETED_IDENTIFIER] +#define heap_vec_uninit_identifier \ + cp_global_trees[CPTI_HEAP_VEC_UNINIT_IDENTIFIER] +#define heap_vec_identifier cp_global_trees[CPTI_HEAP_VEC_IDENTIFIER] +#define omp_identifier cp_global_trees[CPTI_OMP_IDENTIFIER] +#define lang_name_c cp_global_trees[CPTI_LANG_NAME_C] +#define lang_name_cplusplus cp_global_trees[CPTI_LANG_NAME_CPLUSPLUS] + +/* Exception specifiers used for throw(), noexcept(true), + noexcept(false) and deferred noexcept. We rely on these being + uncloned. */ +#define empty_except_spec cp_global_trees[CPTI_EMPTY_EXCEPT_SPEC] +#define noexcept_true_spec cp_global_trees[CPTI_NOEXCEPT_TRUE_SPEC] +#define noexcept_false_spec cp_global_trees[CPTI_NOEXCEPT_FALSE_SPEC] +#define noexcept_deferred_spec cp_global_trees[CPTI_NOEXCEPT_DEFERRED_SPEC] + +/* Exception handling function declarations. 
*/ +#define terminate_fn cp_global_trees[CPTI_TERMINATE_FN] +#define call_unexpected_fn cp_global_trees[CPTI_CALL_UNEXPECTED_FN] +#define get_exception_ptr_fn cp_global_trees[CPTI_GET_EXCEPTION_PTR_FN] +#define begin_catch_fn cp_global_trees[CPTI_BEGIN_CATCH_FN] +#define end_catch_fn cp_global_trees[CPTI_END_CATCH_FN] +#define allocate_exception_fn cp_global_trees[CPTI_ALLOCATE_EXCEPTION_FN] +#define free_exception_fn cp_global_trees[CPTI_FREE_EXCEPTION_FN] +#define throw_fn cp_global_trees[CPTI_THROW_FN] +#define rethrow_fn cp_global_trees[CPTI_RETHROW_FN] + +/* The type of the function-pointer argument to "__cxa_atexit" (or + "std::atexit", if "__cxa_atexit" is not being used). */ +#define atexit_fn_ptr_type_node cp_global_trees[CPTI_ATEXIT_FN_PTR_TYPE] + +/* A pointer to `std::atexit'. */ +#define atexit_node cp_global_trees[CPTI_ATEXIT] + +/* A pointer to `__dso_handle'. */ +#define dso_handle_node cp_global_trees[CPTI_DSO_HANDLE] + +/* The declaration of the dynamic_cast runtime. */ +#define dynamic_cast_node cp_global_trees[CPTI_DCAST] + +/* The type of a destructor. */ +#define cleanup_type cp_global_trees[CPTI_CLEANUP_TYPE] + +/* The type of the vtt parameter passed to subobject constructors and + destructors. */ +#define vtt_parm_type cp_global_trees[CPTI_VTT_PARM_TYPE] + +/* A node which matches any template argument. */ +#define any_targ_node cp_global_trees[CPTI_ANY_TARG] + +/* std::source_location::__impl class. */ +#define source_location_impl cp_global_trees[CPTI_SOURCE_LOCATION_IMPL] + +/* These two accessors should only be used by OVL manipulators. + Other users should use iterators and convenience functions. */ +#define OVL_FUNCTION(NODE) \ + (((struct tree_overload *) OVERLOAD_CHECK (NODE))->function) +#define OVL_CHAIN(NODE) \ + (((struct tree_overload *) OVERLOAD_CHECK (NODE))->common.chain) + +/* If set, this or a subsequent overload contains decls that need deduping. */ +#define OVL_DEDUP_P(NODE) TREE_LANG_FLAG_0 (OVERLOAD_CHECK (NODE)) +/* If set, this was imported in a using declaration. */ +#define OVL_USING_P(NODE) TREE_LANG_FLAG_1 (OVERLOAD_CHECK (NODE)) +/* If set, this overload is a hidden decl. */ +#define OVL_HIDDEN_P(NODE) TREE_LANG_FLAG_2 (OVERLOAD_CHECK (NODE)) +/* If set, this overload contains a nested overload. */ +#define OVL_NESTED_P(NODE) TREE_LANG_FLAG_3 (OVERLOAD_CHECK (NODE)) +/* If set, this overload was constructed during lookup. */ +#define OVL_LOOKUP_P(NODE) TREE_LANG_FLAG_4 (OVERLOAD_CHECK (NODE)) +/* If set, this OVL_USING_P overload is exported. */ +#define OVL_EXPORT_P(NODE) TREE_LANG_FLAG_5 (OVERLOAD_CHECK (NODE)) + +/* The first decl of an overload. */ +#define OVL_FIRST(NODE) ovl_first (NODE) +/* The name of the overload set. */ +#define OVL_NAME(NODE) DECL_NAME (OVL_FIRST (NODE)) + +/* Whether this is a set of overloaded functions. TEMPLATE_DECLS are + always wrapped in an OVERLOAD, so we don't need to check them + here. */ +#define OVL_P(NODE) \ + (TREE_CODE (NODE) == FUNCTION_DECL || TREE_CODE (NODE) == OVERLOAD) +/* Whether this is a single member overload. */ +#define OVL_SINGLE_P(NODE) (TREE_CODE (NODE) != OVERLOAD || !OVL_CHAIN (NODE)) + +/* Nonzero means that this type has an X() constructor. */ +#define TYPE_HAS_DEFAULT_CONSTRUCTOR(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->has_default_ctor) + +/* Nonzero means that NODE (a class type) has a default constructor -- + but that it has not yet been declared. 
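+   (The implicit default constructor is only declared lazily, on first use;
+   cf. lazily_declare_fn below.)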
*/ +#define CLASSTYPE_LAZY_DEFAULT_CTOR(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->lazy_default_ctor) + +/* A FUNCTION_DECL or OVERLOAD for the constructors for NODE. These + are the constructors that take an in-charge parameter. */ +#define CLASSTYPE_CONSTRUCTORS(NODE) \ + (get_class_binding_direct (NODE, ctor_identifier)) + +/* In a TREE_LIST in an attribute list, indicates that the attribute + must be applied at instantiation time. */ +#define ATTR_IS_DEPENDENT(NODE) TREE_LANG_FLAG_0 (TREE_LIST_CHECK (NODE)) + +/* In a TREE_LIST in the argument of attribute abi_tag, indicates that the tag + was inherited from a template parameter, not explicitly indicated. */ +#define ABI_TAG_IMPLICIT(NODE) TREE_LANG_FLAG_0 (TREE_LIST_CHECK (NODE)) + +/* In a TREE_LIST for a parameter-declaration-list, indicates that all the + parameters in the list have declarators enclosed in (). */ +#define PARENTHESIZED_LIST_P(NODE) TREE_LANG_FLAG_0 (TREE_LIST_CHECK (NODE)) + +/* Non zero if this is a using decl for a dependent scope. */ +#define DECL_DEPENDENT_P(NODE) DECL_LANG_FLAG_0 (USING_DECL_CHECK (NODE)) + +/* The scope named in a using decl. */ +#define USING_DECL_SCOPE(NODE) DECL_RESULT_FLD (USING_DECL_CHECK (NODE)) + +/* The decls named by a using decl. */ +#define USING_DECL_DECLS(NODE) DECL_INITIAL (USING_DECL_CHECK (NODE)) + +/* Non zero if the using decl refers to a dependent type. */ +#define USING_DECL_TYPENAME_P(NODE) DECL_LANG_FLAG_1 (USING_DECL_CHECK (NODE)) + +/* True if member using decl NODE refers to a non-inherited NODE. */ +#define USING_DECL_UNRELATED_P(NODE) DECL_LANG_FLAG_2 (USING_DECL_CHECK (NODE)) + +/* Nonzero if NODE declares a function. */ +#define DECL_DECLARES_FUNCTION_P(NODE) (TREE_CODE (NODE) == FUNCTION_DECL) + +/* Nonzero for a NODE which declares a type. */ +#define DECL_DECLARES_TYPE_P(NODE) (TREE_CODE (NODE) == TYPE_DECL) + +/* Kind bits. */ +#define IDENTIFIER_KIND_BIT_0(NODE) \ + TREE_LANG_FLAG_0 (IDENTIFIER_NODE_CHECK (NODE)) +#define IDENTIFIER_KIND_BIT_1(NODE) \ + TREE_LANG_FLAG_1 (IDENTIFIER_NODE_CHECK (NODE)) +#define IDENTIFIER_KIND_BIT_2(NODE) \ + TREE_LANG_FLAG_2 (IDENTIFIER_NODE_CHECK (NODE)) + +/* Used by various search routines. */ +#define IDENTIFIER_MARKED(NODE) TREE_LANG_FLAG_4 (IDENTIFIER_NODE_CHECK (NODE)) + +/* Nonzero if this identifier is used as a virtual function name somewhere + (optimizes searches). */ +#define IDENTIFIER_VIRTUAL_P(NODE) \ + TREE_LANG_FLAG_5 (IDENTIFIER_NODE_CHECK (NODE)) + +/* True if this identifier is a reserved word. C_RID_CODE (node) is + then the RID_* value of the keyword. Value 1. */ +#define IDENTIFIER_KEYWORD_P(NODE) \ + ((!IDENTIFIER_KIND_BIT_2 (NODE)) & (!IDENTIFIER_KIND_BIT_1 (NODE)) \ + & IDENTIFIER_KIND_BIT_0 (NODE)) + +/* True if this identifier is the name of a constructor or + destructor. Value 2 or 3. */ +#define IDENTIFIER_CDTOR_P(NODE) \ + ((!IDENTIFIER_KIND_BIT_2 (NODE)) & IDENTIFIER_KIND_BIT_1 (NODE)) + +/* True if this identifier is the name of a constructor. Value 2. */ +#define IDENTIFIER_CTOR_P(NODE) \ + (IDENTIFIER_CDTOR_P (NODE) & (!IDENTIFIER_KIND_BIT_0 (NODE))) + +/* True if this identifier is the name of a destructor. Value 3. */ +#define IDENTIFIER_DTOR_P(NODE) \ + (IDENTIFIER_CDTOR_P (NODE) & IDENTIFIER_KIND_BIT_0 (NODE)) + +/* True if this identifier is for any operator name (including + conversions). Value 4, 5, 6 or 7. */ +#define IDENTIFIER_ANY_OP_P(NODE) (IDENTIFIER_KIND_BIT_2 (NODE)) + +/* True if this identifier is for an overloaded operator. Values 4, 5. 
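   As an illustrative aside (not part of this patch), the three kind bits
   above pack an identifier's kind into a small value that the predicates
   decode; a minimal sketch of the usual dispatch, assuming an
   IDENTIFIER_NODE id and hypothetical handle_* helpers:

     // Decode the identifier kind via the predicates above.
     if (IDENTIFIER_KEYWORD_P (id))
       handle_keyword (id);           // hypothetical helper
     else if (IDENTIFIER_CTOR_P (id))
       handle_ctor_name (id);         // hypothetical helper
     else if (IDENTIFIER_DTOR_P (id))
       handle_dtor_name (id);         // hypothetical helper
     else if (IDENTIFIER_ANY_OP_P (id))
       handle_operator_name (id);     // hypothetical helper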
*/ +#define IDENTIFIER_OVL_OP_P(NODE) \ + (IDENTIFIER_ANY_OP_P (NODE) & (!IDENTIFIER_KIND_BIT_1 (NODE))) + +/* True if this identifier is for any assignment. Values 5. */ +#define IDENTIFIER_ASSIGN_OP_P(NODE) \ + (IDENTIFIER_OVL_OP_P (NODE) & IDENTIFIER_KIND_BIT_0 (NODE)) + +/* True if this identifier is the name of a type-conversion + operator. Value 7. */ +#define IDENTIFIER_CONV_OP_P(NODE) \ + (IDENTIFIER_ANY_OP_P (NODE) & IDENTIFIER_KIND_BIT_1 (NODE) \ + & (!IDENTIFIER_KIND_BIT_0 (NODE))) + +/* True if this identifier is a new or delete operator. */ +#define IDENTIFIER_NEWDEL_OP_P(NODE) \ + (IDENTIFIER_OVL_OP_P (NODE) \ + && IDENTIFIER_OVL_OP_FLAGS (NODE) & OVL_OP_FLAG_ALLOC) + +/* True if this identifier is a new operator. */ +#define IDENTIFIER_NEW_OP_P(NODE) \ + (IDENTIFIER_OVL_OP_P (NODE) \ + && (IDENTIFIER_OVL_OP_FLAGS (NODE) \ + & (OVL_OP_FLAG_ALLOC | OVL_OP_FLAG_DELETE)) \ + == OVL_OP_FLAG_ALLOC) + +/* Nonzero if the class NODE has multiple paths to the same (virtual) + base object. */ +#define CLASSTYPE_DIAMOND_SHAPED_P(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->diamond_shaped) + +/* Nonzero if the class NODE has multiple instances of the same base + type. */ +#define CLASSTYPE_REPEATED_BASE_P(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->repeated_base) + +/* The member function with which the vtable will be emitted: + the first noninline non-pure-virtual member function. NULL_TREE + if there is no key function or if this is a class template */ +#define CLASSTYPE_KEY_METHOD(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->key_method) + +/* Vector of members. During definition, it is unordered and only + member functions are present. After completion it is sorted and + contains both member functions and non-functions. STAT_HACK is + involved to preserve oneslot per name invariant. */ +#define CLASSTYPE_MEMBER_VEC(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->members) + +/* For class templates, this is a TREE_LIST of all member data, + functions, types, and friends in the order of declaration. + The TREE_PURPOSE of each TREE_LIST is NULL_TREE for a friend, + and the RECORD_TYPE for the class template otherwise. */ +#define CLASSTYPE_DECL_LIST(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->decl_list) + +/* A FUNCTION_DECL or OVERLOAD for the constructors for NODE. These + are the constructors that take an in-charge parameter. */ +#define CLASSTYPE_CONSTRUCTORS(NODE) \ + (get_class_binding_direct (NODE, ctor_identifier)) + +/* A FUNCTION_DECL for the destructor for NODE. This is the + destructors that take an in-charge parameter. If + CLASSTYPE_LAZY_DESTRUCTOR is true, then this entry will be NULL + until the destructor is created with lazily_declare_fn. */ +#define CLASSTYPE_DESTRUCTOR(NODE) \ + (get_class_binding_direct (NODE, dtor_identifier)) + +/* Nonzero if NODE has a primary base class, i.e., a base class with + which it shares the virtual function table pointer. */ +#define CLASSTYPE_HAS_PRIMARY_BASE_P(NODE) \ + (CLASSTYPE_PRIMARY_BINFO (NODE) != NULL_TREE) + +/* If non-NULL, this is the binfo for the primary base class, i.e., + the base class which contains the virtual function table pointer + for this class. */ +#define CLASSTYPE_PRIMARY_BINFO(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->primary_base) + +/* A vector of BINFOs for the direct and indirect virtual base classes + that this type uses in a post-order depth-first left-to-right + order. (In other words, these bases appear in the order that they + should be initialized.) 
*/ +#define CLASSTYPE_VBASECLASSES(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->vbases) + +/* The type corresponding to NODE when NODE is used as a base class, + i.e., NODE without virtual base classes or tail padding. */ +#define CLASSTYPE_AS_BASE(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->as_base) + +/* Nonzero if NODE is a user-defined conversion operator. */ +#define DECL_CONV_FN_P(NODE) IDENTIFIER_CONV_OP_P (DECL_NAME (NODE)) + +/* The type to which conversion operator FN converts to. */ +#define DECL_CONV_FN_TYPE(FN) \ + TREE_TYPE ((gcc_checking_assert (DECL_CONV_FN_P (FN)), DECL_NAME (FN))) + +/* Returns nonzero iff TYPE1 and TYPE2 are the same type, in the usual + sense of `same'. */ +#define same_type_p(TYPE1, TYPE2) comptypes ((TYPE1), (TYPE2), COMPARE_STRICT) + +/* Nonzero if T is a type that could resolve to any kind of concrete type + at instantiation time. */ +#define WILDCARD_TYPE_P(T) \ + (TREE_CODE (T) == TEMPLATE_TYPE_PARM || TREE_CODE (T) == TYPENAME_TYPE \ + || TREE_CODE (T) == TYPEOF_TYPE \ + || TREE_CODE (T) == BOUND_TEMPLATE_TEMPLATE_PARM \ + || TREE_CODE (T) == DECLTYPE_TYPE \ + || TREE_CODE (T) == DEPENDENT_OPERATOR_TYPE) + +/* Nonzero if T is a class (or struct or union) type. Also nonzero + for template type parameters, typename types, and instantiated + template template parameters. Keep these checks in ascending code + order. */ +#define MAYBE_CLASS_TYPE_P(T) (WILDCARD_TYPE_P (T) || CLASS_TYPE_P (T)) + +/* 1 iff FUNCTION_TYPE or METHOD_TYPE has a ref-qualifier (either & or &&). */ +#define FUNCTION_REF_QUALIFIED(NODE) \ + TREE_LANG_FLAG_4 (FUNC_OR_METHOD_CHECK (NODE)) + +/* 1 iff FUNCTION_TYPE or METHOD_TYPE has &&-ref-qualifier. */ +#define FUNCTION_RVALUE_QUALIFIED(NODE) \ + TREE_LANG_FLAG_5 (FUNC_OR_METHOD_CHECK (NODE)) + +/* Get the POINTER_TYPE to the METHOD_TYPE associated with this + pointer to member function. TYPE_PTRMEMFUNC_P _must_ be true, + before using this macro. */ +#define TYPE_PTRMEMFUNC_FN_TYPE(NODE) \ + (rs_build_qualified_type (TREE_TYPE (TYPE_FIELDS (NODE)), \ + rs_type_quals (NODE))) + +/* As above, but can be used in places that want an lvalue at the expense + of not necessarily having the correct cv-qualifiers. */ +#define TYPE_PTRMEMFUNC_FN_TYPE_RAW(NODE) (TREE_TYPE (TYPE_FIELDS (NODE))) + +/* True if this type is dependent. This predicate is only valid if + TYPE_DEPENDENT_P_VALID is true. */ +#define TYPE_DEPENDENT_P(NODE) TYPE_LANG_FLAG_0 (NODE) + +/* True if dependent_type_p has been called for this type, with the + result that TYPE_DEPENDENT_P is valid. */ +#define TYPE_DEPENDENT_P_VALID(NODE) TYPE_LANG_FLAG_6 (NODE) + +/* Nonzero for _TYPE node means that this type does not have a trivial + destructor. Therefore, destroying an object of this type will + involve a call to a destructor. This can apply to objects of + ARRAY_TYPE if the type of the elements needs a destructor. */ +#define TYPE_HAS_NONTRIVIAL_DESTRUCTOR(NODE) (TYPE_LANG_FLAG_4 (NODE)) + +/* For FUNCTION_TYPE or METHOD_TYPE, a list of the exceptions that + this type can raise. Each TREE_VALUE is a _TYPE. The TREE_VALUE + will be NULL_TREE to indicate a throw specification of `()', or + no exceptions allowed. For a noexcept specification, TREE_VALUE + is NULL_TREE and TREE_PURPOSE is the constant-expression. For + a deferred noexcept-specification, TREE_PURPOSE is a DEFERRED_NOEXCEPT + (for templates) or an OVERLOAD list of functions (for implicitly + declared functions). 
*/ +#define TYPE_RAISES_EXCEPTIONS(NODE) \ + TYPE_LANG_SLOT_1 (FUNC_OR_METHOD_CHECK (NODE)) + +/* Identifiers map directly to block or class-scope bindings. + Namespace-scope bindings are held in hash tables on the respective + namespaces. The identifier bindings are the innermost active + binding, from whence you can get the decl and/or implicit-typedef + of an elaborated type. When not bound to a local entity the + values are NULL. */ +#define IDENTIFIER_BINDING(NODE) (LANG_IDENTIFIER_CAST (NODE)->bindings) + +#define LANG_IDENTIFIER_CAST(NODE) \ + ((struct lang_identifier *) IDENTIFIER_NODE_CHECK (NODE)) + +/* IF_STMT accessors. These give access to the condition of the if + statement, the then block of the if statement, and the else block + of the if statement if it exists. */ +#define IF_COND(NODE) TREE_OPERAND (IF_STMT_CHECK (NODE), 0) +#define THEN_CLAUSE(NODE) TREE_OPERAND (IF_STMT_CHECK (NODE), 1) +#define ELSE_CLAUSE(NODE) TREE_OPERAND (IF_STMT_CHECK (NODE), 2) +#define IF_SCOPE(NODE) TREE_OPERAND (IF_STMT_CHECK (NODE), 3) +#define IF_STMT_CONSTEXPR_P(NODE) TREE_LANG_FLAG_0 (IF_STMT_CHECK (NODE)) +#define IF_STMT_CONSTEVAL_P(NODE) TREE_LANG_FLAG_2 (IF_STMT_CHECK (NODE)) + +/* The expression in question for a DECLTYPE_TYPE. */ +#define DECLTYPE_TYPE_EXPR(NODE) (TYPE_VALUES_RAW (DECLTYPE_TYPE_CHECK (NODE))) + +#define SET_CLASSTYPE_INTERFACE_UNKNOWN_X(NODE, X) \ + (LANG_TYPE_CLASS_CHECK (NODE)->interface_unknown = !!(X)) + +/* Nonzero if this class is included from a header file which employs + `#pragma interface', and it is not included in its implementation file. */ +#define CLASSTYPE_INTERFACE_ONLY(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->interface_only) + +#define TYPE_NAME_STRING(NODE) (IDENTIFIER_POINTER (TYPE_IDENTIFIER (NODE))) +#define TYPE_NAME_LENGTH(NODE) (IDENTIFIER_LENGTH (TYPE_IDENTIFIER (NODE))) + +/* Whether a PARM_DECL represents a local parameter in a + requires-expression. */ +#define CONSTRAINT_VAR_P(NODE) DECL_LANG_FLAG_2 (TREE_CHECK (NODE, PARM_DECL)) + +/* In a CALL_EXPR appearing in a template, true if Koenig lookup + should be performed at instantiation time. */ +#define KOENIG_LOOKUP_P(NODE) TREE_LANG_FLAG_0 (CALL_EXPR_CHECK (NODE)) + +/* The index of a user-declared parameter in its function, starting at 1. + All artificial parameters will have index 0. */ +#define DECL_PARM_INDEX(NODE) (LANG_DECL_PARM_CHECK (NODE)->index) + +/* The level of a user-declared parameter in its function, starting at 1. + A parameter of the function will have level 1; a parameter of the first + nested function declarator (i.e. t in void f (void (*p)(T t))) will have + level 2. */ +#define DECL_PARM_LEVEL(NODE) (LANG_DECL_PARM_CHECK (NODE)->level) + +/* These flags are used by the conversion code. + CONV_IMPLICIT : Perform implicit conversions (standard and user-defined). + CONV_STATIC : Perform the explicit conversions for static_cast. + CONV_CONST : Perform the explicit conversions for const_cast. + CONV_REINTERPRET: Perform the explicit conversions for reinterpret_cast. + CONV_PRIVATE : Perform upcasts to private bases. + CONV_FORCE_TEMP : Require a new temporary when converting to the same + aggregate type. 
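   As an illustrative aside (not part of this patch), the CONV_* values
   defined just below are bit flags that callers OR together into a single
   request mask; a minimal sketch, where the local mask variable is an
   illustration only:

     // Request the conversions allowed for a C-style cast, then test one bit.
     int convflags = CONV_C_CAST;   // implicit | static | const | reinterpret
                                    // | private | force_temp
     if (convflags & CONV_REINTERPRET)
       {
         // reinterpret-style conversions are permitted by this request
       }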
*/ + +#define CONV_IMPLICIT 1 +#define CONV_STATIC 2 +#define CONV_CONST 4 +#define CONV_REINTERPRET 8 +#define CONV_PRIVATE 16 +#define CONV_FORCE_TEMP 32 +#define CONV_FOLD 64 +#define CONV_OLD_CONVERT \ + (CONV_IMPLICIT | CONV_STATIC | CONV_CONST | CONV_REINTERPRET) +#define CONV_C_CAST \ + (CONV_IMPLICIT | CONV_STATIC | CONV_CONST | CONV_REINTERPRET | CONV_PRIVATE \ + | CONV_FORCE_TEMP) +#define CONV_BACKEND_CONVERT (CONV_OLD_CONVERT | CONV_FOLD) + +/* Used by build_expr_type_conversion to indicate which types are + acceptable as arguments to the expression under consideration. */ + +#define WANT_INT 1 /* integer types, including bool */ +#define WANT_FLOAT 2 /* floating point types */ +#define WANT_ENUM 4 /* enumerated types */ +#define WANT_POINTER 8 /* pointer types */ +#define WANT_NULL 16 /* null pointer constant */ +#define WANT_VECTOR_OR_COMPLEX 32 /* vector or complex types */ +#define WANT_ARITH (WANT_INT | WANT_FLOAT | WANT_VECTOR_OR_COMPLEX) + +/* Used with comptypes, and related functions, to guide type + comparison. */ + +#define COMPARE_STRICT \ + 0 /* Just check if the types are the \ + same. */ +#define COMPARE_BASE \ + 1 /* Check to see if the second type is \ + derived from the first. */ +#define COMPARE_DERIVED \ + 2 /* Like COMPARE_BASE, but in \ + reverse. */ +#define COMPARE_REDECLARATION \ + 4 /* The comparison is being done when \ + another declaration of an existing \ + entity is seen. */ +#define COMPARE_STRUCTURAL \ + 8 /* The comparison is intended to be \ + structural. The actual comparison \ + will be identical to \ + COMPARE_STRICT. */ + +/* Used with start function. */ +#define SF_DEFAULT 0 /* No flags. */ +#define SF_PRE_PARSED \ + 1 /* The function declaration has \ + already been parsed. */ +#define SF_INCLASS_INLINE \ + 2 /* The function is an inline, defined \ + in the class body. */ + +/* Used with start_decl's initialized parameter. */ +#define SD_UNINITIALIZED 0 +#define SD_INITIALIZED 1 +/* Like SD_INITIALIZED, but also mark the new decl as DECL_DECOMPOSITION_P. */ +#define SD_DECOMPOSITION 2 +#define SD_DEFAULTED 3 +#define SD_DELETED 4 + +/* Returns nonzero iff TYPE1 and TYPE2 are the same type, in the usual + sense of `same'. */ +#define same_type_p(TYPE1, TYPE2) comptypes ((TYPE1), (TYPE2), COMPARE_STRICT) + +/* Returns true if NODE is a pointer-to-data-member. */ +#define TYPE_PTRDATAMEM_P(NODE) (TREE_CODE (NODE) == OFFSET_TYPE) + +/* Nonzero if this type is const-qualified. */ +#define RS_TYPE_CONST_P(NODE) ((rs_type_quals (NODE) & TYPE_QUAL_CONST) != 0) + +/* The _DECL for this _TYPE. */ +#define TYPE_MAIN_DECL(NODE) (TYPE_STUB_DECL (TYPE_MAIN_VARIANT (NODE))) + +/* Nonzero for a VAR_DECL iff an explicit initializer was provided + or a non-trivial constructor is called. */ +#define DECL_NONTRIVIALLY_INITIALIZED_P(NODE) \ + (TREE_LANG_FLAG_6 (VAR_DECL_CHECK (NODE))) + +/* Nonzero if DECL was declared with '= default' (maybe implicitly). */ +#define DECL_DEFAULTED_FN(DECL) (LANG_DECL_FN_CHECK (DECL)->defaulted_p) + +/* Nonzero for a class type means that the class type has a + user-declared constructor. */ +#define TYPE_HAS_USER_CONSTRUCTOR(NODE) (TYPE_LANG_FLAG_1 (NODE)) + +/* A FUNCTION_DECL or OVERLOAD for the constructors for NODE. These + are the constructors that take an in-charge parameter. */ +#define CLASSTYPE_CONSTRUCTORS(NODE) \ + (get_class_binding_direct (NODE, ctor_identifier)) + +/* Nonzero if the DECL was initialized in the class definition itself, + rather than outside the class. 
This is used for both static member + VAR_DECLS, and FUNCTION_DECLS that are defined in the class. */ +#define DECL_INITIALIZED_IN_CLASS_P(DECL) \ + (DECL_LANG_SPECIFIC (VAR_OR_FUNCTION_DECL_CHECK (DECL)) \ + ->u.base.initialized_in_class) + +/* Nonzero if DECL is explicitly defaulted in the class body. */ +#define DECL_DEFAULTED_IN_CLASS_P(DECL) \ + (DECL_DEFAULTED_FN (DECL) && DECL_INITIALIZED_IN_CLASS_P (DECL)) + +/* Nonzero for FUNCTION_DECL means that this decl is a non-static + member function. */ +#define DECL_NONSTATIC_MEMBER_FUNCTION_P(NODE) \ + (TREE_CODE (TREE_TYPE (NODE)) == METHOD_TYPE) + +/* For FUNCTION_DECLs: nonzero means that this function is a + constructor or a destructor with an extra in-charge parameter to + control whether or not virtual bases are constructed. */ +#define DECL_HAS_IN_CHARGE_PARM_P(NODE) \ + (LANG_DECL_FN_CHECK (NODE)->has_in_charge_parm_p) + +/* Nonzero if the VTT parm has been added to NODE. */ +#define DECL_HAS_VTT_PARM_P(NODE) (LANG_DECL_FN_CHECK (NODE)->has_vtt_parm_p) + +/* Given a FUNCTION_DECL, returns the first TREE_LIST out of TYPE_ARG_TYPES + which refers to a user-written parameter. */ +#define FUNCTION_FIRST_USER_PARMTYPE(NODE) \ + skip_artificial_parms_for ((NODE), TYPE_ARG_TYPES (TREE_TYPE (NODE))) + +/* Similarly, but for DECL_ARGUMENTS. */ +#define FUNCTION_FIRST_USER_PARM(NODE) \ + skip_artificial_parms_for ((NODE), DECL_ARGUMENTS (NODE)) + +/* For FUNCTION_DECLs and TEMPLATE_DECLs: nonzero means that this function + is a constructor. */ +#define DECL_CONSTRUCTOR_P(NODE) DECL_CXX_CONSTRUCTOR_P (NODE) + +/* Nonzero if DECL was declared with '= delete'. */ +#define DECL_DELETED_FN(DECL) \ + (LANG_DECL_FN_CHECK (DECL)->min.base.threadprivate_or_deleted_p) + +/* Nonzero if DECL was declared with '= default' (maybe implicitly). */ +#define DECL_DEFAULTED_FN(DECL) (LANG_DECL_FN_CHECK (DECL)->defaulted_p) + +/* True if NODE is a brace-enclosed initializer. */ +#define BRACE_ENCLOSED_INITIALIZER_P(NODE) \ + (TREE_CODE (NODE) == CONSTRUCTOR && TREE_TYPE (NODE) == init_list_type_node) + +/* True if FNDECL is an immediate function. */ +#define DECL_IMMEDIATE_FUNCTION_P(NODE) \ + (DECL_LANG_SPECIFIC (FUNCTION_DECL_CHECK (NODE)) \ + ? LANG_DECL_FN_CHECK (NODE)->immediate_fn_p \ + : false) +#define SET_DECL_IMMEDIATE_FUNCTION_P(NODE) \ + (retrofit_lang_decl (FUNCTION_DECL_CHECK (NODE)), \ + LANG_DECL_FN_CHECK (NODE)->immediate_fn_p = true) + +/* True if this CONSTRUCTOR should not be used as a variable initializer + because it was loaded from a constexpr variable with mutable fields. */ +#define CONSTRUCTOR_MUTABLE_POISON(NODE) \ + (TREE_LANG_FLAG_2 (CONSTRUCTOR_CHECK (NODE))) + +/* For a pointer-to-member constant `X::Y' this is the _DECL for + `Y'. */ +#define PTRMEM_CST_MEMBER(NODE) \ + (((ptrmem_cst_t) PTRMEM_CST_CHECK (NODE))->member) + +/* Indicates whether a COMPONENT_REF or a SCOPE_REF has been parenthesized, an + INDIRECT_REF comes from parenthesizing a _DECL, or a PAREN_EXPR identifies a + parenthesized initializer relevant for decltype(auto). Currently only set + some of the time in C++14 mode. */ + +#define REF_PARENTHESIZED_P(NODE) \ + TREE_LANG_FLAG_2 (TREE_CHECK5 ((NODE), COMPONENT_REF, INDIRECT_REF, \ + SCOPE_REF, VIEW_CONVERT_EXPR, PAREN_EXPR)) + +/* Returns true if NODE is a pointer-to-member. */ +#define TYPE_PTRMEM_P(NODE) \ + (TYPE_PTRDATAMEM_P (NODE) || TYPE_PTRMEMFUNC_P (NODE)) + +/* Returns true if NODE is a pointer or a pointer-to-member. 
*/ +#define TYPE_PTR_OR_PTRMEM_P(NODE) (TYPE_PTR_P (NODE) || TYPE_PTRMEM_P (NODE)) + +/* Nonzero if NODE is an artificial VAR_DECL for a C++17 structured binding + declaration or one of VAR_DECLs for the user identifiers in it. */ +#define DECL_DECOMPOSITION_P(NODE) \ + (VAR_P (NODE) && DECL_LANG_SPECIFIC (NODE) \ + ? DECL_LANG_SPECIFIC (NODE)->u.base.selector == lds_decomp \ + : false) + +/* The underlying artificial VAR_DECL for structured binding. */ +#define DECL_DECOMP_BASE(NODE) (LANG_DECL_DECOMP_CHECK (NODE)->base) + +/* Nonzero if either DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P or + DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P is true of NODE. */ +#define DECL_MAYBE_IN_CHARGE_CDTOR_P(NODE) \ + (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (NODE) \ + || DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (NODE)) + +/* Nonzero if NODE (a FUNCTION_DECL) is a destructor, but not the + specialized in-charge constructor, in-charge deleting constructor, + or the base destructor. */ +#define DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == dtor_identifier) + +/* Nonzero if NODE (a _DECL) is a cloned constructor or + destructor. */ +#define DECL_CLONED_FUNCTION_P(NODE) \ + (DECL_NAME (NODE) && IDENTIFIER_CDTOR_P (DECL_NAME (NODE)) \ + && !DECL_MAYBE_IN_CHARGE_CDTOR_P (NODE)) + +/* If DECL_CLONED_FUNCTION_P holds, this is the function that was + cloned. */ +#define DECL_CLONED_FUNCTION(NODE) \ + (DECL_LANG_SPECIFIC (FUNCTION_DECL_CHECK (NODE))->u.fn.u5.cloned_function) + +/* Nonzero if NODE (a _DECL) is a cloned constructor or + destructor. */ +#define DECL_CLONED_FUNCTION_P(NODE) \ + (DECL_NAME (NODE) && IDENTIFIER_CDTOR_P (DECL_NAME (NODE)) \ + && !DECL_MAYBE_IN_CHARGE_CDTOR_P (NODE)) + +/* Nonzero if NODE (a FUNCTION_DECL) is a constructor, but not either the + specialized in-charge constructor or the specialized not-in-charge + constructor. */ +#define DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == ctor_identifier) + +/* The current C++-specific per-function global variables. */ + +#define cp_function_chain (cfun->language) + +/* In a constructor destructor, the point at which all derived class + destroying/construction has been done. I.e., just before a + constructor returns, or before any base class destroying will be done + in a destructor. */ + +#define cdtor_label cp_function_chain->x_cdtor_label + +/* When we're processing a member function, current_class_ptr is the + PARM_DECL for the `this' pointer. The current_class_ref is an + expression for `*this'. */ + +#define current_class_ptr \ + (*(cfun && cp_function_chain ? &cp_function_chain->x_current_class_ptr \ + : &scope_chain->x_current_class_ptr)) +#define current_class_ref \ + (*(cfun && cp_function_chain ? &cp_function_chain->x_current_class_ref \ + : &scope_chain->x_current_class_ref)) + +/* The EH_SPEC_BLOCK for the exception-specifiers for the current + function, if any. */ + +#define current_eh_spec_block cp_function_chain->x_eh_spec_block + +/* The `__in_chrg' parameter for the current function. Only used for + constructors and destructors. */ + +#define current_in_charge_parm cp_function_chain->x_in_charge_parm + +/* The `__vtt_parm' parameter for the current function. Only used for + constructors and destructors. */ + +#define current_vtt_parm cp_function_chain->x_vtt_parm + +/* A boolean flag to control whether we need to clean up the return value if a + local destructor throws. Only used in functions that return by value a + class with a destructor. Which 'tors don't, so we can use the same + field as current_vtt_parm. 
*/ + +#define current_retval_sentinel current_vtt_parm + +/* Set to 0 at beginning of a function definition, set to 1 if + a return statement that specifies a return value is seen. */ + +#define current_function_returns_value cp_function_chain->returns_value + +/* Set to 0 at beginning of a function definition, set to 1 if + a return statement with no argument is seen. */ + +#define current_function_returns_null cp_function_chain->returns_null + +/* Set to 0 at beginning of a function definition, set to 1 if + a call to a noreturn function is seen. */ + +#define current_function_returns_abnormally \ + cp_function_chain->returns_abnormally + +/* Set to 0 at beginning of a function definition, set to 1 if we see an + obvious infinite loop. This can have false positives and false + negatives, so it should only be used as a heuristic. */ + +#define current_function_infinite_loop cp_function_chain->infinite_loop + +/* Nonzero if we are processing a base initializer. Zero elsewhere. */ +#define in_base_initializer cp_function_chain->x_in_base_initializer + +#define in_function_try_handler cp_function_chain->x_in_function_try_handler + +/* Expression always returned from function, or error_mark_node + otherwise, for use by the automatic named return value optimization. */ + +#define current_function_return_value (cp_function_chain->x_return_value) + +#define current_class_type scope_chain->class_type + +#define in_discarded_stmt scope_chain->discarded_stmt +#define in_consteval_if_p scope_chain->consteval_if_p + +/* Nonzero means that this type is being defined. I.e., the left brace + starting the definition of this type has been seen. */ +#define TYPE_BEING_DEFINED(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->being_defined) + +/* Nonzero for FUNCTION_DECL means that this decl is a static + member function. */ +#define DECL_STATIC_FUNCTION_P(NODE) \ + (LANG_DECL_FN_CHECK (NODE)->static_function) + +/* Nonzero for FUNCTION_DECL means that this decl is a non-static + member function. */ +#define DECL_NONSTATIC_MEMBER_FUNCTION_P(NODE) \ + (TREE_CODE (TREE_TYPE (NODE)) == METHOD_TYPE) + +/* Nonzero for FUNCTION_DECL means that this decl is a member function + (static or non-static). */ +#define DECL_FUNCTION_MEMBER_P(NODE) \ + (DECL_NONSTATIC_MEMBER_FUNCTION_P (NODE) || DECL_STATIC_FUNCTION_P (NODE)) + +/* Nonzero if NODE is the target for genericization of 'return' stmts + in constructors/destructors of targetm.cxx.cdtor_returns_this targets. */ +#define LABEL_DECL_CDTOR(NODE) DECL_LANG_FLAG_2 (LABEL_DECL_CHECK (NODE)) + +/* Nonzero if this NOP_EXPR is a reinterpret_cast. Such conversions + are not constexprs. Other NOP_EXPRs are. */ +#define REINTERPRET_CAST_P(NODE) TREE_LANG_FLAG_0 (NOP_EXPR_CHECK (NODE)) + +/* Returns true if NODE is an object type: + + [basic.types] + + An object type is a (possibly cv-qualified) type that is not a + function type, not a reference type, and not a void type. + + Keep these checks in ascending order, for speed. */ +#define TYPE_OBJ_P(NODE) \ + (!TYPE_REF_P (NODE) && !VOID_TYPE_P (NODE) && !FUNC_OR_METHOD_TYPE_P (NODE)) + +/* Returns true if NODE is a pointer to an object. Keep these checks + in ascending tree code order. */ +#define TYPE_PTROB_P(NODE) (TYPE_PTR_P (NODE) && TYPE_OBJ_P (TREE_TYPE (NODE))) + +/* True if this CONSTRUCTOR contains PLACEHOLDER_EXPRs referencing the + CONSTRUCTOR's type not nested inside another CONSTRUCTOR marked with + CONSTRUCTOR_PLACEHOLDER_BOUNDARY. 
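   As an illustrative aside (not part of this patch), a small worked example
   of the two object-type predicates above, assuming GCC's global
   integer_type_node and void_type_node:

     // `int *' points to an object type; `void *' does not, since void
     // fails TYPE_OBJ_P even though both satisfy TYPE_PTR_P.
     tree int_ptr = build_pointer_type (integer_type_node);
     tree void_ptr = build_pointer_type (void_type_node);
     bool p1 = TYPE_PTROB_P (int_ptr);   // true
     bool p2 = TYPE_PTROB_P (void_ptr);  // false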
*/ +#define CONSTRUCTOR_PLACEHOLDER_BOUNDARY(NODE) \ + (TREE_LANG_FLAG_5 (CONSTRUCTOR_CHECK (NODE))) + +#define AGGR_INIT_EXPR_SLOT(NODE) TREE_OPERAND (AGGR_INIT_EXPR_CHECK (NODE), 2) + +/* True if this TARGET_EXPR expresses direct-initialization of an object + to be named later. */ +#define TARGET_EXPR_DIRECT_INIT_P(NODE) \ + TREE_LANG_FLAG_2 (TARGET_EXPR_CHECK (NODE)) + +/* Nonzero if DECL is a declaration of __builtin_constant_p. */ +#define DECL_IS_BUILTIN_CONSTANT_P(NODE) \ + (TREE_CODE (NODE) == FUNCTION_DECL \ + && DECL_BUILT_IN_CLASS (NODE) == BUILT_IN_NORMAL \ + && DECL_FUNCTION_CODE (NODE) == BUILT_IN_CONSTANT_P) + +/* True iff this represents an lvalue being treated as an rvalue during return + or throw as per [class.copy.elision]. */ +#define IMPLICIT_RVALUE_P(NODE) \ + TREE_LANG_FLAG_3 (TREE_CHECK2 ((NODE), NON_LVALUE_EXPR, STATIC_CAST_EXPR)) + +/* Nonzero for _DECL means that this decl appears in (or will appear + in) as a member in a RECORD_TYPE or UNION_TYPE node. It is also for + detecting circularity in case members are multiply defined. In the + case of a VAR_DECL, it means that no definition has been seen, even + if an initializer has been. */ +#define DECL_IN_AGGR_P(NODE) (DECL_LANG_FLAG_3 (NODE)) + +/* Nonzero means that this class type is a non-standard-layout class. */ +#define CLASSTYPE_NON_STD_LAYOUT(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->non_std_layout) + +/* Nonzero for FIELD_DECL node means that this field is a base class + of the parent object, as opposed to a member field. */ +#define DECL_FIELD_IS_BASE(NODE) DECL_LANG_FLAG_6 (FIELD_DECL_CHECK (NODE)) + +/* Nonzero if TYPE is an anonymous union type. */ +#define ANON_UNION_TYPE_P(NODE) \ + (TREE_CODE (NODE) == UNION_TYPE && ANON_AGGR_TYPE_P (NODE)) + +/* For an ANON_AGGR_TYPE_P the single FIELD_DECL it is used with. */ +#define ANON_AGGR_TYPE_FIELD(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->typeinfo_var) + +/* Nonzero if TYPE is an anonymous union or struct type. We have to use a + flag for this because "A union for which objects or pointers are + declared is not an anonymous union" [class.union]. */ +#define ANON_AGGR_TYPE_P(NODE) \ + (CLASS_TYPE_P (NODE) && LANG_TYPE_CLASS_CHECK (NODE)->anon_aggr) +#define SET_ANON_AGGR_TYPE_P(NODE) (LANG_TYPE_CLASS_CHECK (NODE)->anon_aggr = 1) + +/* Nonzero if T is a class type but not a union. */ +#define NON_UNION_CLASS_TYPE_P(T) \ + (TREE_CODE (T) == RECORD_TYPE && TYPE_LANG_FLAG_5 (T)) + +/* Determines whether an ENUMERAL_TYPE has an explicit + underlying type. */ +#define ENUM_FIXED_UNDERLYING_TYPE_P(NODE) (TYPE_LANG_FLAG_5 (NODE)) + +/* Returns the underlying type of the given enumeration type. The + underlying type is determined in different ways, depending on the + properties of the enum: + + - In C++0x, the underlying type can be explicitly specified, e.g., + + enum E1 : char { ... } // underlying type is char + + - In a C++0x scoped enumeration, the underlying type is int + unless otherwises specified: + + enum class E2 { ... } // underlying type is int + + - Otherwise, the underlying type is determined based on the + values of the enumerators. In this case, the + ENUM_UNDERLYING_TYPE will not be set until after the definition + of the enumeration is completed by finish_enum. */ +#define ENUM_UNDERLYING_TYPE(TYPE) TREE_TYPE (ENUMERAL_TYPE_CHECK (TYPE)) + +/* Nonzero if this type is volatile-qualified. 
*/ +#define RS_TYPE_VOLATILE_P(NODE) \ + ((rs_type_quals (NODE) & TYPE_QUAL_VOLATILE) != 0) + +/* Nonzero means that this type is either complete or being defined, so we + can do lookup in it. */ +#define COMPLETE_OR_OPEN_TYPE_P(NODE) \ + (COMPLETE_TYPE_P (NODE) || (CLASS_TYPE_P (NODE) && TYPE_BEING_DEFINED (NODE))) + +/* Indicates when overload resolution may resolve to a pointer to + member function. [expr.unary.op]/3 */ +#define PTRMEM_OK_P(NODE) \ + TREE_LANG_FLAG_0 (TREE_CHECK3 ((NODE), ADDR_EXPR, OFFSET_REF, SCOPE_REF)) + +/* Returns nonzero iff NODE is a declaration for the global function + `main'. */ +#define DECL_MAIN_P(NODE) \ + (DECL_NAME (NODE) != NULL_TREE && MAIN_NAME_P (DECL_NAME (NODE)) \ + && flag_hosted) + +/* Nonzero if the variable was declared to be thread-local. + We need a special C++ version of this test because the middle-end + DECL_THREAD_LOCAL_P uses the symtab, so we can't use it for + templates. */ +#define RS_DECL_THREAD_LOCAL_P(NODE) (TREE_LANG_FLAG_0 (VAR_DECL_CHECK (NODE))) + +#define COND_EXPR_IS_VEC_DELETE(NODE) TREE_LANG_FLAG_0 (COND_EXPR_CHECK (NODE)) + +/* RANGE_FOR_STMT accessors. These give access to the declarator, + expression, body, and scope of the statement, respectively. */ +#define RANGE_FOR_DECL(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 0) +#define RANGE_FOR_EXPR(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 1) +#define RANGE_FOR_BODY(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 2) +#define RANGE_FOR_SCOPE(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 3) +#define RANGE_FOR_UNROLL(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 4) +#define RANGE_FOR_INIT_STMT(NODE) TREE_OPERAND (RANGE_FOR_STMT_CHECK (NODE), 5) +#define RANGE_FOR_IVDEP(NODE) TREE_LANG_FLAG_6 (RANGE_FOR_STMT_CHECK (NODE)) + +#define CP_DECL_CONTEXT(NODE) \ + (!DECL_FILE_SCOPE_P (NODE) ? DECL_CONTEXT (NODE) : global_namespace) +#define CP_TYPE_CONTEXT(NODE) \ + (!TYPE_FILE_SCOPE_P (NODE) ? TYPE_CONTEXT (NODE) : global_namespace) +#define FROB_CONTEXT(NODE) \ + ((NODE) == global_namespace ? DECL_CONTEXT (NODE) : (NODE)) + +/* Nonzero if NODE is the std namespace. */ +#define DECL_NAMESPACE_STD_P(NODE) ((NODE) == std_node) + +/* Whether the namepace is an inline namespace. */ +#define DECL_NAMESPACE_INLINE_P(NODE) \ + TREE_LANG_FLAG_0 (NAMESPACE_DECL_CHECK (NODE)) + +#define CP_DECL_CONTEXT(NODE) \ + (!DECL_FILE_SCOPE_P (NODE) ? DECL_CONTEXT (NODE) : global_namespace) + +/* Based off of TYPE_UNNAMED_P. */ +#define LAMBDA_TYPE_P(NODE) \ + (TREE_CODE (NODE) == RECORD_TYPE && TYPE_LINKAGE_IDENTIFIER (NODE) \ + && IDENTIFIER_LAMBDA_P (TYPE_LINKAGE_IDENTIFIER (NODE))) + +/* Macros to make error reporting functions' lives easier. */ +#define TYPE_LINKAGE_IDENTIFIER(NODE) \ + (TYPE_IDENTIFIER (TYPE_MAIN_VARIANT (NODE))) + +/* Identifiers used for lambda types are almost anonymous. Use this + spare flag to distinguish them (they also have the anonymous flag). */ +#define IDENTIFIER_LAMBDA_P(NODE) \ + (IDENTIFIER_NODE_CHECK (NODE)->base.protected_flag) + +/* If NODE, a FUNCTION_DECL, is a C++11 inheriting constructor, then this + is the constructor it inherits from. */ +#define DECL_INHERITED_CTOR(NODE) \ + (DECL_DECLARES_FUNCTION_P (NODE) && DECL_CONSTRUCTOR_P (NODE) \ + ? LANG_DECL_FN_CHECK (NODE)->context \ + : NULL_TREE) + +/* True if the class type TYPE is a literal type. */ +#define CLASSTYPE_LITERAL_P(TYPE) (LANG_TYPE_CLASS_CHECK (TYPE)->is_literal) + +/* Nonzero if NODE (a FUNCTION_DECL or TEMPLATE_DECL) + is a destructor. 
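   As an illustrative aside (not part of this patch), CP_DECL_CONTEXT above
   normalizes a file-scope declaration's context to the global namespace; a
   minimal sketch, assuming a file-scope VAR_DECL v:

     // For a file-scope decl, DECL_FILE_SCOPE_P is true, so the macro
     // yields global_namespace instead of the raw DECL_CONTEXT.
     tree scope = CP_DECL_CONTEXT (v);
     bool at_toplevel = (scope == global_namespace);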
*/ +#define DECL_DESTRUCTOR_P(NODE) DECL_CXX_DESTRUCTOR_P (NODE) + +/* Nonzero if TYPE has a trivial destructor. From [class.dtor]: + + A destructor is trivial if it is an implicitly declared + destructor and if: + + - all of the direct base classes of its class have trivial + destructors, + + - for all of the non-static data members of its class that are + of class type (or array thereof), each such class has a + trivial destructor. */ +#define TYPE_HAS_TRIVIAL_DESTRUCTOR(NODE) \ + (!TYPE_HAS_NONTRIVIAL_DESTRUCTOR (NODE)) + +/* Nonzero means that NODE (a class type) has a destructor -- but that + it has not yet been declared. */ +#define CLASSTYPE_LAZY_DESTRUCTOR(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->lazy_destructor) + +/* Nonzero if NODE (a FUNCTION_DECL) is a constructor for a complete + object. */ +#define DECL_COMPLETE_CONSTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == complete_ctor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a constructor for a base + object. */ +#define DECL_BASE_CONSTRUCTOR_P(NODE) (DECL_NAME (NODE) == base_ctor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a constructor, but not either the + specialized in-charge constructor or the specialized not-in-charge + constructor. */ +#define DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == ctor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a copy constructor. */ +#define DECL_COPY_CONSTRUCTOR_P(NODE) \ + (DECL_CONSTRUCTOR_P (NODE) && copy_fn_p (NODE) > 0) + +/* Nonzero if NODE (a FUNCTION_DECL) is a move constructor. */ +#define DECL_MOVE_CONSTRUCTOR_P(NODE) \ + (DECL_CONSTRUCTOR_P (NODE) && move_fn_p (NODE)) + +/* Nonzero if NODE (a FUNCTION_DECL) is a destructor, but not the + specialized in-charge constructor, in-charge deleting constructor, + or the base destructor. */ +#define DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == dtor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a destructor for a complete + object. */ +#define DECL_COMPLETE_DESTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == complete_dtor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a destructor for a base + object. */ +#define DECL_BASE_DESTRUCTOR_P(NODE) (DECL_NAME (NODE) == base_dtor_identifier) + +/* Nonzero if NODE (a FUNCTION_DECL) is a destructor for a complete + object that deletes the object after it has been destroyed. */ +#define DECL_DELETING_DESTRUCTOR_P(NODE) \ + (DECL_NAME (NODE) == deleting_dtor_identifier) + +/* Nonzero if either DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P or + DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P is true of NODE. */ +#define DECL_MAYBE_IN_CHARGE_CDTOR_P(NODE) \ + (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (NODE) \ + || DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (NODE)) + +/* Nonzero if NODE (a _DECL) is a cloned constructor or + destructor. */ +#define DECL_CLONED_FUNCTION_P(NODE) \ + (DECL_NAME (NODE) && IDENTIFIER_CDTOR_P (DECL_NAME (NODE)) \ + && !DECL_MAYBE_IN_CHARGE_CDTOR_P (NODE)) + +/* If DECL_CLONED_FUNCTION_P holds, this is the function that was + cloned. */ +#define DECL_CLONED_FUNCTION(NODE) \ + (DECL_LANG_SPECIFIC (FUNCTION_DECL_CHECK (NODE))->u.fn.u5.cloned_function) + +/* Nonzero means that an object of this type cannot be initialized using + an initializer list. */ +#define CLASSTYPE_NON_AGGREGATE(NODE) \ + (LANG_TYPE_CLASS_CHECK (NODE)->non_aggregate) +#define TYPE_NON_AGGREGATE_CLASS(NODE) \ + (CLASS_TYPE_P (NODE) && CLASSTYPE_NON_AGGREGATE (NODE)) + +/* Nonzero for class type means that the default constructor is trivial. 
*/
+#define TYPE_HAS_TRIVIAL_DFLT(NODE) \
+  (TYPE_HAS_DEFAULT_CONSTRUCTOR (NODE) && !TYPE_HAS_COMPLEX_DFLT (NODE))
+
+/* Nonzero if this class has a constexpr constructor other than a copy/move
+   constructor.  Note that a class can have constexpr constructors for
+   static initialization even if it isn't a literal class.  */
+#define TYPE_HAS_CONSTEXPR_CTOR(NODE) \
+  (LANG_TYPE_CLASS_CHECK (NODE)->has_constexpr_ctor)
+
+/* Nonzero if there is no trivial default constructor for this class.  */
+#define TYPE_HAS_COMPLEX_DFLT(NODE) \
+  (LANG_TYPE_CLASS_CHECK (NODE)->has_complex_dflt)
+
+/* [dcl.init.aggr]
+
+   An aggregate is an array or a class with no user-provided
+   constructors, no brace-or-equal-initializers for non-static data
+   members, no private or protected non-static data members, no
+   base classes, and no virtual functions.
+
+   As an extension, we also treat vectors as aggregates.  Keep these
+   checks in ascending code order.  */
+#define CP_AGGREGATE_TYPE_P(TYPE) \
+  (gnu_vector_type_p (TYPE) || TREE_CODE (TYPE) == ARRAY_TYPE \
+   || (CLASS_TYPE_P (TYPE) && COMPLETE_TYPE_P (TYPE) \
+       && !CLASSTYPE_NON_AGGREGATE (TYPE)))
+
+/* Nonzero for a FIELD_DECL means that this member object type
+   is mutable.  */
+#define DECL_MUTABLE_P(NODE) (DECL_LANG_FLAG_0 (FIELD_DECL_CHECK (NODE)))
+
+#if defined ENABLE_TREE_CHECKING
+
+#define LANG_DECL_MIN_CHECK(NODE) \
+  __extension__({ \
+    struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
+    if (!LANG_DECL_HAS_MIN (NODE)) \
+      lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
+    &lt->u.min; \
+  })
+
+/* We want to be able to check DECL_CONSTRUCTOR_P and such on a function
+   template, not just on a FUNCTION_DECL.  So when looking for things in
+   lang_decl_fn, look down through a TEMPLATE_DECL into its result.  */
+#define LANG_DECL_FN_CHECK(NODE) \
+  __extension__({ \
+    struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
+    if (!DECL_DECLARES_FUNCTION_P (NODE) || lt->u.base.selector != lds_fn) \
+      lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
+    &lt->u.fn; \
+  })
+
+#define LANG_DECL_NS_CHECK(NODE) \
+  __extension__({ \
+    struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
+    if (TREE_CODE (NODE) != NAMESPACE_DECL || lt->u.base.selector != lds_ns) \
+      lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
+    &lt->u.ns; \
+  })
+
+#define LANG_DECL_PARM_CHECK(NODE) \
+  __extension__({ \
+    struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
+    if (TREE_CODE (NODE) != PARM_DECL || lt->u.base.selector != lds_parm) \
+      lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
+    &lt->u.parm; \
+  })
+
+#define LANG_DECL_DECOMP_CHECK(NODE) \
+  __extension__({ \
+    struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
+    if (!VAR_P (NODE) || lt->u.base.selector != lds_decomp) \
+      lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
+    &lt->u.decomp; \
+  })
+
+#else
+
+#define LANG_DECL_MIN_CHECK(NODE) (&DECL_LANG_SPECIFIC (NODE)->u.min)
+
+#define LANG_DECL_FN_CHECK(NODE) (&DECL_LANG_SPECIFIC (NODE)->u.fn)
+
+#define LANG_DECL_NS_CHECK(NODE) (&DECL_LANG_SPECIFIC (NODE)->u.ns)
+
+#define LANG_DECL_PARM_CHECK(NODE) (&DECL_LANG_SPECIFIC (NODE)->u.parm)
+
+#define LANG_DECL_DECOMP_CHECK(NODE) (&DECL_LANG_SPECIFIC (NODE)->u.decomp)
+
+#endif /* ENABLE_TREE_CHECKING */
+
+// Below macros are copied from gcc/c-family/c-common.h
+
+/* In a FIELD_DECL, nonzero if the decl was originally a bitfield.
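   As an illustrative aside (not part of this patch), the checking accessors
   above validate the lang_decl selector before handing back the matching
   view, aborting through lang_check_failed on a mismatch when tree checking
   is enabled; a minimal sketch, assuming a FUNCTION_DECL fndecl that already
   has DECL_LANG_SPECIFIC:

     // Equivalent to DECL_DEFAULTED_FN (fndecl) defined earlier in this header.
     if (DECL_LANG_SPECIFIC (fndecl))
       {
         bool defaulted = LANG_DECL_FN_CHECK (fndecl)->defaulted_p;
         // use `defaulted' ...
       }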
*/ @@ -181,17 +1631,6 @@ ((rs_type_quals (NODE) & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)) \ == TYPE_QUAL_CONST) -/* [basic.fundamental] - - Types bool, char, wchar_t, and the signed and unsigned integer types - are collectively called integral types. - - Note that INTEGRAL_TYPE_P, as defined in tree.h, allows enumeration - types as well, which is incorrect in C++. Keep these checks in - ascending code order. */ -#define RS_INTEGRAL_TYPE_P(TYPE) \ - (TREE_CODE (TYPE) == BOOLEAN_TYPE || TREE_CODE (TYPE) == INTEGER_TYPE) - /* Returns true if TYPE is an integral or enumeration name. Keep these checks in ascending code order. */ #define INTEGRAL_OR_ENUMERATION_TYPE_P(TYPE) \ @@ -202,8 +1641,898 @@ #define DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P(NODE) \ (TREE_LANG_FLAG_2 (VAR_DECL_CHECK (NODE))) +/* WHILE_STMT accessors. These give access to the condition of the + while statement and the body of the while statement, respectively. */ +#define WHILE_COND(NODE) TREE_OPERAND (WHILE_STMT_CHECK (NODE), 0) +#define WHILE_BODY(NODE) TREE_OPERAND (WHILE_STMT_CHECK (NODE), 1) + +/* FOR_STMT accessors. These give access to the init statement, + condition, update expression, and body of the for statement, + respectively. */ +#define FOR_INIT_STMT(NODE) TREE_OPERAND (FOR_STMT_CHECK (NODE), 0) +#define FOR_COND(NODE) TREE_OPERAND (FOR_STMT_CHECK (NODE), 1) +#define FOR_EXPR(NODE) TREE_OPERAND (FOR_STMT_CHECK (NODE), 2) +#define FOR_BODY(NODE) TREE_OPERAND (FOR_STMT_CHECK (NODE), 3) +#define FOR_SCOPE(NODE) TREE_OPERAND (FOR_STMT_CHECK (NODE), 4) + +#define SWITCH_STMT_COND(NODE) TREE_OPERAND (SWITCH_STMT_CHECK (NODE), 0) +#define SWITCH_STMT_BODY(NODE) TREE_OPERAND (SWITCH_STMT_CHECK (NODE), 1) +#define SWITCH_STMT_TYPE(NODE) TREE_OPERAND (SWITCH_STMT_CHECK (NODE), 2) +#define SWITCH_STMT_SCOPE(NODE) TREE_OPERAND (SWITCH_STMT_CHECK (NODE), 3) + +/* Nonzero if NODE is the target for genericization of 'break' stmts. */ +#define LABEL_DECL_BREAK(NODE) DECL_LANG_FLAG_0 (LABEL_DECL_CHECK (NODE)) + +/* Nonzero if NODE is the target for genericization of 'continue' stmts. */ +#define LABEL_DECL_CONTINUE(NODE) DECL_LANG_FLAG_1 (LABEL_DECL_CHECK (NODE)) + // Above macros are copied from gcc/c-family/c-common.h +// Below macros are copied from gcc/cp/name-lookup.h + +/* Lookup walker marking. */ +#define LOOKUP_SEEN_P(NODE) TREE_VISITED (NODE) +#define LOOKUP_FOUND_P(NODE) \ + TREE_LANG_FLAG_4 (TREE_CHECK4 (NODE, RECORD_TYPE, UNION_TYPE, ENUMERAL_TYPE, \ + NAMESPACE_DECL)) + +// Above macros are copied from gcc/cp/name-lookup.h + +// Below macros are copied from gcc/cp/name-lookup.cc + +/* Create an overload suitable for recording an artificial TYPE_DECL + and another decl. We use this machanism to implement the struct + stat hack. */ + +#define STAT_HACK_P(N) ((N) && TREE_CODE (N) == OVERLOAD && OVL_LOOKUP_P (N)) +#define STAT_TYPE_VISIBLE_P(N) TREE_USED (OVERLOAD_CHECK (N)) +#define STAT_TYPE(N) TREE_TYPE (N) +#define STAT_DECL(N) OVL_FUNCTION (N) +#define STAT_VISIBLE(N) OVL_CHAIN (N) +#define MAYBE_STAT_DECL(N) (STAT_HACK_P (N) ? STAT_DECL (N) : N) +#define MAYBE_STAT_TYPE(N) (STAT_HACK_P (N) ? STAT_TYPE (N) : NULL_TREE) + +/* When a STAT_HACK_P is true, OVL_USING_P and OVL_EXPORT_P are valid + and apply to the hacked type. */ + +/* For regular (maybe) overloaded functions, we have OVL_HIDDEN_P. + But we also need to indicate hiddenness on implicit type decls + (injected friend classes), and (coming soon) decls injected from + block-scope externs. 
It is too awkward to press the existing + overload marking for that. If we have a hidden non-function, we + always create a STAT_HACK, and use these two markers as needed. */ +#define STAT_TYPE_HIDDEN_P(N) OVL_HIDDEN_P (N) +#define STAT_DECL_HIDDEN_P(N) OVL_DEDUP_P (N) + +/* The binding level currently in effect. */ + +#define current_binding_level \ + (*(cfun && cp_function_chain && cp_function_chain->bindings \ + ? &cp_function_chain->bindings \ + : &scope_chain->bindings)) + +// Above macros are copied from gcc/cp/name-lookup.cc + +/* The various kinds of special functions. If you add to this list, + you should update special_function_p as well. */ +enum special_function_kind +{ + sfk_none = 0, /* Not a special function. This enumeral + must have value zero; see + special_function_p. */ + /* The following are ordered, for use by member synthesis fns. */ + sfk_destructor, /* A destructor. */ + sfk_constructor, /* A constructor. */ + sfk_inheriting_constructor, /* An inheriting constructor */ + sfk_copy_constructor, /* A copy constructor. */ + sfk_move_constructor, /* A move constructor. */ + sfk_copy_assignment, /* A copy assignment operator. */ + sfk_move_assignment, /* A move assignment operator. */ + /* The following are unordered. */ + sfk_complete_destructor, /* A destructor for complete objects. */ + sfk_base_destructor, /* A destructor for base subobjects. */ + sfk_deleting_destructor, /* A destructor for complete objects that + deletes the object after it has been + destroyed. */ + sfk_conversion, /* A conversion operator. */ + sfk_deduction_guide, /* A class template deduction guide. */ + sfk_comparison, /* A comparison operator (e.g. ==, <, <=>). */ + sfk_virtual_destructor /* Used by member synthesis fns. */ +}; + +/* Places where an lvalue, or modifiable lvalue, may be required. + Used to select diagnostic messages in lvalue_error and + readonly_error. */ +enum lvalue_use +{ + lv_assign, + lv_increment, + lv_decrement, + lv_addressof, + lv_asm +}; + +/* A class for recording information about access failures (e.g. private + fields), so that we can potentially supply a fix-it hint about + an accessor (from a context in which the constness of the object + is known). */ + +class access_failure_info +{ +public: + access_failure_info () + : m_was_inaccessible (false), m_basetype_path (NULL_TREE), + m_decl (NULL_TREE), m_diag_decl (NULL_TREE) + {} + + void record_access_failure (tree basetype_path, tree decl, tree diag_decl); + + bool was_inaccessible_p () const { return m_was_inaccessible; } + tree get_decl () const { return m_decl; } + tree get_diag_decl () const { return m_diag_decl; } + tree get_any_accessor (bool const_p) const; + void maybe_suggest_accessor (bool const_p) const; + static void add_fixit_hint (rich_location *richloc, tree accessor); + +private: + bool m_was_inaccessible; + tree m_basetype_path; + tree m_decl; + tree m_diag_decl; +}; + +/* The various kinds of access check during parsing. */ +enum deferring_kind +{ + dk_no_deferred = 0, /* Check access immediately */ + dk_deferred = 1, /* Deferred check */ + dk_no_check = 2 /* No access check */ +}; + +/* The representation of a deferred access check. */ + +struct GTY (()) deferred_access_check +{ + /* The base class in which the declaration is referenced. */ + tree binfo; + /* The declaration whose access must be checked. */ + tree decl; + /* The declaration that should be used in the error message. */ + tree diag_decl; + /* The location of this access. 
*/ + location_t loc; +}; + +struct GTY (()) tree_template_info +{ + struct tree_base base; + tree tmpl; + tree args; + vec<deferred_access_check, va_gc> *deferred_access_checks; +}; + +/* The various kinds of lvalues we distinguish. */ +enum cp_lvalue_kind_flags +{ + clk_none = 0, /* Things that are not an lvalue. */ + clk_ordinary = 1, /* An ordinary lvalue. */ + clk_rvalueref = 2, /* An xvalue (rvalue formed using an rvalue reference) */ + clk_class = 4, /* A prvalue of class or array type. */ + clk_bitfield = 8, /* An lvalue for a bit-field. */ + clk_packed = 16, /* An lvalue for a packed field. */ + clk_implicit_rval = 1 << 5 /* An lvalue being treated as an xvalue. */ +}; + +/* This type is used for parameters and variables which hold + combinations of the flags in enum cp_lvalue_kind_flags. */ +typedef int cp_lvalue_kind; + +// forked from gcc/cp/name_lookup.h scope_kind + +/* The kinds of scopes we recognize. */ +enum scope_kind +{ + sk_block = 0, /* An ordinary block scope. This enumerator must + have the value zero because "cp_binding_level" + is initialized by using "memset" to set the + contents to zero, and the default scope kind + is "sk_block". */ + sk_cleanup, /* A scope for (pseudo-)scope for cleanup. It is + pseudo in that it is transparent to name lookup + activities. */ + sk_try, /* A try-block. */ + sk_catch, /* A catch-block. */ + sk_for, /* The scope of the variable declared in a + init-statement. */ + sk_cond, /* The scope of the variable declared in the condition + of an if or switch statement. */ + sk_function_parms, /* The scope containing function parameters. */ + sk_class, /* The scope containing the members of a class. */ + sk_scoped_enum, /* The scope containing the enumerators of a C++11 + scoped enumeration. */ + sk_namespace, /* The scope containing the members of a + namespace, including the global scope. */ + sk_template_parms, /* A scope for template parameters. */ + sk_template_spec, /* Like sk_template_parms, but for an explicit + specialization. Since, by definition, an + explicit specialization is introduced by + "template <>", this scope is always empty. */ + sk_transaction, /* A synchronized or atomic statement. */ + sk_omp /* An OpenMP structured block. */ +}; + +// forked from gcc/cp/cp-tree.h cp_built_in_function + +/* BUILT_IN_FRONTEND function codes. */ +enum cp_built_in_function +{ + CP_BUILT_IN_IS_CONSTANT_EVALUATED, + CP_BUILT_IN_INTEGER_PACK, + CP_BUILT_IN_IS_CORRESPONDING_MEMBER, + CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS, + CP_BUILT_IN_SOURCE_LOCATION, + CP_BUILT_IN_LAST +}; + +// forked from gcc/cp/cp-tree.h warning_sentinel + +/* RAII sentinel to disable certain warnings during template substitution + and elsewhere. */ + +class warning_sentinel +{ +public: + int &flag; + int val; + warning_sentinel (int &flag, bool suppress = true) : flag (flag), val (flag) + { + if (suppress) + flag = 0; + } + ~warning_sentinel () { flag = val; } +}; + +// forked from gcc/cp/cp-tree.h uid_sensitive_constexpr_evaluation_checker + +/* Used to determine whether uid_sensitive_constexpr_evaluation_p was + called and returned true, indicating that we've restricted constexpr + evaluation in order to avoid UID generation. We use this to control + updates to the fold_cache and cv_cache. 
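   As an illustrative aside (not part of this patch), warning_sentinel above
   is an RAII guard: its constructor records the flag's current value and
   zeroes it, and its destructor restores the saved value when the scope
   ends; a minimal sketch, where warn_foo stands in for any int
   warning-option flag:

     {
       warning_sentinel w (warn_foo);   // warn_foo temporarily forced to 0
       // ... work that should not trigger this warning ...
     }                                  // destructor restores the old value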
*/ + +struct uid_sensitive_constexpr_evaluation_checker +{ + const unsigned saved_counter; + uid_sensitive_constexpr_evaluation_checker (); + bool evaluation_restricted_p () const; +}; + +// forked from gcc/cp/cp-tree.h iloc_sentinel + +/* RAII sentinel to temporarily override input_location. This will not set + input_location to UNKNOWN_LOCATION or BUILTINS_LOCATION. */ + +class iloc_sentinel +{ + location_t saved_loc; + +public: + iloc_sentinel (location_t loc) : saved_loc (input_location) + { + if (loc >= RESERVED_LOCATION_COUNT) + input_location = loc; + } + ~iloc_sentinel () { input_location = saved_loc; } +}; + +// forked from gcc/cp/cp-tree.h ptrmem_cst + +struct GTY (()) ptrmem_cst +{ + struct tree_common common; + tree member; + location_t locus; +}; +typedef struct ptrmem_cst *ptrmem_cst_t; + +// forked from gcc/cp/cp-tree.h named_decl_hash + +/* hash traits for declarations. Hashes potential overload sets via + DECL_NAME. */ + +struct named_decl_hash : ggc_remove<tree> +{ + typedef tree value_type; /* A DECL or OVERLOAD */ + typedef tree compare_type; /* An identifier. */ + + inline static hashval_t hash (const value_type decl); + inline static bool equal (const value_type existing, compare_type candidate); + + static const bool empty_zero_p = true; + static inline void mark_empty (value_type &p) { p = NULL_TREE; } + static inline bool is_empty (value_type p) { return !p; } + + /* Nothing is deletable. Everything is insertable. */ + static bool is_deleted (value_type) { return false; } + static void mark_deleted (value_type) { gcc_unreachable (); } +}; + +// forked from gcc/cp/cp-tree.h lang_decl_selector + +/* Discriminator values for lang_decl. */ + +enum lang_decl_selector +{ + lds_min, + lds_fn, + lds_ns, + lds_parm, + lds_decomp +}; + +// forked from gcc/cp/cp-tree.h lang_decl_base + +/* Flags shared by all forms of DECL_LANG_SPECIFIC. + + Some of the flags live here only to make lang_decl_min/fn smaller. Do + not make this struct larger than 32 bits. */ + +struct GTY (()) lang_decl_base +{ + ENUM_BITFIELD (lang_decl_selector) selector : 3; + unsigned use_template : 2; + unsigned not_really_extern : 1; /* var or fn */ + unsigned initialized_in_class : 1; /* var or fn */ + + unsigned threadprivate_or_deleted_p : 1; /* var or fn */ + /* anticipated_p is no longer used for anticipated_decls (fn, type + or template). It is used as DECL_OMP_PRIVATIZED_MEMBER in + var. */ + unsigned anticipated_p : 1; + unsigned friend_or_tls : 1; /* var, fn, type or template */ + unsigned unknown_bound_p : 1; /* var */ + unsigned odr_used : 1; /* var or fn */ + unsigned concept_p : 1; /* applies to vars and functions */ + unsigned var_declared_inline_p : 1; /* var */ + unsigned dependent_init_p : 1; /* var */ + + /* The following apply to VAR, FUNCTION, TYPE, CONCEPT, & NAMESPACE + decls. */ + unsigned module_purview_p : 1; /* in module purview (not GMF) */ + unsigned module_import_p : 1; /* from an import */ + unsigned module_entity_p : 1; /* is in the entitity ary & + hash. */ + /* VAR_DECL or FUNCTION_DECL has attached decls. */ + unsigned module_attached_p : 1; + + /* 12 spare bits. */ +}; + +/* True for DECL codes which have template info and access. 
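   As an illustrative aside (not part of this patch), the selector field in
   lang_decl_base above discriminates which member of the DECL_LANG_SPECIFIC
   union is active, which is exactly what the LANG_DECL_*_CHECK macros
   verify; a minimal sketch, assuming a lang_decl pointer ld:

     // Only the view matching the selector may be used.
     switch (ld->u.base.selector)
       {
       case lds_fn:    break;   // function: ld->u.fn is valid
       case lds_ns:    break;   // namespace: ld->u.ns is valid
       case lds_parm:  break;   // parameter: ld->u.parm is valid
       default:        break;   // lds_min or lds_decomp
       }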
*/ +#define LANG_DECL_HAS_MIN(NODE) \ + (VAR_OR_FUNCTION_DECL_P (NODE) || TREE_CODE (NODE) == FIELD_DECL \ + || TREE_CODE (NODE) == CONST_DECL || TREE_CODE (NODE) == TYPE_DECL \ + || TREE_CODE (NODE) == TEMPLATE_DECL || TREE_CODE (NODE) == USING_DECL \ + || TREE_CODE (NODE) == CONCEPT_DECL) + +// forked from gcc/c-family-common.h stmt_tree_s + +/* Information about a statement tree. */ + +struct GTY (()) stmt_tree_s +{ + /* A stack of statement lists being collected. */ + vec<tree, va_gc> *x_cur_stmt_list; + + /* In C++, Nonzero if we should treat statements as full + expressions. In particular, this variable is non-zero if at the + end of a statement we should destroy any temporaries created + during that statement. Similarly, if, at the end of a block, we + should destroy any local variables in this block. Normally, this + variable is nonzero, since those are the normal semantics of + C++. + + This flag has no effect in C. */ + int stmts_are_full_exprs_p; +}; + +// forked from gcc/c-family-common.h stmt_tree_s + +typedef struct stmt_tree_s *stmt_tree; + +// forked from gcc/c-family-common.h c_language_function + +/* Global state pertinent to the current function. Some C dialects + extend this structure with additional fields. */ + +struct GTY (()) c_language_function +{ + /* While we are parsing the function, this contains information + about the statement-tree that we are building. */ + struct stmt_tree_s x_stmt_tree; + + /* Vector of locally defined typedefs, for + -Wunused-local-typedefs. */ + vec<tree, va_gc> *local_typedefs; +}; + +// forked from gcc/cp/cp-tree.h omp_declare_target_attr + +struct GTY (()) omp_declare_target_attr +{ + bool attr_syntax; +}; + +// forked from gcc/cp/name-lookup.h cxx_binding + +/* Datatype that represents binding established by a declaration between + a name and a C++ entity. */ +struct GTY (()) cxx_binding +{ + /* Link to chain together various bindings for this name. */ + cxx_binding *previous; + /* The non-type entity this name is bound to. */ + tree value; + /* The type entity this name is bound to. */ + tree type; + + bool value_is_inherited : 1; + bool is_local : 1; + bool type_is_hidden : 1; +}; + +// forked from gcc/cp/name-lookup.h cxx_saved_binding + +/* Datatype used to temporarily save C++ bindings (for implicit + instantiations purposes and like). Implemented in decl.cc. */ +struct GTY (()) cxx_saved_binding +{ + /* The name of the current binding. */ + tree identifier; + /* The binding we're saving. */ + cxx_binding *binding; + tree real_type_value; +}; + +// forked from gcc/cp/cp-tree.h saved_scope + +/* Global state. */ + +struct GTY (()) saved_scope +{ + vec<cxx_saved_binding, va_gc> *old_bindings; + tree old_namespace; + vec<tree, va_gc> *decl_ns_list; + tree class_name; + tree class_type; + tree access_specifier; + tree function_decl; + vec<tree, va_gc> *lang_base; + tree lang_name; + tree template_parms; + tree x_saved_tree; + + /* Only used for uses of this in trailing return type. */ + tree x_current_class_ptr; + tree x_current_class_ref; + + int x_processing_template_decl; + int x_processing_specialization; + int x_processing_constraint; + int suppress_location_wrappers; + BOOL_BITFIELD x_processing_explicit_instantiation : 1; + BOOL_BITFIELD need_pop_function_context : 1; + + /* Nonzero if we are parsing the discarded statement of a constexpr + if-statement. */ + BOOL_BITFIELD discarded_stmt : 1; + /* Nonzero if we are parsing or instantiating the compound-statement + of consteval if statement. 
Also set while processing an immediate + invocation. */ + BOOL_BITFIELD consteval_if_p : 1; + + int unevaluated_operand; + int inhibit_evaluation_warnings; + int noexcept_operand; + int ref_temp_count; + + struct stmt_tree_s x_stmt_tree; + + hash_map<tree, tree> *GTY ((skip)) x_local_specializations; + vec<omp_declare_target_attr, va_gc> *omp_declare_target_attribute; + + struct saved_scope *prev; +}; + +extern GTY (()) struct saved_scope *scope_chain; + +// forked from gcc/cp/cp-tree.h named_label_hash + +struct named_label_entry; /* Defined in decl.cc. */ + +struct named_label_hash : ggc_remove<named_label_entry *> +{ + typedef named_label_entry *value_type; + typedef tree compare_type; /* An identifier. */ + + inline static hashval_t hash (value_type); + inline static bool equal (const value_type, compare_type); + + static const bool empty_zero_p = true; + inline static void mark_empty (value_type &p) { p = NULL; } + inline static bool is_empty (value_type p) { return !p; } + + /* Nothing is deletable. Everything is insertable. */ + inline static bool is_deleted (value_type) { return false; } + inline static void mark_deleted (value_type) { gcc_unreachable (); } +}; + +// forked from gcc/cp/cp-tree.h + +/* Global state pertinent to the current function. */ + +struct GTY (()) language_function +{ + struct c_language_function base; + + tree x_cdtor_label; + tree x_current_class_ptr; + tree x_current_class_ref; + tree x_eh_spec_block; + tree x_in_charge_parm; + tree x_vtt_parm; + tree x_return_value; + + BOOL_BITFIELD returns_value : 1; + BOOL_BITFIELD returns_null : 1; + BOOL_BITFIELD returns_abnormally : 1; + BOOL_BITFIELD infinite_loop : 1; + BOOL_BITFIELD x_in_function_try_handler : 1; + BOOL_BITFIELD x_in_base_initializer : 1; + + /* True if this function can throw an exception. */ + BOOL_BITFIELD can_throw : 1; + + BOOL_BITFIELD invalid_constexpr : 1; + BOOL_BITFIELD throwing_cleanup : 1; + + hash_table<named_label_hash> *x_named_labels; + + /* Tracking possibly infinite loops. This is a vec<tree> only because + vec<bool> doesn't work with gtype. */ + vec<tree, va_gc> *infinite_loops; +}; + +// forked from gcc/c-family/c-common.h ref_operator + +/* The various name of operator that appears in error messages. */ +enum ref_operator +{ + /* NULL */ + RO_NULL, + /* array indexing */ + RO_ARRAY_INDEXING, + /* unary * */ + RO_UNARY_STAR, + /* -> */ + RO_ARROW, + /* implicit conversion */ + RO_IMPLICIT_CONVERSION, + /* ->* */ + RO_ARROW_STAR +}; + +// forked from gcc/cp/cp-tree.h lang_decl_min + +/* DECL_LANG_SPECIFIC for the above codes. */ + +struct GTY (()) lang_decl_min +{ + struct lang_decl_base base; /* 32-bits. */ + + /* In a FUNCTION_DECL for which DECL_THUNK_P holds, this is + THUNK_ALIAS. + In a FUNCTION_DECL for which DECL_THUNK_P does not hold, + VAR_DECL, TYPE_DECL, or TEMPLATE_DECL, this is + DECL_TEMPLATE_INFO. */ + tree template_info; + + /* In a DECL_THUNK_P FUNCTION_DECL, this is THUNK_VIRTUAL_OFFSET. + In a lambda-capture proxy VAR_DECL, this is DECL_CAPTURED_VARIABLE. + In a function-scope TREE_STATIC VAR_DECL or IMPLICIT_TYPEDEF_P TYPE_DECL, + this is DECL_DISCRIMINATOR. + In a DECL_LOCAL_DECL_P decl, this is the namespace decl it aliases. + Otherwise, in a class-scope DECL, this is DECL_ACCESS. */ + tree access; +}; + +// forked from gcc/cp/cp-tree.h lang_decl_fn + +/* Additional DECL_LANG_SPECIFIC information for functions. */ + +struct GTY (()) lang_decl_fn +{ + struct lang_decl_min min; + + /* In a overloaded operator, this is the compressed operator code. 
*/ + unsigned ovl_op_code : 6; + unsigned global_ctor_p : 1; + unsigned global_dtor_p : 1; + + unsigned static_function : 1; + unsigned pure_virtual : 1; + unsigned defaulted_p : 1; + unsigned has_in_charge_parm_p : 1; + unsigned has_vtt_parm_p : 1; + unsigned pending_inline_p : 1; + unsigned nonconverting : 1; + unsigned thunk_p : 1; + + unsigned this_thunk_p : 1; + unsigned omp_declare_reduction_p : 1; + unsigned has_dependent_explicit_spec_p : 1; + unsigned immediate_fn_p : 1; + unsigned maybe_deleted : 1; + unsigned coroutine_p : 1; + unsigned implicit_constexpr : 1; + + unsigned spare : 9; + + /* 32-bits padding on 64-bit host. */ + + /* For a non-thunk function decl, this is a tree list of + friendly classes. For a thunk function decl, it is the + thunked to function decl. */ + tree befriending_classes; + + /* For a virtual FUNCTION_DECL for which + DECL_THIS_THUNK_P does not hold, this is DECL_THUNKS. Both + this pointer and result pointer adjusting thunks are + chained here. This pointer thunks to return pointer thunks + will be chained on the return pointer thunk. + For a DECL_CONSTUCTOR_P FUNCTION_DECL, this is the base from + whence we inherit. Otherwise, it is the class in which a + (namespace-scope) friend is defined (if any). */ + tree context; + + union lang_decl_u5 + { + /* In a non-thunk FUNCTION_DECL, this is DECL_CLONED_FUNCTION. */ + tree GTY ((tag ("0"))) cloned_function; + + /* In a FUNCTION_DECL for which THUNK_P holds this is the + THUNK_FIXED_OFFSET. */ + HOST_WIDE_INT GTY ((tag ("1"))) fixed_offset; + } GTY ((desc ("%1.thunk_p"))) u5; + + union lang_decl_u3 + { + struct cp_token_cache *GTY ((tag ("1"))) pending_inline_info; + tree GTY ((tag ("0"))) saved_auto_return_type; + } GTY ((desc ("%1.pending_inline_p"))) u; +}; + +// forked from gcc/cp/cp-tree.h lang_decl_ns + +/* DECL_LANG_SPECIFIC for namespaces. */ + +struct GTY (()) lang_decl_ns +{ + struct lang_decl_base base; /* 32 bits. */ + + /* Inline children. Needs to be va_gc, because of PCH. */ + vec<tree, va_gc> *inlinees; + + /* Hash table of bound decls. It'd be nice to have this inline, but + as the hash_map has a dtor, we can't then put this struct into a + union (until moving to c++11). */ + hash_table<named_decl_hash> *bindings; +}; + +// forked from gcc/cp/cp-tree.h lang_decl_parm + +/* DECL_LANG_SPECIFIC for parameters. */ + +struct GTY (()) lang_decl_parm +{ + struct lang_decl_base base; /* 32 bits. */ + int level; + int index; +}; + +// forked from gcc/cp/cp-tree.h lang_decl_decomp + +/* Additional DECL_LANG_SPECIFIC information for structured bindings. */ + +struct GTY (()) lang_decl_decomp +{ + struct lang_decl_min min; + /* The artificial underlying "e" variable of the structured binding + variable. */ + tree base; +}; + +// forked from gcc/cp/cp-tree.h lang_decl + +/* DECL_LANG_SPECIFIC for all types. It would be nice to just make this a + union rather than a struct containing a union as its only field, but + tree.h declares it as a struct. */ + +struct GTY (()) lang_decl +{ + union GTY ((desc ("%h.base.selector"))) lang_decl_u + { + /* Nothing of only the base type exists. 
*/ + struct lang_decl_base GTY ((default)) base; + struct lang_decl_min GTY ((tag ("lds_min"))) min; + struct lang_decl_fn GTY ((tag ("lds_fn"))) fn; + struct lang_decl_ns GTY ((tag ("lds_ns"))) ns; + struct lang_decl_parm GTY ((tag ("lds_parm"))) parm; + struct lang_decl_decomp GTY ((tag ("lds_decomp"))) decomp; + } u; +}; + +// forked from gcc/c-family/c-common.h c_fileinfo + +/* Information recorded about each file examined during compilation. */ + +struct c_fileinfo +{ + int time; /* Time spent in the file. */ + + /* Flags used only by C++. + INTERFACE_ONLY nonzero means that we are in an "interface" section + of the compiler. INTERFACE_UNKNOWN nonzero means we cannot trust + the value of INTERFACE_ONLY. If INTERFACE_UNKNOWN is zero and + INTERFACE_ONLY is zero, it means that we are responsible for + exporting definitions that others might need. */ + short interface_only; + short interface_unknown; +}; + +// forked from gcc/c-family/c-common.h c_common_identifier + +/* Identifier part common to the C front ends. Inherits from + tree_identifier, despite appearances. */ +struct GTY (()) c_common_identifier +{ + struct tree_common common; + struct cpp_hashnode node; // from cpplib.h +}; + +// forked from gcc/cp/cp-tree.h lang_identifier + +/* Language-dependent contents of an identifier. */ + +struct GTY (()) lang_identifier +{ + struct c_common_identifier c_common; + cxx_binding *bindings; +}; + +// forked from gcc/cp/cp-tree.h tree_overload + +/* OVL_HIDDEN_P nodes come before other nodes. */ + +struct GTY (()) tree_overload +{ + struct tree_common common; + tree function; +}; + +// forked from gcc/cp/cp-tree.h ovl_iterator + +class ovl_iterator +{ + tree ovl; + const bool allow_inner; /* Only used when checking. */ + +public: + explicit ovl_iterator (tree o, bool allow = false) + : ovl (o), allow_inner (allow) + {} + +public: + operator bool () const { return ovl; } + ovl_iterator &operator++ () + { + ovl = TREE_CODE (ovl) != OVERLOAD ? NULL_TREE : OVL_CHAIN (ovl); + return *this; + } + tree operator* () const + { + tree fn = TREE_CODE (ovl) != OVERLOAD ? ovl : OVL_FUNCTION (ovl); + + /* Check this is not an unexpected 2-dimensional overload. */ + gcc_checking_assert (allow_inner || TREE_CODE (fn) != OVERLOAD); + + return fn; + } + bool operator== (const ovl_iterator &o) const { return ovl == o.ovl; } + tree get_using () const + { + gcc_checking_assert (using_p ()); + return ovl; + } + +public: + /* Whether this overload was introduced by a using decl. */ + bool using_p () const + { + return (TREE_CODE (ovl) == USING_DECL + || (TREE_CODE (ovl) == OVERLOAD && OVL_USING_P (ovl))); + } + /* Whether this using is being exported. */ + bool exporting_p () const { return OVL_EXPORT_P (get_using ()); } + + bool hidden_p () const + { + return TREE_CODE (ovl) == OVERLOAD && OVL_HIDDEN_P (ovl); + } + +public: + tree remove_node (tree head) { return remove_node (head, ovl); } + tree reveal_node (tree head) { return reveal_node (head, ovl); } + +protected: + /* If we have a nested overload, point at the inner overload and + return the next link on the outer one. */ + tree maybe_push () + { + tree r = NULL_TREE; + + if (ovl && TREE_CODE (ovl) == OVERLOAD && OVL_NESTED_P (ovl)) + { + r = OVL_CHAIN (ovl); + ovl = OVL_FUNCTION (ovl); + } + return r; + } + /* Restore an outer nested overload. */ + void pop (tree outer) + { + gcc_checking_assert (!ovl); + ovl = outer; + } + +private: + /* We make these static functions to avoid the address of the + iterator escaping the local context. 
*/ + static tree remove_node (tree head, tree node); + static tree reveal_node (tree ovl, tree node); +}; + +// forked from gcc/cp/cp-tree.h lkp_iterator + +/* Iterator over a (potentially) 2 dimensional overload, which is + produced by name lookup. */ + +class lkp_iterator : public ovl_iterator +{ + typedef ovl_iterator parent; + + tree outer; + +public: + explicit lkp_iterator (tree o) : parent (o, true), outer (maybe_push ()) {} + +public: + lkp_iterator &operator++ () + { + bool repush = !outer; + + if (!parent::operator++ () && !repush) + { + pop (outer); + repush = true; + } + + if (repush) + outer = maybe_push (); + + return *this; + } +}; + // forked from gcc/cp/cp-tree.h treee_pair_s struct GTY (()) tree_pair_s @@ -325,6 +2654,80 @@ struct GTY (()) lang_type namespace Rust { +// forked from gcc/cp/cp-tree.h cp_ref_qualifier + +enum rs_ref_qualifier +{ + REF_QUAL_NONE = 0, + REF_QUAL_LVALUE = 1, + REF_QUAL_RVALUE = 2 +}; + +// forked from gcc/cp/cp-tree.h tsubst_flags + +/* Bitmask flags to control type substitution. */ +enum tsubst_flags +{ + tf_none = 0, /* nothing special */ + tf_error = 1 << 0, /* give error messages */ + tf_warning = 1 << 1, /* give warnings too */ + tf_ignore_bad_quals = 1 << 2, /* ignore bad cvr qualifiers */ + tf_keep_type_decl = 1 << 3, /* retain typedef type decls + (make_typename_type use) */ + tf_ptrmem_ok = 1 << 4, /* pointers to member ok (internal + instantiate_type use) */ + tf_user = 1 << 5, /* found template must be a user template + (lookup_template_class use) */ + tf_conv = 1 << 6, /* We are determining what kind of + conversion might be permissible, + not actually performing the + conversion. */ + tf_decltype = 1 << 7, /* We are the operand of decltype. + Used to implement the special rules + for calls in decltype (5.2.2/11). */ + tf_partial = 1 << 8, /* Doing initial explicit argument + substitution in fn_type_unification. */ + tf_fndecl_type = 1 << 9, /* Substituting the type of a function + declaration. */ + tf_no_cleanup = 1 << 10, /* Do not build a cleanup + (build_target_expr and friends) */ + tf_norm = 1 << 11, /* Build diagnostic information during + constraint normalization. */ + /* Convenient substitution flags combinations. */ + tf_warning_or_error = tf_warning | tf_error +}; + +// forked from gcc/cp/cp-tree.h cp_identifier_kind + +/* Kinds of identifiers. Values are carefully chosen. */ +enum cp_identifier_kind +{ + cik_normal = 0, /* Not a special identifier. */ + cik_keyword = 1, /* A keyword. */ + cik_ctor = 2, /* Constructor (in-chg, complete or base). */ + cik_dtor = 3, /* Destructor (in-chg, deleting, complete or + base). */ + cik_simple_op = 4, /* Non-assignment operator name. */ + cik_assign_op = 5, /* An assignment operator name. */ + cik_conv_op = 6, /* Conversion operator name. */ + cik_reserved_for_udlit = 7, /* Not yet in use */ + cik_max +}; + +// forked from gcc/cp/cp-tree.h tag_types + +/* An enumeration of the kind of tags that C++ accepts. */ +enum tag_types +{ + none_type = 0, /* Not a tag type. */ + record_type, /* "struct" types. */ + class_type, /* "class" types. */ + union_type, /* "union" types. */ + enum_type, /* "enum" types. */ + typename_type, /* "typename" types. 
*/ + scope_type /* namespace or tagged type name followed by :: */ +}; + // forked from gcc/cp/cp-tree.h tsubst_flags_t /* This type is used for parameters and variables which hold @@ -375,6 +2778,18 @@ enum rs_built_in_function RS_BUILT_IN_LAST }; +// forked from gcc/cp/cp-tree.h compare_bounds_t + +/* in typeck.cc */ +/* Says how we should behave when comparing two arrays one of which + has unknown bounds. */ +enum compare_bounds_t +{ + bounds_none, + bounds_either, + bounds_first +}; + extern tree convert_to_void (tree expr, impl_conv_void implicit); @@ -411,8 +2826,8 @@ mark_use (tree expr, bool rvalue_p, bool read_p, location_t loc, // function with no library fallback (or any of its bits, such as in // a conversion to bool). extern tree -mark_rvalue_use (tree e, location_t loc /* = UNKNOWN_LOCATION */, - bool reject_builtin /* = true */); +mark_rvalue_use (tree, location_t = UNKNOWN_LOCATION, + bool reject_builtin = true); // Called whenever an expression is used in an lvalue context. extern tree @@ -475,8 +2890,271 @@ extern bool var_in_maybe_constexpr_fn (tree); extern int rs_type_quals (const_tree type); +inline bool type_unknown_p (const_tree); + extern bool decl_maybe_constant_var_p (tree); +extern void +init_modules (); + +extern bool var_in_constexpr_fn (tree); + +inline tree ovl_first (tree) ATTRIBUTE_PURE; + +inline bool type_unknown_p (const_tree); + +extern tree +lookup_add (tree fns, tree lookup); + +extern tree +ovl_make (tree fn, tree next = NULL_TREE); + +extern int is_overloaded_fn (tree) ATTRIBUTE_PURE; + +extern bool maybe_add_lang_type_raw (tree); + +extern rs_ref_qualifier type_memfn_rqual (const_tree); + +extern bool builtin_pack_fn_p (tree); + +extern tree make_conv_op_name (tree); + +extern int type_memfn_quals (const_tree); + +struct c_fileinfo * +get_fileinfo (const char *); + +extern tree +cxx_make_type (enum tree_code CXX_MEM_STAT_INFO); + +extern tree +build_cplus_array_type (tree, tree, int is_dep = -1); + +extern bool is_byte_access_type (tree); + +extern bool +comptypes (tree, tree, int); + +extern tree canonical_eh_spec (tree); + +extern int cp_tree_operand_length (const_tree); + +extern bool rs_tree_equal (tree, tree); + +extern bool compparms (const_tree, const_tree); + +extern tree +rs_build_qualified_type_real (tree, int, tsubst_flags_t); +#define rs_build_qualified_type(TYPE, QUALS) \ + rs_build_qualified_type_real ((TYPE), (QUALS), tf_warning_or_error) +extern bool cv_qualified_p (const_tree); + +extern bool similar_type_p (tree, tree); + +extern bool rs_tree_equal (tree, tree); + +extern bool +vector_targets_convertible_p (const_tree t1, const_tree t2); + +extern bool same_type_ignoring_top_level_qualifiers_p (tree, tree); + +extern bool comp_ptr_ttypes_const (tree, tree, compare_bounds_t); + +extern tree +get_class_binding_direct (tree, tree, bool want_type = false); + +extern tree skip_artificial_parms_for (const_tree, tree); + +extern void +lang_check_failed (const char *, int, + const char *) ATTRIBUTE_NORETURN ATTRIBUTE_COLD; + +extern tree default_init_uninitialized_part (tree); + +extern bool type_has_non_user_provided_default_constructor (tree); + +extern bool default_ctor_p (const_tree); + +extern bool user_provided_p (tree); + +extern bool sufficient_parms_p (const_tree); + +extern tree next_initializable_field (tree); + +extern tree in_class_defaulted_default_constructor (tree); + +extern bool is_instantiation_of_constexpr (tree); + +extern bool +check_for_uninitialized_const_var (tree, bool, tsubst_flags_t); + +extern bool 
reduced_constant_expression_p (tree); + +extern tree cv_unqualified (tree); + +extern tree cp_get_callee (tree); +extern tree rs_get_callee_fndecl_nofold (tree); + +extern bool is_nondependent_static_init_expression (tree); + +extern tree build_nop (tree, tree); + +extern bool scalarish_type_p (const_tree); + +extern tree is_bitfield_expr_with_lowered_type (const_tree); + +extern tree convert_bitfield_to_declared_type (tree); + +extern tree +cp_fold_maybe_rvalue (tree, bool); + +extern tree maybe_undo_parenthesized_ref (tree); + +extern tree +fold_offsetof (tree, tree = size_type_node, tree_code ctx = ERROR_MARK); + +extern tree cp_truthvalue_conversion (tree, tsubst_flags_t); + +extern tree +fold_non_dependent_expr (tree, tsubst_flags_t = tf_warning_or_error, + bool = false, tree = NULL_TREE); + +extern int char_type_p (tree); + +extern bool instantiation_dependent_expression_p (tree); + +extern bool type_has_nontrivial_copy_init (const_tree); + +extern tree build_local_temp (tree); + +extern bool is_normal_capture_proxy (tree); + +extern bool reject_gcc_builtin (const_tree, location_t = UNKNOWN_LOCATION); + +extern tree resolve_nondeduced_context (tree, tsubst_flags_t); + +extern void cxx_incomplete_type_diagnostic (location_t, const_tree, const_tree, + diagnostic_t); + +extern void cxx_incomplete_type_error (location_t, const_tree, const_tree); + +extern bool invalid_nonstatic_memfn_p (location_t, tree, tsubst_flags_t); + +extern bool really_overloaded_fn (tree) ATTRIBUTE_PURE; + +extern tree resolve_nondeduced_context_or_error (tree, tsubst_flags_t); + +extern tree instantiate_non_dependent_or_null (tree); + +extern void cxx_incomplete_type_inform (const_tree); + +extern tree strip_top_quals (tree); + +extern bool undeduced_auto_decl (tree); + +extern bool require_deduced_type (tree, tsubst_flags_t = tf_warning_or_error); + +extern bool decl_constant_var_p (tree); + +extern tree build_new_constexpr_heap_type (tree, tree, tree); + +extern bool is_empty_field (tree); + +extern bool +in_immediate_context (); + +extern tree cp_get_callee_fndecl_nofold (tree); + +extern bool +cxx_mark_addressable (tree, bool = false); + +extern tree fold_builtin_source_location (location_t); + +extern tree build_address (tree); + +extern bool bitfield_p (const_tree); + +extern tree rvalue (tree); + +extern bool glvalue_p (const_tree); + +extern cp_lvalue_kind lvalue_kind (const_tree); + +extern tree +decl_constant_value (tree, bool); + +extern tree lookup_enumerator (tree, tree); + +extern int +is_class_type (tree, int); + +extern tree braced_lists_to_strings (tree, tree); + +extern tree +fold_builtin_is_pointer_inverconvertible_with_class (location_t, int, tree *); + +extern bool layout_compatible_type_p (tree, tree); + +extern tree finish_underlying_type (tree); + +extern tree +c_common_type_for_mode (machine_mode, int); + +extern bool std_layout_type_p (const_tree); + +extern tree complete_type (tree); + +extern tree complete_type_or_else (tree, tree); + +extern void note_failed_type_completion_for_satisfaction (tree); + +extern tree complete_type_or_maybe_complain (tree, tree, tsubst_flags_t); + +extern bool +next_common_initial_seqence (tree &, tree &); + +extern bool null_member_pointer_value_p (tree); + +extern tree +fold_builtin_is_corresponding_member (location_t, int, tree *); + +extern tree cp_fold_rvalue (tree); + +extern tree +maybe_constant_value (tree, tree = NULL_TREE, bool = false); + +extern tree lvalue_type (tree); + +extern void lvalue_error (location_t, enum lvalue_use); + +extern tree 
+cp_fold_maybe_rvalue (tree, bool); + +extern tree get_first_fn (tree) ATTRIBUTE_PURE; + +extern void explain_non_literal_class (tree); + +extern bool reference_related_p (tree, tree); + +extern bool ordinary_char_type_p (tree); + +extern bool array_string_literal_compatible_p (tree, tree); + +// forked from gcc/cp/cp-tree.h + +enum +{ + ce_derived, + ce_type, + ce_normal, + ce_exact +}; + +extern tree +rs_build_qualified_type_real (tree, int, tsubst_flags_t); +#define rs_build_qualified_type(TYPE, QUALS) \ + rs_build_qualified_type_real ((TYPE), (QUALS), tf_warning_or_error) + extern tree rs_walk_subtrees (tree *, int *, walk_tree_fn, void *, hash_set<tree> *); #define rs_walk_tree(tp, func, data, pset) \ @@ -503,6 +3181,206 @@ rs_expr_loc_or_input_loc (const_tree t) return rs_expr_loc_or_loc (t, input_location); } +// forked from gcc/cp/cp-tree.h type_unknown_p + +inline bool +type_unknown_p (const_tree expr) +{ + return TREE_TYPE (expr) == unknown_type_node; +} + +// forked from gcc/cp/cp-tree.h ovl_first + +/* Inline bodies. */ + +inline tree +ovl_first (tree node) +{ + while (TREE_CODE (node) == OVERLOAD) + node = OVL_FUNCTION (node); + return node; +} + +// forked from gcc/cp/cp-tree.h type_of_this_parm + +/* Return the type of the `this' parameter of FNTYPE. */ + +inline tree +type_of_this_parm (const_tree fntype) +{ + function_args_iterator iter; + gcc_assert (TREE_CODE (fntype) == METHOD_TYPE); + function_args_iter_init (&iter, fntype); + return function_args_iter_cond (&iter); +} + +// forked from gcc/cp/cp-tree.h class_of_this_parm + +/* Return the class of the `this' parameter of FNTYPE. */ + +inline tree +class_of_this_parm (const_tree fntype) +{ + return TREE_TYPE (type_of_this_parm (fntype)); +} + +// forked from gcc/cp/cp-tree.h identifier_p + +/* Return a typed pointer version of T if it designates a + C++ front-end identifier. */ +inline lang_identifier * +identifier_p (tree t) +{ + if (TREE_CODE (t) == IDENTIFIER_NODE) + return (lang_identifier *) t; + return NULL; +} + +// forked from gcc/c-family/c-common.h gnu_vector_type_p + +/* Return true if TYPE is a vector type that should be subject to the GNU + vector extensions (as opposed to a vector type that is used only for + the purposes of defining target-specific built-in functions). */ + +inline bool +gnu_vector_type_p (const_tree type) +{ + return TREE_CODE (type) == VECTOR_TYPE && !TYPE_INDIVISIBLE_P (type); +} + +extern vec<tree, va_gc> * +make_tree_vector (void); + +extern void +release_tree_vector (vec<tree, va_gc> *); + +/* Simplified unique_ptr clone to release a tree vec on exit. */ + +class releasing_vec +{ +public: + typedef vec<tree, va_gc> vec_t; + + releasing_vec (vec_t *v) : v (v) {} + releasing_vec () : v (make_tree_vector ()) {} + + /* Copy ops are deliberately declared but not defined, + copies must always be elided. */ + releasing_vec (const releasing_vec &); + releasing_vec &operator= (const releasing_vec &); + + vec_t &operator* () const { return *v; } + vec_t *operator-> () const { return v; } + vec_t *get () const { return v; } + operator vec_t * () const { return v; } + vec_t **operator& () { return &v; } + + /* Breaks pointer/value consistency for convenience. This takes ptrdiff_t + rather than unsigned to avoid ambiguity with the built-in operator[] + (bootstrap/91828). 
*/ + tree &operator[] (ptrdiff_t i) const { return (*v)[i]; } + + tree *begin () { return ::begin (v); } + tree *end () { return ::end (v); } + + void release () + { + release_tree_vector (v); + v = NULL; + } + + ~releasing_vec () { release_tree_vector (v); } + +private: + vec_t *v; +}; + +inline tree * +vec_safe_push (releasing_vec &r, const tree &t CXX_MEM_STAT_INFO) +{ + return vec_safe_push (*&r, t PASS_MEM_STAT); +} + +inline bool +vec_safe_reserve (releasing_vec &r, unsigned n, + bool e = false CXX_MEM_STAT_INFO) +{ + return vec_safe_reserve (*&r, n, e PASS_MEM_STAT); +} +inline unsigned +vec_safe_length (releasing_vec &r) +{ + return r->length (); +} +inline void +vec_safe_splice (releasing_vec &r, vec<tree, va_gc> *p CXX_MEM_STAT_INFO) +{ + vec_safe_splice (*&r, p PASS_MEM_STAT); +} + +inline bool +null_node_p (const_tree expr) +{ + STRIP_ANY_LOCATION_WRAPPER (expr); + return expr == null_node; +} + +inline void +cxx_incomplete_type_diagnostic (const_tree value, const_tree type, + diagnostic_t diag_kind) +{ + cxx_incomplete_type_diagnostic (rs_expr_loc_or_input_loc (value), value, type, + diag_kind); +} + +inline void +cxx_incomplete_type_error (const_tree value, const_tree type) +{ + cxx_incomplete_type_diagnostic (value, type, DK_ERROR); +} + +extern location_t +location_of (tree t); + +/* Helpers for IMPLICIT_RVALUE_P to look through automatic dereference. */ + +inline bool +implicit_rvalue_p (const_tree t) +{ + if (REFERENCE_REF_P (t)) + t = TREE_OPERAND (t, 0); + return ((TREE_CODE (t) == NON_LVALUE_EXPR) && IMPLICIT_RVALUE_P (t)); +} +inline tree +set_implicit_rvalue_p (tree ot) +{ + tree t = ot; + if (REFERENCE_REF_P (t)) + t = TREE_OPERAND (t, 0); + IMPLICIT_RVALUE_P (t) = 1; + return ot; +} + +namespace Compile { +extern tree +maybe_constant_init (tree, tree = NULL_TREE, bool = false); + +extern void +explain_invalid_constexpr_fn (tree fun); + +extern bool potential_constant_expression (tree); + +extern bool +literal_type_p (tree t); + +extern bool +maybe_constexpr_fn (tree t); + +extern tree +fold_non_dependent_init (tree, tsubst_flags_t = tf_warning_or_error, + bool = false, tree = NULL_TREE); +} // namespace Compile + } // namespace Rust #endif // RUST_TREE |
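Usage note (not part of the patch): the iloc_sentinel class forked into rust-tree.h above temporarily overrides input_location for the duration of a scope. A minimal sketch of its intended use, assuming the usual GCC-internal environment of this header (tree.h, diagnostic-core.h); the helper name diagnose_at_expr is hypothetical:

static void
diagnose_at_expr (tree expr)
{
  /* Point input_location at EXPR for any diagnostics emitted in this
     scope; the previous location is restored when ILS is destroyed.
     Reserved locations (UNKNOWN_LOCATION, BUILTINS_LOCATION) are ignored
     by the sentinel and leave input_location untouched.  */
  iloc_sentinel ils (EXPR_LOCATION (expr));
  error ("example diagnostic carrying the location of the expression");
}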
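Usage note (not part of the patch): the ovl_iterator/lkp_iterator classes forked above walk a (possibly nested) OVERLOAD chain such as a name-lookup result. A minimal sketch, assuming FNS is NULL_TREE, a single decl, or an OVERLOAD; the helper name first_visible_fn is hypothetical:

static tree
first_visible_fn (tree fns)
{
  /* lkp_iterator flattens the two-dimensional overload sets produced by
     lookup; hidden_p () skips OVL_HIDDEN_P nodes.  The loop simply does
     not execute for NULL_TREE.  */
  for (lkp_iterator iter (fns); iter; ++iter)
    if (!iter.hidden_p ())
      return *iter;

  return NULL_TREE;
}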
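Usage note (not part of the patch): releasing_vec (declared above, together with its vec_safe_push/vec_safe_length overloads) is an RAII wrapper around make_tree_vector ()/release_tree_vector (). A minimal sketch with hypothetical tree arguments:

static void
collect_args_example (tree arg0, tree arg1)
{
  releasing_vec args;		/* grabs a vector from the free list.  */

  vec_safe_push (args, arg0);	/* releasing_vec overload declared above.  */
  vec_safe_push (args, arg1);

  gcc_assert (vec_safe_length (args) == 2);

  /* ARGS converts implicitly to vec<tree, va_gc> * where needed; the
     vector is handed back to the free list when ARGS goes out of
     scope.  */
}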
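Usage note (not part of the patch): the Compile-namespace entry points declared at the end of rust-tree.h (potential_constant_expression, maybe_constant_init, fold_non_dependent_init) are the hooks the backend can call when folding constant initializers. A minimal sketch of how they are expected to fit together, assuming a caller inside namespace Rust; the helper name fold_constant_initializer and its behavior on failure mirror the C++ front end's helpers of the same names and are assumptions, not part of this patch:

static tree
fold_constant_initializer (tree init, tree decl)
{
  /* Bail out early if the initializer cannot possibly be a constant
     expression; the tree is returned unchanged.  */
  if (!Compile::potential_constant_expression (init))
    return init;

  /* Evaluate INIT as a constant initializer for DECL.  Like the C++
     front end's maybe_constant_init, this is expected to hand back a
     (possibly unchanged) tree rather than fail hard when the value is
     not actually constant.  */
  return Compile::maybe_constant_init (init, decl);
}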