Diffstat (limited to 'gcc/rust/backend')
 gcc/rust/backend/rust-compile-base.cc         |  24
 gcc/rust/backend/rust-compile-base.h          |   2
 gcc/rust/backend/rust-compile-context.h       |  24
 gcc/rust/backend/rust-compile-expr.cc         |  73
 gcc/rust/backend/rust-compile-extern.h        |  46
 gcc/rust/backend/rust-compile-pattern.cc      | 510
 gcc/rust/backend/rust-compile-resolve-path.cc |   7
 gcc/rust/backend/rust-compile-type.cc         |  98
 gcc/rust/backend/rust-compile-type.h          |   5
 gcc/rust/backend/rust-compile-var-decl.h      |  70
 gcc/rust/backend/rust-compile.cc              |  14
 gcc/rust/backend/rust-constexpr.cc            | 698
12 files changed, 1204 insertions(+), 367 deletions(-)
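The largest piece of this change, rust-compile-pattern.cc, fills in the previously rust_unreachable () codegen for rest patterns (..) in tuple, tuple-struct, and slice patterns. The generated checks follow one index rule: subpatterns before the rest are matched against elements 0..lower_count, and subpatterns after it against elements len - upper_count..len, where len is the compile-time capacity for arrays and a size field loaded at runtime for slices. A minimal standalone sketch of that arithmetic (illustrative names only, not gccrs APIs):

#include <cstddef>
#include <cstdio>
#include <vector>

/* Which scrutinee indices the subpatterns of a rest pattern such as
   [a, b, .., z] are checked against.  Lower subpatterns count from the
   front; upper subpatterns count back from the length, mirroring the
   SUBTRACT/ADD index trees emitted in the patch below.  */
static std::vector<std::size_t>
rest_pattern_indices (std::size_t lower_count, std::size_t upper_count,
                      std::size_t len)
{
  std::vector<std::size_t> indices;
  for (std::size_t i = 0; i < lower_count; i++)
    indices.push_back (i);
  for (std::size_t i = 0; i < upper_count; i++)
    indices.push_back (len - upper_count + i);
  return indices;
}

int
main ()
{
  /* [a, b, .., z] against 5 elements checks indices 0, 1 and 4.  For a
     slice the same arithmetic runs at runtime, guarded by the
     len >= lower_count + upper_count size check the patch generates.  */
  for (std::size_t i : rest_pattern_indices (2, 1, 5))
    std::printf ("%zu\n", i);
  return 0;
}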
diff --git a/gcc/rust/backend/rust-compile-base.cc b/gcc/rust/backend/rust-compile-base.cc index 73c34b2..d1db58e 100644 --- a/gcc/rust/backend/rust-compile-base.cc +++ b/gcc/rust/backend/rust-compile-base.cc @@ -32,6 +32,7 @@ #include "rust-type-util.h" #include "rust-compile-implitem.h" #include "rust-attribute-values.h" +#include "rust-attributes.h" #include "rust-immutable-name-resolution-context.h" #include "fold-const.h" @@ -251,25 +252,21 @@ void HIRCompileBase::handle_link_section_attribute_on_fndecl ( tree fndecl, const AST::Attribute &attr) { - if (!attr.has_attr_input ()) + auto msg_str = Analysis::Attributes::extract_string_literal (attr); + + if (!msg_str.has_value ()) { rust_error_at (attr.get_locus (), - "%<link_section%> expects exactly one argment"); + "malformed %<link_section%> attribute input"); return; } - rust_assert (attr.get_attr_input ().get_attr_input_type () - == AST::AttrInput::AttrInputType::LITERAL); - - auto &literal = static_cast<AST::AttrInputLiteral &> (attr.get_attr_input ()); - const auto &msg_str = literal.get_literal ().as_string (); - if (decl_section_name (fndecl)) { rust_warning_at (attr.get_locus (), 0, "section name redefined"); } - set_decl_section_name (fndecl, msg_str.c_str ()); + set_decl_section_name (fndecl, msg_str->c_str ()); } void @@ -416,13 +413,10 @@ HIRCompileBase::handle_must_use_attribute_on_fndecl (tree fndecl, if (attr.has_attr_input ()) { - rust_assert (attr.get_attr_input ().get_attr_input_type () - == AST::AttrInput::AttrInputType::LITERAL); + auto msg_str = Analysis::Attributes::extract_string_literal (attr); + rust_assert (msg_str.has_value ()); - auto &literal - = static_cast<AST::AttrInputLiteral &> (attr.get_attr_input ()); - const auto &msg_str = literal.get_literal ().as_string (); - tree message = build_string (msg_str.size (), msg_str.c_str ()); + tree message = build_string (msg_str->size (), msg_str->c_str ()); value = tree_cons (nodiscard, message, NULL_TREE); } diff --git a/gcc/rust/backend/rust-compile-base.h b/gcc/rust/backend/rust-compile-base.h index e9b8596..3bf26af 100644 --- a/gcc/rust/backend/rust-compile-base.h +++ b/gcc/rust/backend/rust-compile-base.h @@ -56,7 +56,7 @@ protected: TyTy::BaseType *expected, location_t lvalue_locus, location_t rvalue_locus); - tree coerce_to_dyn_object (tree compiled_ref, const TyTy::BaseType *actual, + tree coerce_to_dyn_object (tree compiled_ref, TyTy::BaseType *actual, const TyTy::DynamicObjectType *ty, location_t locus); diff --git a/gcc/rust/backend/rust-compile-context.h b/gcc/rust/backend/rust-compile-context.h index d4a642b..e98bbc2 100644 --- a/gcc/rust/backend/rust-compile-context.h +++ b/gcc/rust/backend/rust-compile-context.h @@ -34,6 +34,10 @@ namespace Compile { struct fncontext { + fncontext (tree fndecl, ::Bvariable *ret_addr, TyTy::BaseType *retty) + : fndecl (fndecl), ret_addr (ret_addr), retty (retty) + {} + tree fndecl; ::Bvariable *ret_addr; TyTy::BaseType *retty; @@ -154,7 +158,7 @@ public: if (it == mono_fns.end ()) mono_fns[dId] = {}; - mono_fns[dId].push_back ({ref, fn}); + mono_fns[dId].emplace_back (ref, fn); } void insert_closure_decl (const TyTy::ClosureType *ref, tree fn) @@ -164,7 +168,7 @@ public: if (it == mono_closure_fns.end ()) mono_closure_fns[dId] = {}; - mono_closure_fns[dId].push_back ({ref, fn}); + mono_closure_fns[dId].emplace_back (ref, fn); } tree lookup_closure_decl (const TyTy::ClosureType *ref) @@ -279,7 +283,7 @@ public: void push_fn (tree fn, ::Bvariable *ret_addr, TyTy::BaseType *retty) { - fn_stack.push_back (fncontext{fn, 
ret_addr, retty}); + fn_stack.emplace_back (fn, ret_addr, retty); } void pop_fn () { fn_stack.pop_back (); } @@ -318,7 +322,13 @@ public: void push_loop_context (Bvariable *var) { loop_value_stack.push_back (var); } - Bvariable *peek_loop_context () { return loop_value_stack.back (); } + bool have_loop_context () const { return !loop_value_stack.empty (); } + + Bvariable *peek_loop_context () + { + rust_assert (!loop_value_stack.empty ()); + return loop_value_stack.back (); + } Bvariable *pop_loop_context () { @@ -332,7 +342,11 @@ public: loop_begin_labels.push_back (label); } - tree peek_loop_begin_label () { return loop_begin_labels.back (); } + tree peek_loop_begin_label () + { + rust_assert (!loop_begin_labels.empty ()); + return loop_begin_labels.back (); + } tree pop_loop_begin_label () { diff --git a/gcc/rust/backend/rust-compile-expr.cc b/gcc/rust/backend/rust-compile-expr.cc index 6433923..0a627f3 100644 --- a/gcc/rust/backend/rust-compile-expr.cc +++ b/gcc/rust/backend/rust-compile-expr.cc @@ -828,6 +828,10 @@ CompileExpr::visit (HIR::BreakExpr &expr) { tree compiled_expr = CompileExpr::Compile (expr.get_expr (), ctx); + translated = error_mark_node; + if (!ctx->have_loop_context ()) + return; + Bvariable *loop_result_holder = ctx->peek_loop_context (); tree result_reference = Backend::var_expression (loop_result_holder, @@ -891,6 +895,10 @@ CompileExpr::visit (HIR::BreakExpr &expr) void CompileExpr::visit (HIR::ContinueExpr &expr) { + translated = error_mark_node; + if (!ctx->have_loop_context ()) + return; + tree label = ctx->peek_loop_begin_label (); if (expr.has_label ()) { @@ -1647,37 +1655,39 @@ CompileExpr::compile_integer_literal (const HIR::LiteralExpr &expr, const TyTy::BaseType *tyty) { rust_assert (expr.get_lit_type () == HIR::Literal::INT); - const auto literal_value = expr.get_literal (); - + const auto &literal_value = expr.get_literal (); tree type = TyTyResolveCompile::compile (ctx, tyty); + std::string s = literal_value.as_string (); + s.erase (std::remove (s.begin (), s.end (), '_'), s.end ()); + + int base = 0; mpz_t ival; - if (mpz_init_set_str (ival, literal_value.as_string ().c_str (), 10) != 0) + if (mpz_init_set_str (ival, s.c_str (), base) != 0) { - rust_error_at (expr.get_locus (), "bad number in literal"); + rust_error_at (expr.get_locus (), "failed to load number literal"); return error_mark_node; } + if (expr.is_negative ()) + mpz_neg (ival, ival); - mpz_t type_min; - mpz_t type_max; + mpz_t type_min, type_max; mpz_init (type_min); mpz_init (type_max); get_type_static_bounds (type, type_min, type_max); - if (expr.is_negative ()) - { - mpz_neg (ival, ival); - } if (mpz_cmp (ival, type_min) < 0 || mpz_cmp (ival, type_max) > 0) { rust_error_at (expr.get_locus (), "integer overflows the respective type %qs", tyty->get_name ().c_str ()); + mpz_clear (type_min); + mpz_clear (type_max); + mpz_clear (ival); return error_mark_node; } tree result = wide_int_to_tree (type, wi::from_mpz (type, ival, true)); - mpz_clear (type_min); mpz_clear (type_max); mpz_clear (ival); @@ -2000,13 +2010,25 @@ CompileExpr::array_copied_expr (location_t expr_locus, return error_mark_node; } - ctx->push_const_context (); - tree capacity_expr = CompileExpr::Compile (elems.get_num_copies_expr (), ctx); - ctx->pop_const_context (); + auto capacity_ty = array_tyty.get_capacity (); + + // Check if capacity is a const type + if (capacity_ty->get_kind () != TyTy::TypeKind::CONST) + { + rust_error_at (array_tyty.get_locus (), + "array capacity is not a const type"); + return 
error_mark_node; + } + + auto *capacity_const = capacity_ty->as_const_type (); - if (!TREE_CONSTANT (capacity_expr)) + rust_assert (capacity_const->const_kind () + == TyTy::BaseConstType::ConstKind::Value); + auto &capacity_value = *static_cast<TyTy::ConstValueType *> (capacity_const); + auto cap_tree = capacity_value.get_value (); + if (error_operand_p (cap_tree) || !TREE_CONSTANT (cap_tree)) { - rust_error_at (expr_locus, "non const num copies %qT", array_type); + rust_error_at (expr_locus, "non const num copies %qT", cap_tree); return error_mark_node; } @@ -2059,9 +2081,9 @@ CompileExpr::array_copied_expr (location_t expr_locus, ctx->push_block (init_block); tree tmp; - tree stmts = Backend::array_initializer (fndecl, init_block, array_type, - capacity_expr, translated_expr, - &tmp, expr_locus); + tree stmts + = Backend::array_initializer (fndecl, init_block, array_type, cap_tree, + translated_expr, &tmp, expr_locus); ctx->add_statement (stmts); tree block = ctx->pop_block (); @@ -2209,11 +2231,10 @@ HIRCompileBase::resolve_unsized_dyn_adjustment ( tree rvalue = expression; location_t rvalue_locus = locus; - const TyTy::BaseType *actual = adjustment.get_actual (); - const TyTy::BaseType *expected = adjustment.get_expected (); + auto actual = adjustment.get_actual (); + auto expected = adjustment.get_expected (); - const TyTy::DynamicObjectType *dyn - = static_cast<const TyTy::DynamicObjectType *> (expected); + const auto dyn = static_cast<const TyTy::DynamicObjectType *> (expected); rust_debug ("resolve_unsized_dyn_adjustment actual={%s} dyn={%s}", actual->debug_str ().c_str (), dyn->debug_str ().c_str ()); @@ -2617,15 +2638,15 @@ CompileExpr::generate_closure_fntype (HIR::ClosureExpr &expr, TyTy::TypeBoundPredicateItem item = TyTy::TypeBoundPredicateItem::error (); if (predicate.get_name ().compare ("FnOnce") == 0) { - item = predicate.lookup_associated_item ("call_once"); + item = predicate.lookup_associated_item ("call_once").value (); } else if (predicate.get_name ().compare ("FnMut") == 0) { - item = predicate.lookup_associated_item ("call_mut"); + item = predicate.lookup_associated_item ("call_mut").value (); } else if (predicate.get_name ().compare ("Fn") == 0) { - item = predicate.lookup_associated_item ("call"); + item = predicate.lookup_associated_item ("call").value (); } else { diff --git a/gcc/rust/backend/rust-compile-extern.h b/gcc/rust/backend/rust-compile-extern.h index d6aa589..a78f9ee 100644 --- a/gcc/rust/backend/rust-compile-extern.h +++ b/gcc/rust/backend/rust-compile-extern.h @@ -24,6 +24,8 @@ #include "rust-compile-type.h" #include "rust-diagnostics.h" #include "rust-hir-full-decls.h" +#include "rust-attributes.h" +#include "rust-attribute-values.h" namespace Rust { namespace Compile { @@ -57,8 +59,7 @@ public: rust_assert (ok); std::string name = item.get_item_name ().as_string (); - // FIXME this is assuming C ABI - std::string asm_name = name; + GGC::Ident asm_name = get_link_name (item); tree type = TyTyResolveCompile::compile (ctx, resolved_type); bool is_external = true; @@ -124,16 +125,7 @@ public: tree compiled_fn_type = TyTyResolveCompile::compile (ctx, fntype); std::string ir_symbol_name = function.get_item_name ().as_string (); - std::string asm_name = function.get_item_name ().as_string (); - if (fntype->get_abi () == ABI::RUST) - { - // then we need to get the canonical path of it and mangle it - auto canonical_path = ctx->get_mappings ().lookup_canonical_path ( - function.get_mappings ().get_nodeid ()); - - ir_symbol_name = canonical_path->get () + 
fntype->subst_as_string (); - asm_name = ctx->mangle_item (fntype, *canonical_path); - } + GGC::Ident asm_name = get_link_name (function); const unsigned int flags = Backend::function_is_declaration; tree fndecl = Backend::function (compiled_fn_type, ir_symbol_name, asm_name, @@ -158,6 +150,36 @@ private: ref_locus (ref_locus) {} + template <typename T> static GGC::Ident get_link_name (T &obj) + { + AST::Attribute *use_attr = nullptr; + + for (auto &attr : obj.get_outer_attrs ()) + { + if (attr.get_path ().as_string () == Values::Attributes::LINK_NAME) + { + // later attributes override earlier ones + // TODO: add warning -- should duplicate + // attributes be folded elsewhere? + use_attr = &attr; + } + } + + if (use_attr) + { + auto link_name + = Analysis::Attributes::extract_string_literal (*use_attr); + + if (!link_name.has_value ()) + rust_error_at (use_attr->get_locus (), + "malformed %<link_name%> attribute input"); + else + return *link_name; + } + + return obj.get_item_name (); + } + TyTy::BaseType *concrete; tree reference; location_t ref_locus; diff --git a/gcc/rust/backend/rust-compile-pattern.cc b/gcc/rust/backend/rust-compile-pattern.cc index fe65921..82333dc 100644 --- a/gcc/rust/backend/rust-compile-pattern.cc +++ b/gcc/rust/backend/rust-compile-pattern.cc @@ -27,6 +27,7 @@ #include "rust-hir-pattern.h" #include "rust-system.h" #include "rust-tyty.h" +#include "tree.h" namespace Rust { namespace Compile { @@ -158,13 +159,15 @@ CompilePatternCheckExpr::visit (HIR::RangePattern &pattern) pattern.get_mappings (), pattern.get_locus (), ctx); + ComparisonOperator upper_cmp = pattern.is_inclusive_range () + ? ComparisonOperator::LESS_OR_EQUAL + : ComparisonOperator::LESS_THAN; tree check_lower = Backend::comparison_expression (ComparisonOperator::GREATER_OR_EQUAL, match_scrutinee_expr, lower, pattern.get_locus ()); tree check_upper - = Backend::comparison_expression (ComparisonOperator::LESS_OR_EQUAL, - match_scrutinee_expr, upper, + = Backend::comparison_expression (upper_cmp, match_scrutinee_expr, upper, pattern.get_locus ()); check_expr = Backend::arithmetic_or_logical_expression ( ArithmeticOrLogicalOperator::BITWISE_AND, check_lower, check_upper, @@ -354,17 +357,51 @@ CompilePatternCheckExpr::visit (HIR::TupleStructPattern &pattern) HIR::TupleStructItems &items = pattern.get_items (); switch (items.get_item_type ()) { - case HIR::TupleStructItems::RANGED: + case HIR::TupleStructItems::HAS_REST: { - // TODO - rust_unreachable (); + HIR::TupleStructItemsHasRest &items_has_rest + = static_cast<HIR::TupleStructItemsHasRest &> (items); + size_t num_patterns = items_has_rest.get_lower_patterns ().size () + + items_has_rest.get_upper_patterns ().size (); + + // enums cases shouldn't reach here + rust_assert (num_patterns <= variant->num_fields () + && (!adt->is_enum ())); + + size_t tuple_field_index = 0; + for (auto &pattern : items_has_rest.get_lower_patterns ()) + { + tree field_expr + = Backend::struct_field_expression (match_scrutinee_expr, + tuple_field_index++, + pattern->get_locus ()); + tree check_expr_sub + = CompilePatternCheckExpr::Compile (*pattern, field_expr, ctx); + check_expr = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, + check_expr_sub, pattern->get_locus ()); + } + tuple_field_index = variant->num_fields () + - items_has_rest.get_upper_patterns ().size (); + for (auto &pattern : items_has_rest.get_upper_patterns ()) + { + tree field_expr + = Backend::struct_field_expression (match_scrutinee_expr, + 
tuple_field_index++, + pattern->get_locus ()); + tree check_expr_sub + = CompilePatternCheckExpr::Compile (*pattern, field_expr, ctx); + check_expr = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, + check_expr_sub, pattern->get_locus ()); + } } break; - case HIR::TupleStructItems::MULTIPLE: + case HIR::TupleStructItems::NO_REST: { - HIR::TupleStructItemsNoRange &items_no_range - = static_cast<HIR::TupleStructItemsNoRange &> (items); + HIR::TupleStructItemsNoRest &items_no_range + = static_cast<HIR::TupleStructItemsNoRest &> (items); rust_assert (items_no_range.get_patterns ().size () == variant->num_fields ()); @@ -428,10 +465,10 @@ CompilePatternCheckExpr::visit (HIR::TuplePattern &pattern) switch (pattern.get_items ().get_item_type ()) { - case HIR::TuplePatternItems::RANGED: + case HIR::TuplePatternItems::HAS_REST: { auto &items - = static_cast<HIR::TuplePatternItemsRanged &> (pattern.get_items ()); + = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ()); size_t tuple_field_index = 0; // lookup the type to find out number of fields @@ -477,10 +514,10 @@ CompilePatternCheckExpr::visit (HIR::TuplePattern &pattern) } break; - case HIR::TuplePatternItems::MULTIPLE: + case HIR::TuplePatternItems::NO_REST: { - auto &items = static_cast<HIR::TuplePatternItemsMultiple &> ( - pattern.get_items ()); + auto &items + = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ()); size_t tuple_field_index = 0; for (auto &pat : items.get_patterns ()) @@ -532,25 +569,14 @@ CompilePatternCheckExpr::visit (HIR::SlicePattern &pattern) || lookup->get_kind () == TyTy::TypeKind::SLICE || lookup->get_kind () == TyTy::REF); - size_t array_element_index = 0; + // function ptr that points to either array_index_expression or + // slice_index_expression depending on the scrutinee's type + tree (*scrutinee_index_expr_func) (tree, tree, location_t) = nullptr; + switch (lookup->get_kind ()) { case TyTy::TypeKind::ARRAY: - for (auto &pattern_member : pattern.get_items ()) - { - tree array_index_tree - = Backend::size_constant_expression (array_element_index++); - tree element_expr - = Backend::array_index_expression (match_scrutinee_expr, - array_index_tree, - pattern.get_locus ()); - tree check_expr_sub - = CompilePatternCheckExpr::Compile (*pattern_member, element_expr, - ctx); - check_expr = Backend::arithmetic_or_logical_expression ( - ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, - check_expr_sub, pattern.get_locus ()); - } + scrutinee_index_expr_func = Backend::array_index_expression; break; case TyTy::TypeKind::SLICE: rust_sorry_at ( @@ -560,25 +586,81 @@ CompilePatternCheckExpr::visit (HIR::SlicePattern &pattern) case TyTy::TypeKind::REF: { rust_assert (RS_DST_FLAG_P (TREE_TYPE (match_scrutinee_expr))); + scrutinee_index_expr_func = Backend::slice_index_expression; tree size_field = Backend::struct_field_expression (match_scrutinee_expr, 1, pattern.get_locus ()); - // First compare the size - check_expr = Backend::comparison_expression ( - ComparisonOperator::EQUAL, size_field, - build_int_cst (size_type_node, pattern.get_items ().size ()), - pattern.get_locus ()); + // for slices, generate a dynamic size comparison expression tree + // because size checking is done at runtime. 
+ switch (pattern.get_items ().get_item_type ()) + { + case HIR::SlicePatternItems::ItemType::NO_REST: + { + auto &items = static_cast<HIR::SlicePatternItemsNoRest &> ( + pattern.get_items ()); + check_expr = Backend::comparison_expression ( + ComparisonOperator::EQUAL, size_field, + build_int_cst (size_type_node, items.get_patterns ().size ()), + pattern.get_locus ()); + } + break; + case HIR::SlicePatternItems::ItemType::HAS_REST: + { + auto &items = static_cast<HIR::SlicePatternItemsHasRest &> ( + pattern.get_items ()); + auto pattern_min_cap = items.get_lower_patterns ().size () + + items.get_upper_patterns ().size (); + check_expr = Backend::comparison_expression ( + ComparisonOperator::GREATER_OR_EQUAL, size_field, + build_int_cst (size_type_node, pattern_min_cap), + pattern.get_locus ()); + } + break; + } + } + break; + default: + rust_unreachable (); + } + + rust_assert (scrutinee_index_expr_func != nullptr); - // Then compare each element in the slice pattern - for (auto &pattern_member : pattern.get_items ()) + // Generate tree to compare every element within array/slice + size_t element_index = 0; + switch (pattern.get_items ().get_item_type ()) + { + case HIR::SlicePatternItems::ItemType::NO_REST: + { + auto &items + = static_cast<HIR::SlicePatternItemsNoRest &> (pattern.get_items ()); + for (auto &pattern_member : items.get_patterns ()) { - tree slice_index_tree - = Backend::size_constant_expression (array_element_index++); + tree index_tree + = Backend::size_constant_expression (element_index++); tree element_expr - = Backend::slice_index_expression (match_scrutinee_expr, - slice_index_tree, - pattern.get_locus ()); + = scrutinee_index_expr_func (match_scrutinee_expr, index_tree, + pattern.get_locus ()); + tree check_expr_sub + = CompilePatternCheckExpr::Compile (*pattern_member, element_expr, + ctx); + check_expr = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, + check_expr_sub, pattern.get_locus ()); + } + break; + } + case HIR::SlicePatternItems::ItemType::HAS_REST: + { + auto &items + = static_cast<HIR::SlicePatternItemsHasRest &> (pattern.get_items ()); + for (auto &pattern_member : items.get_lower_patterns ()) + { + tree index_tree + = Backend::size_constant_expression (element_index++); + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, index_tree, + pattern.get_locus ()); tree check_expr_sub = CompilePatternCheckExpr::Compile (*pattern_member, element_expr, ctx); @@ -586,10 +668,82 @@ CompilePatternCheckExpr::visit (HIR::SlicePattern &pattern) ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, check_expr_sub, pattern.get_locus ()); } + + // handle codegen for upper patterns differently for both types + switch (lookup->get_kind ()) + { + case TyTy::TypeKind::ARRAY: + { + // for array type scrutinee, we can simply get the capacity as a + // const and calculate how many elements to skip + auto array_ty = static_cast<TyTy::ArrayType *> (lookup); + auto capacity_ty = array_ty->get_capacity (); + + rust_assert (capacity_ty->get_kind () == TyTy::TypeKind::CONST); + auto *capacity_const = capacity_ty->as_const_type (); + rust_assert (capacity_const->const_kind () + == TyTy::BaseConstType::ConstKind::Value); + auto &capacity_value + = *static_cast<TyTy::ConstValueType *> (capacity_const); + auto cap_tree = capacity_value.get_value (); + + rust_assert (!error_operand_p (cap_tree)); + + size_t cap_wi = (size_t) wi::to_wide (cap_tree).to_uhwi (); + element_index = cap_wi - items.get_upper_patterns 
().size (); + for (auto &pattern_member : items.get_upper_patterns ()) + { + tree index_tree + = Backend::size_constant_expression (element_index++); + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, + index_tree, + pattern.get_locus ()); + tree check_expr_sub + = CompilePatternCheckExpr::Compile (*pattern_member, + element_expr, ctx); + check_expr = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, + check_expr_sub, pattern.get_locus ()); + } + } + break; + case TyTy::TypeKind::REF: + { + // for slice type scrutinee, size is dynamic, so number of + // elements to skip is calculated at runtime + tree slice_size + = Backend::struct_field_expression (match_scrutinee_expr, 1, + pattern.get_locus ()); + tree upper_patterns_size = Backend::size_constant_expression ( + items.get_upper_patterns ().size ()); + tree index_tree = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::SUBTRACT, slice_size, + upper_patterns_size, pattern.get_locus ()); + for (auto &pattern_member : items.get_upper_patterns ()) + { + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, + index_tree, + pattern.get_locus ()); + tree check_expr_sub + = CompilePatternCheckExpr::Compile (*pattern_member, + element_expr, ctx); + check_expr = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::BITWISE_AND, check_expr, + check_expr_sub, pattern.get_locus ()); + index_tree = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::ADD, index_tree, + Backend::size_constant_expression (1), + pattern.get_locus ()); + } + } + break; + default: + rust_unreachable (); + } } break; - default: - rust_unreachable (); } } @@ -628,25 +782,54 @@ CompilePatternBindings::visit (HIR::TupleStructPattern &pattern) HIR::TupleStructItems &items = pattern.get_items (); switch (items.get_item_type ()) { - case HIR::TupleStructItems::RANGED: + case HIR::TupleStructItems::HAS_REST: { - // TODO - rust_unreachable (); + HIR::TupleStructItemsHasRest &items_has_rest + = static_cast<HIR::TupleStructItemsHasRest &> (items); + size_t num_patterns = items_has_rest.get_lower_patterns ().size () + + items_has_rest.get_upper_patterns ().size (); + + // enums cases shouldn't reach here + rust_assert (num_patterns <= variant->num_fields () + && (!adt->is_enum ())); + + size_t tuple_field_index = 0; + for (auto &pattern : items_has_rest.get_lower_patterns ()) + { + tree binding + = Backend::struct_field_expression (match_scrutinee_expr, + tuple_field_index++, + pattern->get_locus ()); + + CompilePatternBindings::Compile (*pattern, binding, ctx); + } + + tuple_field_index = variant->num_fields () + - items_has_rest.get_upper_patterns ().size (); + + for (auto &pattern : items_has_rest.get_upper_patterns ()) + { + tree binding + = Backend::struct_field_expression (match_scrutinee_expr, + tuple_field_index++, + pattern->get_locus ()); + + CompilePatternBindings::Compile (*pattern, binding, ctx); + } } break; - case HIR::TupleStructItems::MULTIPLE: + case HIR::TupleStructItems::NO_REST: { - HIR::TupleStructItemsNoRange &items_no_range - = static_cast<HIR::TupleStructItemsNoRange &> (items); - - rust_assert (items_no_range.get_patterns ().size () + HIR::TupleStructItemsNoRest &items_no_rest + = static_cast<HIR::TupleStructItemsNoRest &> (items); + rust_assert (items_no_rest.get_patterns ().size () == variant->num_fields ()); if (adt->is_enum ()) { size_t tuple_field_index = 0; - for (auto &pattern : 
items_no_range.get_patterns ()) + for (auto &pattern : items_no_rest.get_patterns ()) { tree payload_accessor_union = Backend::struct_field_expression (match_scrutinee_expr, 1, @@ -668,12 +851,10 @@ CompilePatternBindings::visit (HIR::TupleStructPattern &pattern) else { size_t tuple_field_index = 0; - for (auto &pattern : items_no_range.get_patterns ()) + for (auto &pattern : items_no_rest.get_patterns ()) { - tree variant_accessor = match_scrutinee_expr; - tree binding - = Backend::struct_field_expression (variant_accessor, + = Backend::struct_field_expression (match_scrutinee_expr, tuple_field_index++, pattern->get_locus ()); @@ -777,8 +958,9 @@ CompilePatternBindings::visit (HIR::StructPattern &pattern) rust_assert (ok); } - rust_assert (variant->get_variant_type () - == TyTy::VariantDef::VariantType::STRUCT); + rust_assert ( + variant->get_variant_type () == TyTy::VariantDef::VariantType::STRUCT + || variant->get_variant_type () == TyTy::VariantDef::VariantType::TUPLE); auto &struct_pattern_elems = pattern.get_struct_pattern_elems (); for (auto &field : struct_pattern_elems.get_struct_pattern_fields ()) @@ -843,11 +1025,11 @@ CompilePatternBindings::visit (HIR::TuplePattern &pattern) switch (pattern.get_items ().get_item_type ()) { - case HIR::TuplePatternItems::ItemType::RANGED: + case HIR::TuplePatternItems::ItemType::HAS_REST: { size_t tuple_idx = 0; auto &items - = static_cast<HIR::TuplePatternItemsRanged &> (pattern.get_items ()); + = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ()); auto &items_lower = items.get_lower_patterns (); auto &items_upper = items.get_upper_patterns (); @@ -887,11 +1069,11 @@ CompilePatternBindings::visit (HIR::TuplePattern &pattern) return; } - case HIR::TuplePatternItems::ItemType::MULTIPLE: + case HIR::TuplePatternItems::ItemType::NO_REST: { size_t tuple_idx = 0; - auto &items = static_cast<HIR::TuplePatternItemsMultiple &> ( - pattern.get_items ()); + auto &items + = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ()); for (auto &sub : items.get_patterns ()) { @@ -930,43 +1112,132 @@ CompilePatternBindings::visit (HIR::SlicePattern &pattern) || lookup->get_kind () == TyTy::TypeKind::SLICE || lookup->get_kind () == TyTy::REF); - size_t array_element_index = 0; + // function ptr that points to either array_index_expression or + // slice_index_expression depending on the scrutinee's type + tree (*scrutinee_index_expr_func) (tree, tree, location_t) = nullptr; + switch (lookup->get_kind ()) { case TyTy::TypeKind::ARRAY: - for (auto &pattern_member : pattern.get_items ()) - { - tree array_index_tree - = Backend::size_constant_expression (array_element_index++); - tree element_expr - = Backend::array_index_expression (match_scrutinee_expr, - array_index_tree, - pattern.get_locus ()); - CompilePatternBindings::Compile (*pattern_member, element_expr, ctx); - } + scrutinee_index_expr_func = Backend::array_index_expression; break; case TyTy::TypeKind::SLICE: - rust_sorry_at ( - pattern.get_locus (), - "SlicePattern matching against non-ref slices are not yet supported"); + rust_sorry_at (pattern.get_locus (), + "SlicePattern matching against non-ref slices are " + "not yet supported"); break; case TyTy::TypeKind::REF: + scrutinee_index_expr_func = Backend::slice_index_expression; + break; + default: + rust_unreachable (); + } + + rust_assert (scrutinee_index_expr_func != nullptr); + + size_t element_index = 0; + + switch (pattern.get_items ().get_item_type ()) + { + case HIR::SlicePatternItems::ItemType::NO_REST: { - for (auto 
&pattern_member : pattern.get_items ()) + auto &items + = static_cast<HIR::SlicePatternItemsNoRest &> (pattern.get_items ()); + for (auto &pattern_member : items.get_patterns ()) { - tree slice_index_tree - = Backend::size_constant_expression (array_element_index++); + tree index_tree + = Backend::size_constant_expression (element_index++); tree element_expr - = Backend::slice_index_expression (match_scrutinee_expr, - slice_index_tree, - pattern.get_locus ()); + = scrutinee_index_expr_func (match_scrutinee_expr, index_tree, + pattern.get_locus ()); CompilePatternBindings::Compile (*pattern_member, element_expr, ctx); } - break; } - default: - rust_unreachable (); + break; + case HIR::SlicePatternItems::ItemType::HAS_REST: + { + auto &items + = static_cast<HIR::SlicePatternItemsHasRest &> (pattern.get_items ()); + for (auto &pattern_member : items.get_lower_patterns ()) + { + tree index_tree + = Backend::size_constant_expression (element_index++); + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, index_tree, + pattern.get_locus ()); + CompilePatternBindings::Compile (*pattern_member, element_expr, + ctx); + } + + // handle codegen for upper patterns differently for both types + switch (lookup->get_kind ()) + { + case TyTy::TypeKind::ARRAY: + { + auto array_ty = static_cast<TyTy::ArrayType *> (lookup); + auto capacity_ty = array_ty->get_capacity (); + + rust_assert (capacity_ty->get_kind () == TyTy::TypeKind::CONST); + auto *capacity_const = capacity_ty->as_const_type (); + rust_assert (capacity_const->const_kind () + == TyTy::BaseConstType::ConstKind::Value); + auto &capacity_value + = *static_cast<TyTy::ConstValueType *> (capacity_const); + auto cap_tree = capacity_value.get_value (); + + rust_assert (!error_operand_p (cap_tree)); + + size_t cap_wi = (size_t) wi::to_wide (cap_tree).to_uhwi (); + element_index = cap_wi - items.get_upper_patterns ().size (); + for (auto &pattern_member : items.get_upper_patterns ()) + { + tree index_tree + = Backend::size_constant_expression (element_index++); + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, + index_tree, + pattern.get_locus ()); + CompilePatternBindings::Compile (*pattern_member, + element_expr, ctx); + } + } + break; + case TyTy::TypeKind::SLICE: + rust_sorry_at (pattern.get_locus (), + "SlicePattern matching against non-ref slices are " + "not yet supported"); + break; + case TyTy::TypeKind::REF: + { + tree slice_size + = Backend::struct_field_expression (match_scrutinee_expr, 1, + pattern.get_locus ()); + tree upper_patterns_size = Backend::size_constant_expression ( + items.get_upper_patterns ().size ()); + tree index_tree = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::SUBTRACT, slice_size, + upper_patterns_size, pattern.get_locus ()); + for (auto &pattern_member : items.get_upper_patterns ()) + { + tree element_expr + = scrutinee_index_expr_func (match_scrutinee_expr, + index_tree, + pattern.get_locus ()); + CompilePatternBindings::Compile (*pattern_member, + element_expr, ctx); + index_tree = Backend::arithmetic_or_logical_expression ( + ArithmeticOrLogicalOperator::ADD, index_tree, + Backend::size_constant_expression (1), + pattern.get_locus ()); + } + } + break; + default: + rust_unreachable (); + } + } + break; } } @@ -979,6 +1250,11 @@ CompilePatternLet::visit (HIR::IdentifierPattern &pattern) rust_assert ( ctx->lookup_var_decl (pattern.get_mappings ().get_hirid (), &var)); + if (pattern.get_is_ref ()) + { + init_expr = address_expression (init_expr, 
EXPR_LOCATION (init_expr)); + } + auto fnctx = ctx->peek_fn (); if (ty->is_unit ()) { @@ -990,6 +1266,11 @@ CompilePatternLet::visit (HIR::IdentifierPattern &pattern) } else { + if (pattern.has_subpattern ()) + { + CompilePatternLet::Compile (&pattern.get_subpattern (), init_expr, ty, + rval_locus, ctx); + } auto s = Backend::init_statement (fnctx.fndecl, var, init_expr); ctx->add_statement (s); } @@ -1013,22 +1294,65 @@ CompilePatternLet::visit (HIR::TuplePattern &pattern) { rust_assert (pattern.has_tuple_pattern_items ()); - tree tuple_type = TyTyResolveCompile::compile (ctx, ty); + bool has_by_ref = false; + auto check_refs + = [] (const std::vector<std::unique_ptr<HIR::Pattern>> &patterns) { + for (const auto &sub : patterns) + { + switch (sub->get_pattern_type ()) + { + case HIR::Pattern::PatternType::IDENTIFIER: + { + auto id = static_cast<HIR::IdentifierPattern *> (sub.get ()); + if (id->get_is_ref ()) + return true; + break; + } + case HIR::Pattern::PatternType::REFERENCE: + return true; + default: + break; + } + } + return false; + }; + switch (pattern.get_items ().get_item_type ()) + { + case HIR::TuplePatternItems::ItemType::NO_REST: + { + auto &items + = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ()); + has_by_ref = check_refs (items.get_patterns ()); + break; + } + case HIR::TuplePatternItems::ItemType::HAS_REST: + { + auto &items + = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ()); + has_by_ref = check_refs (items.get_lower_patterns ()) + || check_refs (items.get_upper_patterns ()); + break; + } + default: + break; + } + + tree rhs_tuple_type = TYPE_MAIN_VARIANT (TREE_TYPE (init_expr)); tree init_stmt; Bvariable *tmp_var = Backend::temporary_variable (ctx->peek_fn ().fndecl, NULL_TREE, - tuple_type, init_expr, false, + rhs_tuple_type, init_expr, has_by_ref, pattern.get_locus (), &init_stmt); tree access_expr = Backend::var_expression (tmp_var, pattern.get_locus ()); ctx->add_statement (init_stmt); switch (pattern.get_items ().get_item_type ()) { - case HIR::TuplePatternItems::ItemType::RANGED: + case HIR::TuplePatternItems::ItemType::HAS_REST: { size_t tuple_idx = 0; auto &items - = static_cast<HIR::TuplePatternItemsRanged &> (pattern.get_items ()); + = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ()); auto &items_lower = items.get_lower_patterns (); auto &items_upper = items.get_upper_patterns (); @@ -1069,11 +1393,11 @@ CompilePatternLet::visit (HIR::TuplePattern &pattern) return; } - case HIR::TuplePatternItems::ItemType::MULTIPLE: + case HIR::TuplePatternItems::ItemType::NO_REST: { size_t tuple_idx = 0; - auto &items = static_cast<HIR::TuplePatternItemsMultiple &> ( - pattern.get_items ()); + auto &items + = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ()); for (auto &sub : items.get_patterns ()) { diff --git a/gcc/rust/backend/rust-compile-resolve-path.cc b/gcc/rust/backend/rust-compile-resolve-path.cc index f3b9dc2..c33d0b0 100644 --- a/gcc/rust/backend/rust-compile-resolve-path.cc +++ b/gcc/rust/backend/rust-compile-resolve-path.cc @@ -97,8 +97,11 @@ ResolvePathRef::attempt_constructor_expression_lookup ( // this can only be for discriminant variants the others are built up // using call-expr or struct-init - rust_assert (variant->get_variant_type () - == TyTy::VariantDef::VariantType::NUM); + if (variant->get_variant_type () != TyTy::VariantDef::VariantType::NUM) + { + rust_error_at (expr_locus, "variant expected constructor call"); + return error_mark_node; + } // we need the actual gcc type 
tree compiled_adt_type = TyTyResolveCompile::compile (ctx, adt); diff --git a/gcc/rust/backend/rust-compile-type.cc b/gcc/rust/backend/rust-compile-type.cc index 0622954..5b00afa 100644 --- a/gcc/rust/backend/rust-compile-type.cc +++ b/gcc/rust/backend/rust-compile-type.cc @@ -136,13 +136,31 @@ TyTyResolveCompile::visit (const TyTy::InferType &type) } void -TyTyResolveCompile::visit (const TyTy::ParamType &) +TyTyResolveCompile::visit (const TyTy::ParamType &type) { translated = error_mark_node; } void -TyTyResolveCompile::visit (const TyTy::ConstType &) +TyTyResolveCompile::visit (const TyTy::ConstParamType &type) +{ + translated = error_mark_node; +} + +void +TyTyResolveCompile::visit (const TyTy::ConstValueType &type) +{ + translated = error_mark_node; +} + +void +TyTyResolveCompile::visit (const TyTy::ConstInferType &type) +{ + translated = error_mark_node; +} + +void +TyTyResolveCompile::visit (const TyTy::ConstErrorType &type) { translated = error_mark_node; } @@ -186,8 +204,8 @@ TyTyResolveCompile::visit (const TyTy::ClosureType &type) // this should be based on the closure move-ability tree decl_type = TyTyResolveCompile::compile (ctx, lookup); tree capture_type = build_reference_type (decl_type); - fields.push_back (Backend::typed_identifier (mappings_name, capture_type, - type.get_ident ().locus)); + fields.emplace_back (mappings_name, capture_type, + type.get_ident ().locus); } tree type_record = Backend::struct_type (fields); @@ -217,8 +235,7 @@ TyTyResolveCompile::visit (const TyTy::FnType &type) auto ret = TyTyResolveCompile::compile (ctx, hir_type, trait_object_mode); location_t return_type_locus = ctx->get_mappings ().lookup_location (hir_type->get_ref ()); - results.push_back ( - Backend::typed_identifier ("_", ret, return_type_locus)); + results.emplace_back ("_", ret, return_type_locus); } for (auto ¶m_pair : type.get_params ()) @@ -227,11 +244,10 @@ TyTyResolveCompile::visit (const TyTy::FnType &type) auto compiled_param_type = TyTyResolveCompile::compile (ctx, param_tyty, trait_object_mode); - auto compiled_param = Backend::typed_identifier ( - param_pair.get_pattern ().as_string (), compiled_param_type, - ctx->get_mappings ().lookup_location (param_tyty->get_ref ())); - - parameters.push_back (compiled_param); + parameters.emplace_back (param_pair.get_pattern ().as_string (), + compiled_param_type, + ctx->get_mappings ().lookup_location ( + param_tyty->get_ref ())); } if (!type.is_variadic ()) @@ -277,10 +293,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type) tree compiled_field_ty = TyTyResolveCompile::compile (ctx, field->get_field_type ()); - Backend::typed_identifier f (field->get_name (), compiled_field_ty, - ctx->get_mappings ().lookup_location ( - type.get_ty_ref ())); - fields.push_back (std::move (f)); + fields.emplace_back (field->get_name (), compiled_field_ty, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); } type_record = type.is_union () ? 
Backend::union_type (fields, false) @@ -357,10 +372,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type) == TyTy::VariantDef::VariantType::TUPLE) field_name = "__" + field->get_name (); - Backend::typed_identifier f ( - field_name, compiled_field_ty, - ctx->get_mappings ().lookup_location (type.get_ty_ref ())); - fields.push_back (std::move (f)); + fields.emplace_back (field_name, compiled_field_ty, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); } tree variant_record = Backend::struct_type (fields); @@ -382,10 +396,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type) TyTy::VariantDef *variant = type.get_variants ().at (i++); std::string implicit_variant_name = variant->get_identifier (); - Backend::typed_identifier f (implicit_variant_name, variant_record, - ctx->get_mappings ().lookup_location ( - type.get_ty_ref ())); - enum_fields.push_back (std::move (f)); + enum_fields.emplace_back (implicit_variant_name, variant_record, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); } // @@ -460,10 +473,9 @@ TyTyResolveCompile::visit (const TyTy::TupleType &type) // this, rather than simply emitting the integer, is that this // approach makes it simpler to use a C-only debugger, or // GDB's C mode, when debugging Rust. - Backend::typed_identifier f ("__" + std::to_string (i), compiled_field_ty, - ctx->get_mappings ().lookup_location ( - type.get_ty_ref ())); - fields.push_back (std::move (f)); + fields.emplace_back ("__" + std::to_string (i), compiled_field_ty, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); } tree struct_type_record = Backend::struct_type (fields); @@ -476,8 +488,22 @@ TyTyResolveCompile::visit (const TyTy::ArrayType &type) { tree element_type = TyTyResolveCompile::compile (ctx, type.get_element_type ()); - TyTy::ConstType *const_capacity = type.get_capacity (); - tree folded_capacity_expr = const_capacity->get_value (); + auto const_capacity = type.get_capacity (); + + // Check if capacity is a const type + if (const_capacity->get_kind () != TyTy::TypeKind::CONST) + { + rust_error_at (type.get_locus (), "array capacity is not a const type"); + translated = error_mark_node; + return; + } + + auto *capacity_const = const_capacity->as_const_type (); + + rust_assert (capacity_const->const_kind () + == TyTy::BaseConstType::ConstKind::Value); + auto &capacity_value = *static_cast<TyTy::ConstValueType *> (capacity_const); + auto folded_capacity_expr = capacity_value.get_value (); // build_index_type takes the maximum index, which is one less than // the length. 
@@ -772,17 +798,15 @@ TyTyResolveCompile::create_dyn_obj_record (const TyTy::DynamicObjectType &type) tree uint = Backend::integer_type (true, Backend::get_pointer_size ()); tree uintptr_ty = build_pointer_type (uint); - Backend::typed_identifier f ("pointer", uintptr_ty, - ctx->get_mappings ().lookup_location ( - type.get_ty_ref ())); - fields.push_back (std::move (f)); + fields.emplace_back ("pointer", uintptr_ty, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); tree vtable_size = build_int_cst (size_type_node, items.size ()); tree vtable_type = Backend::array_type (uintptr_ty, vtable_size); - Backend::typed_identifier vtf ("vtable", vtable_type, - ctx->get_mappings ().lookup_location ( - type.get_ty_ref ())); - fields.push_back (std::move (vtf)); + fields.emplace_back ("vtable", vtable_type, + ctx->get_mappings ().lookup_location ( + type.get_ty_ref ())); tree record = Backend::struct_type (fields); RS_DST_FLAG (record) = 1; diff --git a/gcc/rust/backend/rust-compile-type.h b/gcc/rust/backend/rust-compile-type.h index 0675343..d6c3259 100644 --- a/gcc/rust/backend/rust-compile-type.h +++ b/gcc/rust/backend/rust-compile-type.h @@ -50,7 +50,10 @@ public: void visit (const TyTy::ReferenceType &) override; void visit (const TyTy::PointerType &) override; void visit (const TyTy::ParamType &) override; - void visit (const TyTy::ConstType &) override; + void visit (const TyTy::ConstParamType &) override; + void visit (const TyTy::ConstValueType &) override; + void visit (const TyTy::ConstInferType &) override; + void visit (const TyTy::ConstErrorType &) override; void visit (const TyTy::StrType &) override; void visit (const TyTy::NeverType &) override; void visit (const TyTy::PlaceholderType &) override; diff --git a/gcc/rust/backend/rust-compile-var-decl.h b/gcc/rust/backend/rust-compile-var-decl.h index 5c6d145..1f306ad 100644 --- a/gcc/rust/backend/rust-compile-var-decl.h +++ b/gcc/rust/backend/rust-compile-var-decl.h @@ -64,31 +64,79 @@ public: ctx->insert_var_decl (stmt_id, var); vars.push_back (var); + + if (pattern.has_subpattern ()) + { + auto subpattern_vars + = CompileVarDecl::compile (fndecl, translated_type, + &pattern.get_subpattern (), ctx); + vars.insert (vars.end (), subpattern_vars.begin (), + subpattern_vars.end ()); + } } void visit (HIR::TuplePattern &pattern) override { + rust_assert (TREE_CODE (translated_type) == RECORD_TYPE); switch (pattern.get_items ().get_item_type ()) { - case HIR::TuplePatternItems::ItemType::MULTIPLE: + case HIR::TuplePatternItems::ItemType::NO_REST: { - rust_assert (TREE_CODE (translated_type) == RECORD_TYPE); - auto &items = static_cast<HIR::TuplePatternItemsMultiple &> ( + auto &items_no_rest = static_cast<HIR::TuplePatternItemsNoRest &> ( pattern.get_items ()); - size_t offs = 0; - for (auto &sub : items.get_patterns ()) + tree field = TYPE_FIELDS (translated_type); + for (auto &sub : items_no_rest.get_patterns ()) { - tree sub_ty = error_mark_node; - tree field = TYPE_FIELDS (translated_type); - for (size_t i = 0; i < offs; i++) + gcc_assert (field != NULL_TREE); + tree sub_ty = TREE_TYPE (field); + CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx); + field = DECL_CHAIN (field); + } + } + break; + + case HIR::TuplePatternItems::ItemType::HAS_REST: + { + auto &items_has_rest = static_cast<HIR::TuplePatternItemsHasRest &> ( + pattern.get_items ()); + + // count total fields in translated_type + size_t total_fields = 0; + for (tree t = TYPE_FIELDS (translated_type); t; t = DECL_CHAIN (t)) + { + total_fields++; + } + + // 
process lower patterns + tree field = TYPE_FIELDS (translated_type); + for (auto &sub : items_has_rest.get_lower_patterns ()) + { + gcc_assert (field != NULL_TREE); + tree sub_ty = TREE_TYPE (field); + CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx); + field = DECL_CHAIN (field); + } + + // process upper patterns + if (!items_has_rest.get_upper_patterns ().empty ()) + { + size_t upper_start + = total_fields - items_has_rest.get_upper_patterns ().size (); + field = TYPE_FIELDS (translated_type); + for (size_t i = 0; i < upper_start; i++) { field = DECL_CHAIN (field); gcc_assert (field != NULL_TREE); } - sub_ty = TREE_TYPE (field); - CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx); - offs++; + + for (auto &sub : items_has_rest.get_upper_patterns ()) + { + gcc_assert (field != NULL_TREE); + tree sub_ty = TREE_TYPE (field); + CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx); + field = DECL_CHAIN (field); + } } } break; diff --git a/gcc/rust/backend/rust-compile.cc b/gcc/rust/backend/rust-compile.cc index dbd8515..40f16e4 100644 --- a/gcc/rust/backend/rust-compile.cc +++ b/gcc/rust/backend/rust-compile.cc @@ -183,8 +183,7 @@ HIRCompileBase::coercion_site1 (tree rvalue, TyTy::BaseType *rval, } tree -HIRCompileBase::coerce_to_dyn_object (tree compiled_ref, - const TyTy::BaseType *actual, +HIRCompileBase::coerce_to_dyn_object (tree compiled_ref, TyTy::BaseType *actual, const TyTy::DynamicObjectType *ty, location_t locus) { @@ -201,9 +200,7 @@ HIRCompileBase::coerce_to_dyn_object (tree compiled_ref, // __trait_object_ptr // [list of function ptrs] - std::vector<std::pair<Resolver::TraitReference *, HIR::ImplBlock *>> - probed_bounds_for_receiver = Resolver::TypeBoundsProbe::Probe (actual); - + auto probed_bounds_for_receiver = Resolver::TypeBoundsProbe::Probe (actual); tree address_of_compiled_ref = null_pointer_node; if (!actual->is_unit ()) address_of_compiled_ref = address_expression (compiled_ref, locus); @@ -241,12 +238,13 @@ HIRCompileBase::compute_address_for_trait_item ( &receiver_bounds, const TyTy::BaseType *receiver, const TyTy::BaseType *root, location_t locus) { - TyTy::TypeBoundPredicateItem predicate_item + tl::optional<TyTy::TypeBoundPredicateItem> predicate_item = predicate->lookup_associated_item (ref->get_identifier ()); - rust_assert (!predicate_item.is_error ()); + rust_assert (predicate_item.has_value ()); // This is the expected end type - TyTy::BaseType *trait_item_type = predicate_item.get_tyty_for_receiver (root); + TyTy::BaseType *trait_item_type + = predicate_item->get_tyty_for_receiver (root); rust_assert (trait_item_type->get_kind () == TyTy::TypeKind::FNDEF); TyTy::FnType *trait_item_fntype = static_cast<TyTy::FnType *> (trait_item_type); diff --git a/gcc/rust/backend/rust-constexpr.cc b/gcc/rust/backend/rust-constexpr.cc index d524d09..d04f864 100644 --- a/gcc/rust/backend/rust-constexpr.cc +++ b/gcc/rust/backend/rust-constexpr.cc @@ -101,12 +101,54 @@ struct constexpr_global_ctx auto_vec<tree, 16> heap_vars; /* Cleanups that need to be evaluated at the end of CLEANUP_POINT_EXPR. */ vec<tree> *cleanups; + /* If non-null, only allow modification of existing values of the variables + in this set. Set by modifiable_tracker, below. */ + hash_set<tree> *modifiable; /* Number of heap VAR_DECL deallocations. */ unsigned heap_dealloc_count; /* Constructor. 
*/ constexpr_global_ctx () : constexpr_ops_count (0), cleanups (NULL), heap_dealloc_count (0) {} + + tree get_value (tree t) + { + if (tree *p = values.get (t)) + if (*p != void_node) + return *p; + return NULL_TREE; + } + tree *get_value_ptr (tree t, bool initializing) + { + if (modifiable && !modifiable->contains (t)) + return nullptr; + if (tree *p = values.get (t)) + { + if (*p != void_node) + return p; + else if (initializing) + { + *p = NULL_TREE; + return p; + } + } + return nullptr; + } + void put_value (tree t, tree v) + { + bool already_in_map = values.put (t, v); + if (!already_in_map && modifiable) + modifiable->add (t); + } + void destroy_value (tree t) + { + if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL + || TREE_CODE (t) == RESULT_DECL) + values.put (t, void_node); + else + values.remove (t); + } + void clear_value (tree t) { values.remove (t); } }; /* In constexpr.cc */ @@ -457,23 +499,24 @@ save_fundef_copy (tree fun, tree copy) static tree constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p, bool unshare_p); +static tree decl_really_constant_value (tree decl, bool unshare_p /*= true*/); tree decl_constant_value (tree decl, bool unshare_p); static void non_const_var_error (location_t loc, tree r); static tree eval_constant_expression (const constexpr_ctx *ctx, tree, bool, - bool *, bool *, tree * = NULL); + bool *, bool *, tree *jump_target); static tree constexpr_fn_retval (const constexpr_ctx *ctx, tree r); static tree eval_store_expression (const constexpr_ctx *ctx, tree r, bool, - bool *, bool *); + bool *, bool *, tree *jump_target); static tree eval_call_expression (const constexpr_ctx *ctx, tree r, bool, - bool *, bool *); + bool *, bool *, tree *); static tree eval_binary_expression (const constexpr_ctx *ctx, tree r, bool, - bool *, bool *); + bool *, bool *, tree *jump_target); static tree get_function_named_in_call (tree t); @@ -488,7 +531,8 @@ static tree eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool *overflow_p, tree *jump_target); static tree eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p); + bool *non_constant_p, bool *overflow_p, + tree *jump_target); static tree eval_loop_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, bool *overflow_p, @@ -500,7 +544,15 @@ static tree eval_switch_expr (const constexpr_ctx *ctx, tree t, static tree eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/, bool *non_constant_p, - bool *overflow_p); + bool *overflow_p, tree *jump_target); +static bool eval_check_shift_p (location_t loc, const constexpr_ctx *ctx, + enum tree_code code, tree type, tree lhs, + tree rhs); +static tree fold_pointer_plus_expression (const constexpr_ctx *ctx, tree t, + tree lhs, tree rhs, + bool *non_constant_p, + bool *overflow_p, tree *jump_target); +static tree maybe_fold_addr_pointer_plus (tree t); /* Variables and functions to manage constexpr call expansion context. These do not need to be marked for PCH or GC. */ @@ -562,8 +614,9 @@ fold_expr (tree expr) bool non_constant_p = false; bool overflow_p = false; + tree jump_target = NULL_TREE; tree folded = eval_constant_expression (&ctx, expr, false, &non_constant_p, - &overflow_p); + &overflow_p, &jump_target); rust_assert (folded != NULL_TREE); // more logic here to possibly port @@ -589,13 +642,13 @@ same_type_ignoring_tlq_and_bounds_p (tree type1, tree type2) otherwise return NULL_TREE. 
*/ static tree -union_active_member (const constexpr_ctx *ctx, tree t) +union_active_member (const constexpr_ctx *ctx, tree t, tree *jump_target) { constexpr_ctx new_ctx = *ctx; new_ctx.quiet = true; bool non_constant_p = false, overflow_p = false; tree ctor = eval_constant_expression (&new_ctx, t, false, &non_constant_p, - &overflow_p); + &overflow_p, jump_target); if (TREE_CODE (ctor) == CONSTRUCTOR && CONSTRUCTOR_NELTS (ctor) == 1 && CONSTRUCTOR_ELT (ctor, 0)->index && TREE_CODE (CONSTRUCTOR_ELT (ctor, 0)->index) == FIELD_DECL) @@ -607,7 +660,8 @@ union_active_member (const constexpr_ctx *ctx, tree t) static tree fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, - tree op, unsigned HOST_WIDE_INT off, bool *empty_base) + tree op, unsigned HOST_WIDE_INT off, bool *empty_base, + tree *jump_target) { tree optype = TREE_TYPE (op); unsigned HOST_WIDE_INT const_nunits; @@ -654,7 +708,8 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, tree index = size_int (idx + tree_to_uhwi (min_val)); op = build4_loc (loc, ARRAY_REF, TREE_TYPE (optype), op, index, NULL_TREE, NULL_TREE); - return fold_indirect_ref_1 (ctx, loc, type, op, rem, empty_base); + return fold_indirect_ref_1 (ctx, loc, type, op, rem, empty_base, + jump_target); } } /* ((foo *)&struct_with_foo_field)[x] => COMPONENT_REF */ @@ -663,7 +718,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, { if (TREE_CODE (optype) == UNION_TYPE) /* For unions prefer the currently active member. */ - if (tree field = union_active_member (ctx, op)) + if (tree field = union_active_member (ctx, op, jump_target)) { unsigned HOST_WIDE_INT el_sz = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (field))); @@ -672,7 +727,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field, NULL_TREE); if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop, off, - empty_base)) + empty_base, jump_target)) return ret; } } @@ -691,8 +746,9 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, { tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field, NULL_TREE); - if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop, - off - upos, empty_base)) + if (tree ret + = fold_indirect_ref_1 (ctx, loc, type, cop, off - upos, + empty_base, jump_target)) return ret; } } @@ -721,7 +777,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type, static tree rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, - tree op0, bool *empty_base) + tree op0, bool *empty_base, tree *jump_target) { tree sub = op0; tree subtype; @@ -780,7 +836,8 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, tree off = integer_zero_node; canonicalize_obj_off (op, off); gcc_assert (integer_zerop (off)); - return fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base); + return fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base, + jump_target); } } else if (TREE_CODE (sub) == POINTER_PLUS_EXPR @@ -795,7 +852,7 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, tree obj = TREE_OPERAND (op00, 0); canonicalize_obj_off (obj, off); return fold_indirect_ref_1 (ctx, loc, type, obj, tree_to_uhwi (off), - empty_base); + empty_base, jump_target); } } /* *(foo *)fooarrptr => (*fooarrptr)[0] */ @@ -804,8 +861,8 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, { tree type_domain; tree min_val = size_zero_node; - tree 
newsub - = rs_fold_indirect_ref (ctx, loc, TREE_TYPE (subtype), sub, NULL); + tree newsub = rs_fold_indirect_ref (ctx, loc, TREE_TYPE (subtype), sub, + NULL, jump_target); if (newsub) sub = newsub; else @@ -824,7 +881,7 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type, static tree rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, tree *jump_target) { tree orig_op0 = TREE_OPERAND (t, 0); bool empty_base = false; @@ -842,13 +899,13 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, /* First try to simplify it directly. */ tree r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t), - orig_op0, &empty_base); + orig_op0, &empty_base, jump_target); if (!r) { /* If that didn't work, evaluate the operand first. */ - tree op0 - = eval_constant_expression (ctx, orig_op0, - /*lval*/ false, non_constant_p, overflow_p); + tree op0 = eval_constant_expression (ctx, orig_op0, + /*lval*/ false, non_constant_p, + overflow_p, jump_target); /* Don't VERIFY_CONSTANT here. */ if (*non_constant_p) return t; @@ -862,7 +919,7 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, } r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t), op0, - &empty_base); + &empty_base, jump_target); if (r == NULL_TREE) { /* We couldn't fold to a constant value. Make sure it's not @@ -891,7 +948,8 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, } } - r = eval_constant_expression (ctx, r, lval, non_constant_p, overflow_p); + r = eval_constant_expression (ctx, r, lval, non_constant_p, overflow_p, + jump_target); if (*non_constant_p) return t; @@ -917,17 +975,17 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval, static tree eval_logical_expression (const constexpr_ctx *ctx, tree t, tree bailout_value, tree continue_value, bool lval, bool *non_constant_p, - bool *overflow_p) + bool *overflow_p, tree *jump_target) { tree r; tree lhs = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); VERIFY_CONSTANT (lhs); if (tree_int_cst_equal (lhs, bailout_value)) return lhs; gcc_assert (tree_int_cst_equal (lhs, continue_value)); r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), lval, non_constant_p, - overflow_p); + overflow_p, jump_target); VERIFY_CONSTANT (r); return r; } @@ -1243,19 +1301,20 @@ get_or_insert_ctor_field (tree ctor, tree index, int pos_hint = -1) static tree eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { - tree arg1 - = eval_constant_expression (ctx, TREE_OPERAND (t, 0), - /*lval*/ false, non_constant_p, overflow_p); + tree arg1 = eval_constant_expression (ctx, TREE_OPERAND (t, 0), + /*lval*/ false, non_constant_p, + overflow_p, jump_target); VERIFY_CONSTANT (arg1); - tree arg2 - = eval_constant_expression (ctx, TREE_OPERAND (t, 1), - /*lval*/ false, non_constant_p, overflow_p); + tree arg2 = eval_constant_expression (ctx, TREE_OPERAND (t, 1), + /*lval*/ false, non_constant_p, + overflow_p, jump_target); VERIFY_CONSTANT (arg2); - tree arg3 - = eval_constant_expression (ctx, TREE_OPERAND (t, 2), - /*lval*/ false, non_constant_p, overflow_p); + tree arg3 = eval_constant_expression (ctx, TREE_OPERAND (t, 2), + /*lval*/ false, non_constant_p, + overflow_p, jump_target); VERIFY_CONSTANT (arg3); 
location_t loc = EXPR_LOCATION (t); tree type = TREE_TYPE (t); @@ -1280,7 +1339,7 @@ eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t, static tree eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, tree *jump_target) { vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t); bool changed = false; @@ -1331,8 +1390,9 @@ eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval, the active union member now so that we can later detect and diagnose if its initializer attempts to activate another member. */ get_or_insert_ctor_field (ctx->ctor, index); - tree elt = eval_constant_expression (&new_ctx, value, lval, - non_constant_p, overflow_p); + tree elt + = eval_constant_expression (&new_ctx, value, lval, non_constant_p, + overflow_p, jump_target); /* Don't VERIFY_CONSTANT here. */ if (ctx->quiet && *non_constant_p) break; @@ -1401,7 +1461,8 @@ eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval, static tree cxx_eval_trinary_expression (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { int i; tree args[3]; @@ -1409,8 +1470,9 @@ cxx_eval_trinary_expression (const constexpr_ctx *ctx, tree t, bool lval, for (i = 0; i < 3; i++) { - args[i] = eval_constant_expression (ctx, TREE_OPERAND (t, i), lval, - non_constant_p, overflow_p); + args[i] + = eval_constant_expression (ctx, TREE_OPERAND (t, i), lval, + non_constant_p, overflow_p, jump_target); VERIFY_CONSTANT (args[i]); } @@ -1555,7 +1617,8 @@ free_constructor (tree t) static tree eval_and_check_array_index (const constexpr_ctx *ctx, tree t, bool allow_one_past, - bool *non_constant_p, bool *overflow_p); + bool *non_constant_p, bool *overflow_p, + tree *jump_target); // forked from gcc/cp/constexpr.cc cxx_eval_array_reference @@ -1564,11 +1627,11 @@ static tree eval_and_check_array_index (const constexpr_ctx *ctx, tree t, static tree eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, tree *jump_target) { tree oldary = TREE_OPERAND (t, 0); - tree ary - = eval_constant_expression (ctx, oldary, lval, non_constant_p, overflow_p); + tree ary = eval_constant_expression (ctx, oldary, lval, non_constant_p, + overflow_p, jump_target); if (*non_constant_p) return t; if (!lval && TREE_CODE (ary) == VIEW_CONVERT_EXPR @@ -1577,8 +1640,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval, ary = TREE_OPERAND (ary, 0); tree oldidx = TREE_OPERAND (t, 1); - tree index - = eval_and_check_array_index (ctx, t, lval, non_constant_p, overflow_p); + tree index = eval_and_check_array_index (ctx, t, lval, non_constant_p, + overflow_p, jump_target); if (*non_constant_p) return t; @@ -1680,7 +1743,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval, new_ctx.ctor = build_constructor (elem_type, NULL); ctx = &new_ctx; } - t = eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p); + t = eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p, + jump_target); if (!SCALAR_TYPE_P (elem_type) && t != ctx->ctor) free_constructor (ctx->ctor); return t; @@ -1693,7 +1757,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval, static tree eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, 
bool *overflow_p, + tree *jump_target) { unsigned HOST_WIDE_INT i; tree field; @@ -1701,7 +1766,7 @@ eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval, tree part = TREE_OPERAND (t, 1); tree orig_whole = TREE_OPERAND (t, 0); tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); if (INDIRECT_REF_P (whole) && integer_zerop (TREE_OPERAND (whole, 0))) { if (!ctx->quiet) @@ -1790,8 +1855,8 @@ eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval, // back to handle this to assign suitable value to value before sending it in // eval_constant_expression below // value = build_value_init (TREE_TYPE (t), tf_warning_or_error); - return eval_constant_expression (ctx, value, lval, non_constant_p, - overflow_p); + return eval_constant_expression (ctx, value, lval, non_constant_p, overflow_p, + jump_target); } /* Subroutine of cxx_eval_statement_list. Determine whether the statement @@ -1850,7 +1915,7 @@ label_matches (const constexpr_ctx *ctx, tree *jump_target, tree stmt) static tree eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, bool *non_constant_p, bool *overflow_p, - tree *jump_target /* = NULL */) + tree *jump_target) { if (jump_target && *jump_target) { @@ -1921,34 +1986,44 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, { r = DECL_VALUE_EXPR (t); return eval_constant_expression (ctx, r, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); } /* fall through */ case CONST_DECL: - { - /* We used to not check lval for CONST_DECL, but darwin.cc uses - CONST_DECL for aggregate constants. */ - if (lval) - return t; - else if (t == ctx->object) - return ctx->ctor; - if (VAR_P (t)) - if (tree *p = ctx->global->values.get (t)) - if (*p != NULL_TREE) - { - r = *p; - break; - } + /* We used to not check lval for CONST_DECL, but darwin.cc uses + CONST_DECL for aggregate constants. */ + if (lval) + return t; + else if (t == ctx->object) + return ctx->ctor; + if (VAR_P (t)) + { + if (tree v = ctx->global->get_value (t)) + { + r = v; + break; + } + } + if (COMPLETE_TYPE_P (TREE_TYPE (t)) + && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/ false)) + { + /* If the class is empty, we aren't actually loading anything. 
*/ + r = build_constructor (TREE_TYPE (t), NULL); + TREE_CONSTANT (r) = true; + } + else if (ctx->strict) + r = decl_really_constant_value (t, /*unshare_p=*/false); + else r = decl_constant_value (t, /*unshare_p=*/false); - if (TREE_CODE (r) == TARGET_EXPR - && TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR) - r = TARGET_EXPR_INITIAL (r); - if (DECL_P (r)) - { + if (TREE_CODE (r) == TARGET_EXPR + && TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR) + r = TARGET_EXPR_INITIAL (r); + if (DECL_P (r) && !(VAR_P (t) && TYPE_REF_P (TREE_TYPE (t)))) + { + if (!ctx->quiet) non_const_var_error (loc, r); - return r; - } - } + *non_constant_p = true; + } break; case PARM_DECL: @@ -2014,7 +2089,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, case LTGT_EXPR: case RANGE_EXPR: case COMPLEX_EXPR: - r = eval_binary_expression (ctx, t, lval, non_constant_p, overflow_p); + r = eval_binary_expression (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; /* fold can introduce non-IF versions of these; still treat them as @@ -2023,14 +2099,14 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, case TRUTH_ANDIF_EXPR: r = eval_logical_expression (ctx, t, boolean_false_node, boolean_true_node, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); break; case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR: r = eval_logical_expression (ctx, t, boolean_true_node, boolean_false_node, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); break; case TARGET_EXPR: @@ -2079,7 +2155,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, /* Pass false for 'lval' because this indicates initialization of a temporary. */ r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), false, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); if (*non_constant_p) break; /* Adjust the type of the result to the type of the temporary. */ @@ -2096,13 +2172,14 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, break; case CALL_EXPR: - r = eval_call_expression (ctx, t, lval, non_constant_p, overflow_p); + r = eval_call_expression (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case RETURN_EXPR: if (TREE_OPERAND (t, 0) != NULL_TREE) r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); /* FALLTHRU */ case BREAK_STMT: case CONTINUE_STMT: @@ -2135,7 +2212,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, if (tree init = DECL_INITIAL (r)) { init = eval_constant_expression (ctx, init, false, non_constant_p, - overflow_p); + overflow_p, jump_target); /* Don't share a CONSTRUCTOR that might be changed. */ init = unshare_constructor (init); /* Remember that a constant object's constructor has already @@ -2156,12 +2233,13 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, constant without its operand being, and vice versa. 
*/ case MEM_REF: case INDIRECT_REF: - r = rs_eval_indirect_ref (ctx, t, lval, non_constant_p, overflow_p); + r = rs_eval_indirect_ref (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case VEC_PERM_EXPR: - r = cxx_eval_trinary_expression (ctx, t, lval, non_constant_p, - overflow_p); + r = cxx_eval_trinary_expression (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case PAREN_EXPR: @@ -2169,11 +2247,12 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, /* A PAREN_EXPR resulting from __builtin_assoc_barrier has no effect in constant expressions since it's unaffected by -fassociative-math. */ r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); break; case MODIFY_EXPR: - r = eval_store_expression (ctx, t, false, non_constant_p, overflow_p); + r = eval_store_expression (ctx, t, false, non_constant_p, overflow_p, + jump_target); break; case STATEMENT_LIST: @@ -2189,13 +2268,14 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, case OBJ_TYPE_REF: /* Virtual function lookup. We don't need to do anything fancy. */ return eval_constant_expression (ctx, OBJ_TYPE_REF_EXPR (t), lval, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); case EXIT_EXPR: { tree cond = TREE_OPERAND (t, 0); - cond = eval_constant_expression (ctx, cond, /*lval*/ false, - non_constant_p, overflow_p); + cond + = eval_constant_expression (ctx, cond, /*lval*/ false, non_constant_p, + overflow_p, jump_target); VERIFY_CONSTANT (cond); if (integer_nonzerop (cond)) *jump_target = t; @@ -2225,7 +2305,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, else { r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), false, - non_constant_p, overflow_p); + non_constant_p, overflow_p, + jump_target); if (*non_constant_p) break; ctx->global->values.put (t, r); @@ -2239,7 +2320,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, tree oldop = TREE_OPERAND (t, 0); tree op = eval_constant_expression (ctx, oldop, /*lval*/ true, non_constant_p, - overflow_p); + overflow_p, jump_target); /* Don't VERIFY_CONSTANT here. 
*/ if (*non_constant_p) return t; @@ -2284,7 +2365,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, if (lval) { r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval, - non_constant_p, overflow_p); + non_constant_p, overflow_p, + jump_target); if (r == error_mark_node) ; else if (r == TREE_OPERAND (t, 0)) @@ -2303,7 +2385,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, case BIT_NOT_EXPR: case TRUTH_NOT_EXPR: case FIXED_CONVERT_EXPR: - r = eval_unary_expression (ctx, t, lval, non_constant_p, overflow_p); + r = eval_unary_expression (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case LOOP_EXPR: @@ -2318,7 +2401,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, break; case ARRAY_REF: - r = eval_array_reference (ctx, t, lval, non_constant_p, overflow_p); + r = eval_array_reference (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case COMPONENT_REF: @@ -2332,11 +2416,13 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, *non_constant_p = true; return t; } - r = eval_component_reference (ctx, t, lval, non_constant_p, overflow_p); + r = eval_component_reference (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case BIT_FIELD_REF: - r = eval_bit_field_ref (ctx, t, lval, non_constant_p, overflow_p); + r = eval_bit_field_ref (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; case COND_EXPR: @@ -2381,7 +2467,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, case VEC_COND_EXPR: r = eval_vector_conditional_expression (ctx, t, non_constant_p, - overflow_p); + overflow_p, jump_target); break; case TRY_CATCH_EXPR: @@ -2407,7 +2493,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, /* Evaluate the cleanups. */ FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup) eval_constant_expression (ctx, cleanup, false, non_constant_p, - overflow_p); + overflow_p, jump_target); } break; @@ -2417,7 +2503,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, if (!*non_constant_p) /* Also evaluate the cleanup. */ eval_constant_expression (ctx, TREE_OPERAND (t, 1), true, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); break; case CONSTRUCTOR: @@ -2429,7 +2515,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, if (TREE_CONSTANT (t)) return fold (t); } - r = eval_bare_aggregate (ctx, t, lval, non_constant_p, overflow_p); + r = eval_bare_aggregate (ctx, t, lval, non_constant_p, overflow_p, + jump_target); break; /* FALLTHROUGH. */ @@ -2440,7 +2527,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, tree oldop = TREE_OPERAND (t, 0); tree op = eval_constant_expression (ctx, oldop, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); if (*non_constant_p) return t; tree type = TREE_TYPE (t); @@ -2571,7 +2658,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval, return ctor; else return eval_constant_expression (ctx, ctor, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); } /* A placeholder without a referent. 
We can get here when checking whether NSDMIs are noexcept, or in massage_init_elt; @@ -2632,7 +2719,8 @@ is_empty_field (tree decl) static tree eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { constexpr_ctx new_ctx = *ctx; @@ -2653,7 +2741,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, if (!SCALAR_TYPE_P (type)) new_ctx.ctor = new_ctx.object = NULL_TREE; init = eval_constant_expression (&new_ctx, init, false, non_constant_p, - overflow_p); + overflow_p, jump_target); if (*non_constant_p) return t; } @@ -2665,7 +2753,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, as a whole; otherwise, only evaluate the innermost piece to avoid building up unnecessary *_REFs. */ target = eval_constant_expression (ctx, target, true, non_constant_p, - overflow_p); + overflow_p, jump_target); evaluated = true; if (*non_constant_p) return t; @@ -2694,7 +2782,8 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, if (TREE_CODE (probe) == ARRAY_REF) { elt = eval_and_check_array_index (ctx, probe, false, - non_constant_p, overflow_p); + non_constant_p, overflow_p, + jump_target); if (*non_constant_p) return t; } @@ -2718,8 +2807,9 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, object = probe; else { - probe = eval_constant_expression (ctx, probe, true, - non_constant_p, overflow_p); + probe + = eval_constant_expression (ctx, probe, true, non_constant_p, + overflow_p, jump_target); evaluated = true; if (*non_constant_p) return t; @@ -2910,7 +3000,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, if (tree tinit = TARGET_EXPR_INITIAL (init)) init = tinit; init = eval_constant_expression (&new_ctx, init, false, non_constant_p, - overflow_p); + overflow_p, jump_target); /* The hash table might have moved since the get earlier, and the initializer might have mutated the underlying CONSTRUCTORs, so we must recompute VALP. */ @@ -3012,22 +3102,115 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval, Like cxx_eval_unary_expression, except for binary expressions. */ static tree eval_binary_expression (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { + tree r = NULL_TREE; tree orig_lhs = TREE_OPERAND (t, 0); tree orig_rhs = TREE_OPERAND (t, 1); tree lhs, rhs; - lhs = eval_constant_expression (ctx, orig_lhs, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); + /* Don't VERIFY_CONSTANT here, it's unnecessary and will break pointer + subtraction. 
*/ + if (*non_constant_p) + return t; + if (*jump_target) + return NULL_TREE; + rhs = eval_constant_expression (ctx, orig_rhs, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); + if (*non_constant_p) + return t; + if (*jump_target) + return NULL_TREE; location_t loc = EXPR_LOCATION (t); enum tree_code code = TREE_CODE (t); tree type = TREE_TYPE (t); - return fold_binary_loc (loc, code, type, lhs, rhs); + if (code == EQ_EXPR || code == NE_EXPR) + { + bool is_code_eq = (code == EQ_EXPR); + + if (TREE_CODE (lhs) == PTRMEM_CST && TREE_CODE (rhs) == PTRMEM_CST) + { + tree lmem = PTRMEM_CST_MEMBER (lhs); + tree rmem = PTRMEM_CST_MEMBER (rhs); + bool eq; + if (TREE_CODE (lmem) == TREE_CODE (rmem) + && TREE_CODE (lmem) == FIELD_DECL + && TREE_CODE (DECL_CONTEXT (lmem)) == UNION_TYPE + && same_type_p (DECL_CONTEXT (lmem), DECL_CONTEXT (rmem))) + /* If both refer to (possibly different) members of the same union + (12.3), they compare equal. */ + eq = true; + // else + // eq = cp_tree_equal (lhs, rhs); + r = constant_boolean_node (eq == is_code_eq, type); + } + else if ((TREE_CODE (lhs) == PTRMEM_CST || TREE_CODE (rhs) == PTRMEM_CST) + && (null_member_pointer_value_p (lhs) + || null_member_pointer_value_p (rhs))) + r = constant_boolean_node (!is_code_eq, type); + } + if (r == NULL_TREE && TREE_CODE_CLASS (code) == tcc_comparison + && POINTER_TYPE_P (TREE_TYPE (lhs))) + { + if (tree lhso = maybe_fold_addr_pointer_plus (lhs)) + lhs = fold_convert (TREE_TYPE (lhs), lhso); + if (tree rhso = maybe_fold_addr_pointer_plus (rhs)) + rhs = fold_convert (TREE_TYPE (rhs), rhso); + } + if (code == POINTER_PLUS_EXPR && !*non_constant_p && integer_zerop (lhs) + && !integer_zerop (rhs)) + { + if (!ctx->quiet) + error ("arithmetic involving a null pointer in %qE", lhs); + *non_constant_p = true; + return t; + } + else if (code == POINTER_PLUS_EXPR) + { + r = fold_pointer_plus_expression (ctx, t, lhs, rhs, non_constant_p, + overflow_p, jump_target); + if (*jump_target) + return NULL_TREE; + } + + if (r == NULL_TREE) + { + r = fold_binary_loc (loc, code, type, lhs, rhs); + } + + if (r == NULL_TREE && (code == LSHIFT_EXPR || code == RSHIFT_EXPR) + && TREE_CODE (lhs) == INTEGER_CST && TREE_CODE (rhs) == INTEGER_CST + && wi::neg_p (wi::to_wide (rhs))) + { + /* For diagnostics and -fpermissive emulate previous behavior of + handling shifts by negative amount. */ + tree nrhs = const_unop (NEGATE_EXPR, TREE_TYPE (rhs), rhs); + if (nrhs) + r = fold_binary_loc (loc, + code == LSHIFT_EXPR ? RSHIFT_EXPR : LSHIFT_EXPR, + type, lhs, nrhs); + } + + if (r == NULL_TREE) + { + if (lhs == orig_lhs && rhs == orig_rhs) + r = t; + else + r = build2_loc (loc, code, type, lhs, rhs); + } + else if (eval_check_shift_p (loc, ctx, code, type, lhs, rhs)) + *non_constant_p = true; + /* Don't VERIFY_CONSTANT if this might be dealing with a pointer to + a local array in a constexpr function. */ + bool ptr = INDIRECT_TYPE_P (TREE_TYPE (lhs)); + if (!ptr) + VERIFY_CONSTANT (r); + return r; } /* Helper function of cxx_bind_parameters_in_call. 
Return non-NULL @@ -3065,7 +3248,7 @@ addr_of_non_const_var (tree *tp, int *walk_subtrees, void *data) static tree rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun, bool *non_constant_p, bool *overflow_p, - bool *non_constant_args) + bool *non_constant_args, tree *jump_target) { const int nargs = call_expr_nargs (t); tree parms = DECL_ARGUMENTS (fun); @@ -3089,7 +3272,7 @@ rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun, such as this, but here we do the elision differently: we keep the TARGET_EXPR, and use its CONSTRUCTOR as the value of the parm. */ arg = eval_constant_expression (ctx, x, /*lval=*/false, non_constant_p, - overflow_p); + overflow_p, jump_target); /* Don't VERIFY_CONSTANT here. */ if (*non_constant_p && ctx->quiet) break; @@ -3155,7 +3338,8 @@ rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun, static tree eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun, - bool lval, bool *non_constant_p, bool *overflow_p) + bool lval, bool *non_constant_p, bool *overflow_p, + tree *jump_target) { const int nargs = call_expr_nargs (t); tree *args = (tree *) alloca (nargs * sizeof (tree)); @@ -3258,8 +3442,9 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun, || potential_constant_expression (arg)) { bool dummy1 = false, dummy2 = false; - arg - = eval_constant_expression (&new_ctx, arg, false, &dummy1, &dummy2); + tree dummy_jump_target = NULL_TREE; + arg = eval_constant_expression (&new_ctx, arg, false, &dummy1, + &dummy2, &dummy_jump_target); } if (bi_const_p) @@ -3355,7 +3540,7 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun, } return eval_constant_expression (&new_ctx, new_call, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); } // Subroutine of cxx_eval_constant_expression. @@ -3363,7 +3548,7 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun, // evaluation. static tree eval_call_expression (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, tree *jump_target) { location_t loc = EXPR_LOCATION (t); tree fun = get_function_named_in_call (t); @@ -3392,12 +3577,12 @@ eval_call_expression (const constexpr_ctx *ctx, tree t, bool lval, if (fndecl_built_in_p (fun)) return eval_builtin_function_call (ctx, t, fun, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); bool non_constant_args = false; new_call.bindings = rs_bind_parameters_in_call (ctx, t, fun, non_constant_p, overflow_p, - &non_constant_args); + &non_constant_args, jump_target); /* We build up the bindings list before we know whether we already have this call cached. If we don't end up saving these bindings, ggc_free them when @@ -3960,7 +4145,7 @@ constexpr_fn_retval (const constexpr_ctx *ctx, tree body) bool non_constant_p = false; bool overflow_p = false; return eval_constant_expression (ctx, body, false, &non_constant_p, - &overflow_p); + &overflow_p, NULL); } case DECL_EXPR: { @@ -4024,6 +4209,17 @@ constant_value_1 (tree decl, bool, bool, bool unshare_p) return unshare_p ? unshare_expr (decl) : decl; } +/* Like scalar_constant_value, but can also return aggregate initializers. + If UNSHARE_P, return an unshared copy of the initializer. 
*/ + +tree +decl_really_constant_value (tree decl, bool unshare_p /*= true*/) +{ + return constant_value_1 (decl, /*strict_p=*/true, + /*return_aggregate_cst_ok_p=*/true, + /*unshare_p=*/unshare_p); +} + // A more relaxed version of decl_really_constant_value, used by the // common C/C++ code. tree @@ -4037,15 +4233,38 @@ decl_constant_value (tree decl, bool unshare_p) static void non_const_var_error (location_t loc, tree r) { - error_at (loc, - "the value of %qD is not usable in a constant " - "expression", - r); + tree type = TREE_TYPE (r); + /* Avoid error cascade. */ if (DECL_INITIAL (r) == error_mark_node) return; - - // more in cp/constexpr.cc + if (DECL_DECLARED_CONSTEXPR_P (r)) + inform (DECL_SOURCE_LOCATION (r), "%qD used in its own initializer", r); + else if (INTEGRAL_OR_ENUMERATION_TYPE_P (type)) + { + if (!DECL_INITIAL (r) || !TREE_CONSTANT (DECL_INITIAL (r)) + || !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (r)) + inform (DECL_SOURCE_LOCATION (r), + "%qD was not initialized with a constant " + "expression", + r); + else + gcc_unreachable (); + } + else if (TYPE_REF_P (type)) + inform (DECL_SOURCE_LOCATION (r), + "%qD was not initialized with a constant " + "expression", + r); + else + { + if (!DECL_DECLARED_CONSTEXPR_P (r)) + inform (DECL_SOURCE_LOCATION (r), "%qD was not declared %<constexpr%>", + r); + else + inform (DECL_SOURCE_LOCATION (r), + "%qD does not have integral or enumeration type", r); + } } static tree @@ -4322,7 +4541,7 @@ verify_constant (tree t, bool allow_non_constant, bool *non_constant_p, if (!*non_constant_p && !reduced_constant_expression_p (t) && t != void_node) { if (!allow_non_constant) - error ("%q+E is not a constant expression", t); + error_at (EXPR_LOCATION (t), "%qE is not a constant expression", t); *non_constant_p = true; } if (TREE_OVERFLOW_P (t)) @@ -4427,7 +4646,8 @@ diag_array_subscript (location_t loc, const constexpr_ctx *ctx, tree array, static tree get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { tree nelts; if (TREE_CODE (type) == ARRAY_TYPE) @@ -4443,8 +4663,8 @@ get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type, rust_unreachable (); /* For VLAs, the number of elements won't be an integer constant. */ - nelts - = eval_constant_expression (ctx, nelts, false, non_constant_p, overflow_p); + nelts = eval_constant_expression (ctx, nelts, false, non_constant_p, + overflow_p, jump_target); return nelts; } @@ -4456,13 +4676,13 @@ get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type, static tree eval_and_check_array_index (const constexpr_ctx *ctx, tree t, bool allow_one_past, bool *non_constant_p, - bool *overflow_p) + bool *overflow_p, tree *jump_target) { location_t loc = rs_expr_loc_or_input_loc (t); tree ary = TREE_OPERAND (t, 0); t = TREE_OPERAND (t, 1); tree index = eval_constant_expression (ctx, t, allow_one_past, non_constant_p, - overflow_p); + overflow_p, jump_target); VERIFY_CONSTANT (index); if (!tree_fits_shwi_p (index) || tree_int_cst_sgn (index) < 0) { @@ -4473,7 +4693,7 @@ eval_and_check_array_index (const constexpr_ctx *ctx, tree t, } tree nelts = get_array_or_vector_nelts (ctx, TREE_TYPE (ary), non_constant_p, - overflow_p); + overflow_p, jump_target); VERIFY_CONSTANT (nelts); if (allow_one_past ?
!tree_int_cst_le (index, nelts) : !tree_int_cst_lt (index, nelts)) @@ -4683,9 +4903,9 @@ eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval, bool *non_constant_p, bool *overflow_p, tree *jump_target) { - tree val - = eval_constant_expression (ctx, TREE_OPERAND (t, 0), - /*lval*/ false, non_constant_p, overflow_p); + tree val = eval_constant_expression (ctx, TREE_OPERAND (t, 0), + /*lval*/ false, non_constant_p, + overflow_p, jump_target); VERIFY_CONSTANT (val); if (TREE_CODE (t) == IF_STMT && IF_STMT_CONSTEVAL_P (t)) { @@ -4722,14 +4942,14 @@ eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval, static tree eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, tree *jump_target) { tree orig_whole = TREE_OPERAND (t, 0); tree retval, fldval, utype, mask; bool fld_seen = false; HOST_WIDE_INT istart, isize; tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p, - overflow_p); + overflow_p, jump_target); tree start, field, value; unsigned HOST_WIDE_INT i; @@ -4960,8 +5180,8 @@ eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, { tree cond = TREE_CODE (t) == SWITCH_STMT ? SWITCH_STMT_COND (t) : SWITCH_COND (t); - cond - = eval_constant_expression (ctx, cond, false, non_constant_p, overflow_p); + cond = eval_constant_expression (ctx, cond, false, non_constant_p, overflow_p, + jump_target); VERIFY_CONSTANT (cond); *jump_target = cond; @@ -4995,12 +5215,13 @@ eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p, static tree eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/, - bool *non_constant_p, bool *overflow_p) + bool *non_constant_p, bool *overflow_p, + tree *jump_target) { tree r; tree orig_arg = TREE_OPERAND (t, 0); tree arg = eval_constant_expression (ctx, orig_arg, /*lval*/ false, - non_constant_p, overflow_p); + non_constant_p, overflow_p, jump_target); VERIFY_CONSTANT (arg); location_t loc = EXPR_LOCATION (t); enum tree_code code = TREE_CODE (t); @@ -5136,7 +5357,9 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant, if (manifestly_const_eval) instantiate_constexpr_fns (r); - r = eval_constant_expression (&ctx, r, false, &non_constant_p, &overflow_p); + tree jump_target = NULL_TREE; + r = eval_constant_expression (&ctx, r, false, &non_constant_p, &overflow_p, + &jump_target); if (!constexpr_dtor) verify_constant (r, allow_non_constant, &non_constant_p, &overflow_p); @@ -5148,7 +5371,7 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant, /* Evaluate the cleanups. */ FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup) eval_constant_expression (&ctx, cleanup, false, &non_constant_p, - &overflow_p); + &overflow_p, NULL); /* Mutable logic is a bit tricky: we want to allow initialization of constexpr variables with mutable members, but we can't copy those @@ -5908,7 +6131,6 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now, /* A pointer-to-member constant. */ return true; - // handle_addr_expr: #if 0 /* FIXME adjust when issue 1197 is fully resolved. For now don't do any checking here, as we might dereference the pointer later. If @@ -6484,6 +6706,170 @@ fold_non_dependent_init (tree t, tsubst_flags_t /*=tf_warning_or_error*/, return maybe_constant_init (t, object, manifestly_const_eval); } +/* Check whether the shift operation with code CODE and type TYPE on LHS + and RHS is undefined. 
If it is, give an error with an explanation, + and return true; return false otherwise. */ + +static bool +eval_check_shift_p (location_t loc, const constexpr_ctx *ctx, + enum tree_code code, tree type, tree lhs, tree rhs) +{ + if ((code != LSHIFT_EXPR && code != RSHIFT_EXPR) + || TREE_CODE (lhs) != INTEGER_CST || TREE_CODE (rhs) != INTEGER_CST) + return false; + + tree lhstype = TREE_TYPE (lhs); + unsigned HOST_WIDE_INT uprec = TYPE_PRECISION (TREE_TYPE (lhs)); + + /* [expr.shift] The behavior is undefined if the right operand + is negative, or greater than or equal to the length in bits + of the promoted left operand. */ + if (tree_int_cst_sgn (rhs) == -1) + { + if (!ctx->quiet) + permerror (loc, "right operand of shift expression %q+E is negative", + build2_loc (loc, code, type, lhs, rhs)); + return (!flag_permissive || ctx->quiet); + } + if (compare_tree_int (rhs, uprec) >= 0) + { + if (!ctx->quiet) + permerror (loc, + "right operand of shift expression %q+E is greater " + "than or equal to the precision %wu of the left operand", + build2_loc (loc, code, type, lhs, rhs), uprec); + return (!flag_permissive || ctx->quiet); + } + + /* The value of E1 << E2 is E1 left-shifted E2 bit positions; [...] + if E1 has a signed type and non-negative value, and E1x2^E2 is + representable in the corresponding unsigned type of the result type, + then that value, converted to the result type, is the resulting value; + otherwise, the behavior is undefined. + For C++20: + The value of E1 << E2 is the unique value congruent to E1 x 2^E2 modulo + 2^N, where N is the range exponent of the type of the result. */ + if (code == LSHIFT_EXPR && !TYPE_OVERFLOW_WRAPS (lhstype)) + { + if (tree_int_cst_sgn (lhs) == -1) + { + if (!ctx->quiet) + permerror (loc, "left operand of shift expression %q+E is negative", + build2_loc (loc, code, type, lhs, rhs)); + return (!flag_permissive || ctx->quiet); + } + /* For signed x << y the following: + (unsigned) x >> ((prec (lhs) - 1) - y) + if > 1, is undefined. The right-hand side of this formula + is the highest bit of the LHS that can be set (starting from 0), + so that the shift doesn't overflow. We then right-shift the LHS + to see whether any other bit is set making the original shift + undefined -- the result is not representable in the corresponding + unsigned type. */ + tree t = build_int_cst (unsigned_type_node, uprec - 1); + t = fold_build2 (MINUS_EXPR, unsigned_type_node, t, rhs); + tree ulhs = fold_convert (unsigned_type_for (lhstype), lhs); + t = fold_build2 (RSHIFT_EXPR, TREE_TYPE (ulhs), ulhs, t); + if (tree_int_cst_lt (integer_one_node, t)) + { + if (!ctx->quiet) + permerror (loc, "shift expression %q+E overflows", + build2_loc (loc, code, type, lhs, rhs)); + return (!flag_permissive || ctx->quiet); + } + } + return false; +} + +/* Helper function for cxx_eval_binary_expression. Try to optimize + original POINTER_PLUS_EXPR T, LHS p+ RHS, return NULL_TREE if the + generic folding should be used. 
*/ + +static tree +fold_pointer_plus_expression (const constexpr_ctx *ctx, tree t, tree lhs, + tree rhs, bool *non_constant_p, bool *overflow_p, + tree *jump_target) +{ + STRIP_NOPS (lhs); + if (TREE_CODE (lhs) != ADDR_EXPR) + return NULL_TREE; + + lhs = TREE_OPERAND (lhs, 0); + + /* &A[i] p+ j => &A[i + j] */ + if (TREE_CODE (lhs) == ARRAY_REF + && TREE_CODE (TREE_OPERAND (lhs, 1)) == INTEGER_CST + && TREE_CODE (rhs) == INTEGER_CST && TYPE_SIZE_UNIT (TREE_TYPE (lhs)) + && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST) + { + tree orig_type = TREE_TYPE (t); + location_t loc = EXPR_LOCATION (t); + tree type = TREE_TYPE (lhs); + + t = fold_convert_loc (loc, ssizetype, TREE_OPERAND (lhs, 1)); + tree nelts = array_type_nelts_top (TREE_TYPE (TREE_OPERAND (lhs, 0))); + nelts = eval_constant_expression (ctx, nelts, true, non_constant_p, + overflow_p, jump_target); + if (*non_constant_p) + return NULL_TREE; + if (*jump_target) + return NULL_TREE; + /* Don't fold an out-of-bound access. */ + if (!tree_int_cst_le (t, nelts)) + return NULL_TREE; + rhs = fold_convert (ssizetype, rhs); + /* Don't fold if rhs can't be divided exactly by TYPE_SIZE_UNIT. + constexpr int A[1]; ... (char *)&A[0] + 1 */ + if (!integer_zerop (fold_build2_loc (loc, TRUNC_MOD_EXPR, sizetype, rhs, + TYPE_SIZE_UNIT (type)))) + return NULL_TREE; + /* Make sure to treat the second operand of POINTER_PLUS_EXPR + as signed. */ + rhs = fold_build2_loc (loc, EXACT_DIV_EXPR, ssizetype, rhs, + TYPE_SIZE_UNIT (type)); + t = size_binop_loc (loc, PLUS_EXPR, rhs, t); + t = build4_loc (loc, ARRAY_REF, type, TREE_OPERAND (lhs, 0), t, NULL_TREE, + NULL_TREE); + t = build_fold_addr_expr (t); + t = fold_convert (orig_type, t); + return eval_constant_expression (ctx, t, true, non_constant_p, overflow_p, + jump_target); + } + + return NULL_TREE; +} + +/* Try to fold expressions like + (struct S *) (&a[0].D.2378 + 12) + into + &MEM <struct T> [(void *)&a + 12B] + This is something normally done by gimple_fold_stmt_to_constant_1 + on GIMPLE, but is undesirable on GENERIC if we are e.g. going to + dereference the address because some details are lost. + For pointer comparisons we want such folding though so that + match.pd address_compare optimization works. */ + +static tree +maybe_fold_addr_pointer_plus (tree t) +{ + while (CONVERT_EXPR_P (t) && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))) + t = TREE_OPERAND (t, 0); + if (TREE_CODE (t) != POINTER_PLUS_EXPR) + return NULL_TREE; + tree op0 = TREE_OPERAND (t, 0); + tree op1 = TREE_OPERAND (t, 1); + if (TREE_CODE (op1) != INTEGER_CST) + return NULL_TREE; + while (CONVERT_EXPR_P (op0) + && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))) + op0 = TREE_OPERAND (op0, 0); + if (TREE_CODE (op0) != ADDR_EXPR) + return NULL_TREE; + op1 = fold_convert (ptr_type_node, op1); + tree r = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (op0)), op0, op1); + return build1_loc (EXPR_LOCATION (t), ADDR_EXPR, TREE_TYPE (op0), r); +} + } // namespace Compile } // namespace Rust
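The change that dominates the hunks above is mechanical: every evaluation helper gains a `tree *jump_target` out-parameter so that a `break`, `continue`, `goto`, or `switch` dispatch discovered mid-expression can unwind the recursive evaluator (each caller checks `*jump_target` and returns immediately) instead of being silently dropped. A minimal standalone sketch of that discipline, with hypothetical `Node`/`eval` names standing in for GCC's `tree` machinery:

// Minimal sketch of the jump_target threading pattern.  Node and eval
// are hypothetical; GCC's evaluator has many more cases, but the
// unwinding rule is the same.
#include <cstdio>
#include <vector>

enum class Kind { Seq, Break, Literal };

struct Node
{
  Kind kind;
  int value = 0;          // Literal payload
  std::vector<Node> body; // Seq children
};

// Evaluate NODE.  When a non-local jump is discovered, *JUMP_TARGET is
// set to the construct being jumped to and every caller unwinds at once.
static int
eval (const Node &node, const Node **jump_target)
{
  if (*jump_target)
    return 0; // a jump is in flight: evaluate nothing

  switch (node.kind)
    {
    case Kind::Literal:
      return node.value;
    case Kind::Break:
      *jump_target = &node; // start unwinding
      return 0;
    case Kind::Seq:
      {
        int last = 0;
        for (const Node &child : node.body)
          {
            last = eval (child, jump_target);
            if (*jump_target)
              return 0; // propagate the pending jump outwards
          }
        return last;
      }
    }
  return 0;
}

int
main ()
{
  Node prog{Kind::Seq, 0,
            {Node{Kind::Literal, 1, {}}, Node{Kind::Break, 0, {}},
             Node{Kind::Literal, 2, {}}}};
  const Node *jump = nullptr;
  eval (prog, &jump);
  std::printf ("jump pending: %s\n", jump ? "yes" : "no");
}

The early `if (jump_target && *jump_target)` test at the top of `eval_constant_expression` plays the same role as the guard at the top of `eval`: while a jump is pending, statements are skipped until `label_matches` finds the target.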
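`eval_logical_expression` handles `&&` and `||` with a bailout/continue pair: TRUTH_ANDIF passes `boolean_false_node` as the bailout value, TRUTH_ORIF passes `boolean_true_node`, and the right operand is evaluated only when the left operand equals the continue value. A sketch of that contract (the `eval_logical` helper is hypothetical):

#include <cassert>
#include <functional>

// BAILOUT is the value that short-circuits: false for &&, true for ||.
static bool
eval_logical (bool lhs, bool bailout, const std::function<bool ()> &rhs)
{
  if (lhs == bailout)
    return lhs; // short-circuit: rhs is never evaluated
  return rhs ();
}

int
main ()
{
  bool rhs_ran = false;
  auto rhs = [&] { rhs_ran = true; return true; };

  // false && rhs: bails out, rhs untouched.
  assert (!eval_logical (false, /*bailout=*/false, rhs) && !rhs_ran);
  // true || rhs: bails out as well.
  assert (eval_logical (true, /*bailout=*/true, rhs) && !rhs_ran);
  // true && rhs: must evaluate rhs.
  assert (eval_logical (true, /*bailout=*/false, rhs) && rhs_ran);
}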
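`eval_and_check_array_index` enforces the constexpr subscript rule: the index must be a non-negative constant, and `allow_one_past` decides whether `index == nelts` (forming a one-past-the-end address) is accepted or only `index < nelts`. The predicate, sketched with plain integers standing in for `INTEGER_CST`s (`index_ok` is a hypothetical stand-in):

#include <cassert>

static bool
index_ok (long index, long nelts, bool allow_one_past)
{
  if (index < 0)
    return false;
  return allow_one_past ? index <= nelts : index < nelts;
}

int
main ()
{
  assert (index_ok (3, 4, /*allow_one_past=*/false));  // a[3] of a[4]
  assert (!index_ok (4, 4, /*allow_one_past=*/false)); // loading a[4]: UB
  assert (index_ok (4, 4, /*allow_one_past=*/true));   // &a[4] is fine
}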
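The tests in `eval_check_shift_p` reject a shift whose count is negative or at least the left operand's precision, and a left shift of a signed operand whose result would not fit in the corresponding unsigned type. The same three tests on 64-bit integers, as a standalone sketch (`shift_is_undefined` is hypothetical; GCC's version works on wide-int trees and routes through `permerror` for `-fpermissive`):

#include <cstdint>
#include <cstdio>

static bool
shift_is_undefined (int64_t lhs, int64_t rhs, unsigned width, bool left)
{
  if (rhs < 0)                              // negative shift count
    return true;
  if (static_cast<uint64_t> (rhs) >= width) // count >= precision
    return true;
  if (left)
    {
      if (lhs < 0)                          // left-shifting a negative value
        return true;
      // (unsigned) lhs >> ((width - 1) - rhs) > 1 means lhs * 2^rhs is not
      // representable in the corresponding unsigned type.
      if ((static_cast<uint64_t> (lhs) >> ((width - 1) - rhs)) > 1)
        return true;
    }
  return false;
}

int
main ()
{
  // 2 << 31 overflows a 32-bit operand; 1 << 31 still fits in unsigned.
  std::printf ("2 << 31: %d\n", shift_is_undefined (2, 31, 32, true));  // 1
  std::printf ("1 << 31: %d\n", shift_is_undefined (1, 31, 32, true));  // 0
  std::printf ("1 >> -1: %d\n", shift_is_undefined (1, -1, 32, false)); // 1
}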
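Finally, `fold_pointer_plus_expression` rewrites `&A[i] p+ j` into `&A[i + j/sizeof(elt)]`, refusing to fold when the byte offset is not an exact multiple of the element size or when the access would leave the array (the original index may still equal the element count, i.e. point one past the end). The index arithmetic behind that rewrite, sketched with plain integers (`fold_array_ref_plus` is a hypothetical stand-in; the real code also re-evaluates the rebuilt ARRAY_REF):

#include <cstdio>

struct FoldResult
{
  bool folded;
  long new_index;
};

// INDEX is assumed non-negative, as it has already passed VERIFY_CONSTANT.
static FoldResult
fold_array_ref_plus (long index, long bytes, long elt_size, long nelts)
{
  if (index > nelts)         // don't fold an out-of-bound access
    return {false, 0};
  if (bytes % elt_size != 0) // e.g. (char *)&A[0] + 1 on int A[1]
    return {false, 0};
  return {true, index + bytes / elt_size};
}

int
main ()
{
  // &A[1] plus 16 bytes over 8-byte elements: folds to &A[3].
  FoldResult r = fold_array_ref_plus (1, 16, 8, 4);
  std::printf ("folded=%d index=%ld\n", r.folded, r.new_index);
  // Offset not a multiple of the element size: left unfolded.
  r = fold_array_ref_plus (0, 1, 8, 4);
  std::printf ("folded=%d\n", r.folded);
}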
