Diffstat (limited to 'gcc/rust/backend')
 gcc/rust/backend/rust-compile-asm.cc          | 159
 gcc/rust/backend/rust-compile-asm.h           |  14
 gcc/rust/backend/rust-compile-base.cc         |  95
 gcc/rust/backend/rust-compile-base.h          |  19
 gcc/rust/backend/rust-compile-block.cc        |   1
 gcc/rust/backend/rust-compile-block.h         |  14
 gcc/rust/backend/rust-compile-context.cc      |  28
 gcc/rust/backend/rust-compile-context.h       |  35
 gcc/rust/backend/rust-compile-expr.cc         | 263
 gcc/rust/backend/rust-compile-expr.h          |   4
 gcc/rust/backend/rust-compile-extern.h        |  52
 gcc/rust/backend/rust-compile-implitem.cc     |  44
 gcc/rust/backend/rust-compile-implitem.h      |   6
 gcc/rust/backend/rust-compile-intrinsic.cc    |  84
 gcc/rust/backend/rust-compile-item.cc         | 140
 gcc/rust/backend/rust-compile-item.h          |  12
 gcc/rust/backend/rust-compile-pattern.cc      | 860
 gcc/rust/backend/rust-compile-pattern.h       |  24
 gcc/rust/backend/rust-compile-resolve-path.cc | 122
 gcc/rust/backend/rust-compile-stmt.cc         |   3
 gcc/rust/backend/rust-compile-type.cc         | 145
 gcc/rust/backend/rust-compile-type.h          |   6
 gcc/rust/backend/rust-compile-var-decl.h      |  71
 gcc/rust/backend/rust-compile.cc              |  16
 gcc/rust/backend/rust-constexpr.cc            | 883
 gcc/rust/backend/rust-constexpr.h             |   3
 gcc/rust/backend/rust-mangle-v0.cc            |  18
 gcc/rust/backend/rust-mangle.h                |  11
 gcc/rust/backend/rust-tree.cc                 |  94
 gcc/rust/backend/rust-tree.h                  | 189
 30 files changed, 2434 insertions(+), 981 deletions(-)
diff --git a/gcc/rust/backend/rust-compile-asm.cc b/gcc/rust/backend/rust-compile-asm.cc
index 22498bc..b7143a8 100644
--- a/gcc/rust/backend/rust-compile-asm.cc
+++ b/gcc/rust/backend/rust-compile-asm.cc
@@ -1,5 +1,7 @@
#include "rust-compile-asm.h"
#include "rust-compile-expr.h"
+#include "rust-system.h"
+
namespace Rust {
namespace Compile {
@@ -72,57 +74,94 @@ CompileAsm::asm_construct_string_tree (HIR::InlineAsm &expr)
return Backend::string_constant_expression (result);
}
+tl::optional<std::reference_wrapper<HIR::Expr>>
+get_out_expr (HIR::InlineAsmOperand &operand)
+{
+ switch (operand.get_register_type ())
+ {
+ case HIR::InlineAsmOperand::RegisterType::Out:
+ return *operand.get_out ().expr;
+ case HIR::InlineAsmOperand::RegisterType::InOut:
+ return *operand.get_in_out ().expr;
+ case HIR::InlineAsmOperand::RegisterType::SplitInOut:
+ return *operand.get_split_in_out ().out_expr;
+ case HIR::InlineAsmOperand::RegisterType::Const:
+ case HIR::InlineAsmOperand::RegisterType::Sym:
+ case HIR::InlineAsmOperand::RegisterType::Label:
+ case HIR::InlineAsmOperand::RegisterType::In:
+ break;
+ }
+ return tl::nullopt;
+}
+
tree
CompileAsm::asm_construct_outputs (HIR::InlineAsm &expr)
{
// TODO: Do i need to do this?
tree head = NULL_TREE;
- for (auto &output : expr.get_operands ())
+ for (auto &operand : expr.get_operands ())
{
- if (output.get_register_type ()
- == AST::InlineAsmOperand::RegisterType::Out)
- {
- auto out = output.get_out ();
-
- tree out_tree = CompileExpr::Compile (*out.expr, this->ctx);
- // expects a tree list
- // TODO: This assumes that the output is a register
- std::string expr_name = "=r";
- auto name = build_string (expr_name.size () + 1, expr_name.c_str ());
- head
- = chainon (head, build_tree_list (build_tree_list (NULL_TREE, name),
- out_tree));
-
- /*Backend::debug (head);*/
- /*head = chainon (head, out_tree);*/
- }
+ tl::optional<std::reference_wrapper<HIR::Expr>> out_expr
+ = get_out_expr (operand);
+ if (!out_expr.has_value ())
+ continue;
+
+ tree out_tree = CompileExpr::Compile (*out_expr, this->ctx);
+ // expects a tree list
+ // TODO: This assumes that the output is a register
+ std::string expr_name = "=r";
+ auto name = build_string (expr_name.size () + 1, expr_name.c_str ());
+ head = chainon (head, build_tree_list (build_tree_list (NULL_TREE, name),
+ out_tree));
+
+ /*Backend::debug (head);*/
+ /*head = chainon (head, out_tree);*/
}
return head;
}
+tl::optional<std::reference_wrapper<HIR::Expr>>
+get_in_expr (HIR::InlineAsmOperand &operand)
+{
+ switch (operand.get_register_type ())
+ {
+ case HIR::InlineAsmOperand::RegisterType::In:
+ return *operand.get_in ().expr;
+ case HIR::InlineAsmOperand::RegisterType::InOut:
+ return *operand.get_in_out ().expr;
+ case HIR::InlineAsmOperand::RegisterType::SplitInOut:
+ return *operand.get_split_in_out ().in_expr;
+ case HIR::InlineAsmOperand::RegisterType::Const:
+ case HIR::InlineAsmOperand::RegisterType::Sym:
+ case HIR::InlineAsmOperand::RegisterType::Label:
+ case HIR::InlineAsmOperand::RegisterType::Out:
+ break;
+ }
+ return tl::nullopt;
+}
+
tree
CompileAsm::asm_construct_inputs (HIR::InlineAsm &expr)
{
// TODO: Do i need to do this?
tree head = NULL_TREE;
- for (auto &input : expr.get_operands ())
+ for (auto &operand : expr.get_operands ())
{
- if (input.get_register_type () == AST::InlineAsmOperand::RegisterType::In)
- {
- auto in = input.get_in ();
-
- tree in_tree = CompileExpr::Compile (*in.expr, this->ctx);
- // expects a tree list
- // TODO: This assumes that the input is a register
- std::string expr_name = "r";
- auto name = build_string (expr_name.size () + 1, expr_name.c_str ());
- head
- = chainon (head, build_tree_list (build_tree_list (NULL_TREE, name),
- in_tree));
-
- /*head = chainon (head, out_tree);*/
- }
+ tl::optional<std::reference_wrapper<HIR::Expr>> in_expr
+ = get_in_expr (operand);
+ if (!in_expr.has_value ())
+ continue;
+
+ tree in_tree = CompileExpr::Compile (*in_expr, this->ctx);
+ // expects a tree list
+ // TODO: This assumes that the input is a register
+ std::string expr_name = "r";
+ auto name = build_string (expr_name.size () + 1, expr_name.c_str ());
+ head = chainon (head, build_tree_list (build_tree_list (NULL_TREE, name),
+ in_tree));
+
+ /*head = chainon (head, out_tree);*/
}
return head;
}
@@ -141,5 +180,57 @@ CompileAsm::asm_construct_label_tree (HIR::InlineAsm &expr)
return NULL_TREE;
}
+CompileLlvmAsm::CompileLlvmAsm (Context *ctx) : HIRCompileBase (ctx) {}
+
+tree
+CompileLlvmAsm::construct_operands (std::vector<HIR::LlvmOperand> operands)
+{
+ tree head = NULL_TREE;
+ for (auto &operand : operands)
+ {
+ tree t = CompileExpr::Compile (*operand.expr, this->ctx);
+ auto name = build_string (operand.constraint.size () + 1,
+ operand.constraint.c_str ());
+ head = chainon (head,
+ build_tree_list (build_tree_list (NULL_TREE, name), t));
+ }
+ return head;
+}
+
+tree
+CompileLlvmAsm::construct_clobbers (std::vector<AST::TupleClobber> clobbers)
+{
+ tree head = NULL_TREE;
+ for (auto &clobber : clobbers)
+ {
+ auto name
+ = build_string (clobber.symbol.size () + 1, clobber.symbol.c_str ());
+ head = chainon (head, build_tree_list (NULL_TREE, name));
+ }
+ return head;
+}
+
+tree
+CompileLlvmAsm::tree_codegen_asm (HIR::LlvmInlineAsm &expr)
+{
+ tree ret = make_node (ASM_EXPR);
+ TREE_TYPE (ret) = void_type_node;
+ SET_EXPR_LOCATION (ret, expr.get_locus ());
+ ASM_VOLATILE_P (ret) = expr.options.is_volatile;
+
+ std::stringstream ss;
+ for (const auto &template_str : expr.templates)
+ {
+ ss << template_str.symbol << "\n";
+ }
+
+ ASM_STRING (ret) = Backend::string_constant_expression (ss.str ());
+ ASM_INPUTS (ret) = construct_operands (expr.inputs);
+ ASM_OUTPUTS (ret) = construct_operands (expr.outputs);
+ ASM_CLOBBERS (ret) = construct_clobbers (expr.get_clobbers ());
+
+ return ret;
+}
+
} // namespace Compile
} // namespace Rust
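
[Editor's note: the hunk above replaces per-register-type `if` blocks with helpers such as `get_out_expr` that return an optional reference. The following is a minimal standalone sketch of that pattern, assuming nothing about the real HIR/Backend types: `Expr`, `Operand` and the use of std::optional (the compiler uses tl::optional) are simplified stand-ins for illustration only.]

#include <functional>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

struct Expr { std::string name; };

struct Operand
{
  enum class Kind { In, Out, InOut, SplitInOut, Const, Sym } kind;
  Expr *in;   // present for In / InOut / SplitInOut
  Expr *out;  // present for Out / InOut / SplitInOut
};

// One accessor maps every kind that carries an output expression to an
// optional reference, so the caller needs a single check instead of a
// switch per loop iteration.
static std::optional<std::reference_wrapper<Expr>>
get_out_expr (Operand &op)
{
  switch (op.kind)
    {
    case Operand::Kind::Out:
    case Operand::Kind::InOut:
    case Operand::Kind::SplitInOut:
      return *op.out; // Expr& converts implicitly to reference_wrapper<Expr>
    default:
      return std::nullopt;
    }
}

int main ()
{
  Expr a{"a"}, b{"b"};
  std::vector<Operand> ops
    = {{Operand::Kind::In, &a, nullptr}, {Operand::Kind::Out, nullptr, &b}};

  for (auto &op : ops)
    if (auto out = get_out_expr (op)) // mirrors the `continue` early-out above
      std::cout << "output operand: " << out->get ().name << "\n";
}
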
diff --git a/gcc/rust/backend/rust-compile-asm.h b/gcc/rust/backend/rust-compile-asm.h
index 4abd24e..22be94a 100644
--- a/gcc/rust/backend/rust-compile-asm.h
+++ b/gcc/rust/backend/rust-compile-asm.h
@@ -56,6 +56,20 @@ public:
tree tree_codegen_asm (HIR::InlineAsm &);
};
+
+class CompileLlvmAsm : private HIRCompileBase
+{
+private:
+ tree construct_operands (std::vector<HIR::LlvmOperand> operands);
+
+ tree construct_clobbers (std::vector<AST::TupleClobber>);
+
+public:
+ CompileLlvmAsm (Context *ctx);
+
+ tree tree_codegen_asm (HIR::LlvmInlineAsm &);
+};
+
} // namespace Compile
} // namespace Rust
#endif // RUST_COMPILE_ASM
diff --git a/gcc/rust/backend/rust-compile-base.cc b/gcc/rust/backend/rust-compile-base.cc
index bcc7fc4..d1db58e 100644
--- a/gcc/rust/backend/rust-compile-base.cc
+++ b/gcc/rust/backend/rust-compile-base.cc
@@ -32,6 +32,7 @@
#include "rust-type-util.h"
#include "rust-compile-implitem.h"
#include "rust-attribute-values.h"
+#include "rust-attributes.h"
#include "rust-immutable-name-resolution-context.h"
#include "fold-const.h"
@@ -251,25 +252,21 @@ void
HIRCompileBase::handle_link_section_attribute_on_fndecl (
tree fndecl, const AST::Attribute &attr)
{
- if (!attr.has_attr_input ())
+ auto msg_str = Analysis::Attributes::extract_string_literal (attr);
+
+ if (!msg_str.has_value ())
{
rust_error_at (attr.get_locus (),
- "%<link_section%> expects exactly one argment");
+ "malformed %<link_section%> attribute input");
return;
}
- rust_assert (attr.get_attr_input ().get_attr_input_type ()
- == AST::AttrInput::AttrInputType::LITERAL);
-
- auto &literal = static_cast<AST::AttrInputLiteral &> (attr.get_attr_input ());
- const auto &msg_str = literal.get_literal ().as_string ();
-
if (decl_section_name (fndecl))
{
rust_warning_at (attr.get_locus (), 0, "section name redefined");
}
- set_decl_section_name (fndecl, msg_str.c_str ());
+ set_decl_section_name (fndecl, msg_str->c_str ());
}
void
@@ -416,13 +413,10 @@ HIRCompileBase::handle_must_use_attribute_on_fndecl (tree fndecl,
if (attr.has_attr_input ())
{
- rust_assert (attr.get_attr_input ().get_attr_input_type ()
- == AST::AttrInput::AttrInputType::LITERAL);
+ auto msg_str = Analysis::Attributes::extract_string_literal (attr);
+ rust_assert (msg_str.has_value ());
- auto &literal
- = static_cast<AST::AttrInputLiteral &> (attr.get_attr_input ());
- const auto &msg_str = literal.get_literal ().as_string ();
- tree message = build_string (msg_str.size (), msg_str.c_str ());
+ tree message = build_string (msg_str->size (), msg_str->c_str ());
value = tree_cons (nodiscard, message, NULL_TREE);
}
@@ -549,7 +543,7 @@ HIRCompileBase::mark_addressable (tree exp, location_t locus)
}
tree
-HIRCompileBase::address_expression (tree expr, location_t location)
+HIRCompileBase::address_expression (tree expr, location_t location, tree ptrty)
{
if (expr == error_mark_node)
return error_mark_node;
@@ -557,7 +551,41 @@ HIRCompileBase::address_expression (tree expr, location_t location)
if (!mark_addressable (expr, location))
return error_mark_node;
- return build_fold_addr_expr_loc (location, expr);
+ if (ptrty == NULL || ptrty == error_mark_node)
+ ptrty = build_pointer_type (TREE_TYPE (expr));
+
+ return build_fold_addr_expr_with_type_loc (location, expr, ptrty);
+}
+
+tree
+HIRCompileBase::compile_constant_expr (
+ Context *ctx, HirId coercion_id, TyTy::BaseType *resolved_type,
+ TyTy::BaseType *expected_type, const Resolver::CanonicalPath &canonical_path,
+ HIR::Expr &const_value_expr, location_t locus, location_t expr_locus)
+{
+ HIRCompileBase c (ctx);
+ return c.compile_constant_item (coercion_id, resolved_type, expected_type,
+ canonical_path, const_value_expr, locus,
+ expr_locus);
+}
+
+tree
+HIRCompileBase::query_compile_const_expr (Context *ctx, TyTy::BaseType *expr_ty,
+ HIR::Expr &const_value_expr)
+{
+ HIRCompileBase c (ctx);
+
+ ctx->push_const_context ();
+
+ HirId expr_id = const_value_expr.get_mappings ().get_hirid ();
+ location_t locus = const_value_expr.get_locus ();
+ tree capacity_expr = HIRCompileBase::compile_constant_expr (
+ ctx, expr_id, expr_ty, expr_ty, Resolver::CanonicalPath::create_empty (),
+ const_value_expr, locus, locus);
+
+ ctx->pop_const_context ();
+
+ return fold_expr (capacity_expr);
}
tree
@@ -651,7 +679,8 @@ get_abi (const AST::AttrVec &outer_attrs,
tree
HIRCompileBase::compile_function (
- const std::string &fn_name, HIR::SelfParam &self_param,
+ bool is_root_item, const std::string &fn_name,
+ tl::optional<HIR::SelfParam> &self_param,
std::vector<HIR::FunctionParam> &function_params,
const HIR::FunctionQualifiers &qualifiers, HIR::Visibility &visibility,
AST::AttrVec &outer_attrs, location_t locus, HIR::BlockExpr *function_body,
@@ -661,8 +690,12 @@ HIRCompileBase::compile_function (
std::string ir_symbol_name
= canonical_path.get () + fntype->subst_as_string ();
+ rust_debug_loc (locus, "--> Compiling [%s] - %s", ir_symbol_name.c_str (),
+ fntype->get_name ().c_str ());
+
// we don't mangle the main fn since we haven't implemented the main shim
- bool is_main_fn = fn_name.compare ("main") == 0;
+ bool is_main_fn = fn_name.compare ("main") == 0 && is_root_item
+ && canonical_path.size () <= 2;
if (is_main_fn)
{
rust_assert (!main_identifier_node);
@@ -671,14 +704,9 @@ HIRCompileBase::compile_function (
}
std::string asm_name = fn_name;
- auto &mappings = Analysis::Mappings::get ();
-
- if (flag_name_resolution_2_0)
- ir_symbol_name = mappings.get_current_crate_name () + "::" + ir_symbol_name;
-
unsigned int flags = 0;
tree fndecl = Backend::function (compiled_fn_type, ir_symbol_name,
- "" /* asm_name */, flags, locus);
+ tl::nullopt /* asm_name */, flags, locus);
setup_fndecl (fndecl, is_main_fn, fntype->has_substitutions_defined (),
visibility, qualifiers, outer_attrs);
@@ -698,24 +726,24 @@ HIRCompileBase::compile_function (
// setup the params
TyTy::BaseType *tyret = fntype->get_return_type ();
std::vector<Bvariable *> param_vars;
- if (!self_param.is_error ())
+ if (self_param)
{
rust_assert (fntype->is_method ());
TyTy::BaseType *self_tyty_lookup = fntype->get_self_type ();
tree self_type = TyTyResolveCompile::compile (ctx, self_tyty_lookup);
Bvariable *compiled_self_param
- = CompileSelfParam::compile (ctx, fndecl, self_param, self_type,
- self_param.get_locus ());
+ = CompileSelfParam::compile (ctx, fndecl, self_param.value (),
+ self_type, self_param->get_locus ());
param_vars.push_back (compiled_self_param);
- ctx->insert_var_decl (self_param.get_mappings ().get_hirid (),
+ ctx->insert_var_decl (self_param->get_mappings ().get_hirid (),
compiled_self_param);
}
// offset from + 1 for the TyTy::FnType being used when this is a method to
// skip over Self on the FnType
- bool is_method = !self_param.is_error ();
+ bool is_method = self_param.has_value ();
size_t i = is_method ? 1 : 0;
for (auto &referenced_param : function_params)
{
@@ -796,11 +824,12 @@ HIRCompileBase::compile_constant_item (
// machineary that we already have. This means the best approach is to
// make a _fake_ function with a block so it can hold onto temps then
// use our constexpr code to fold it completely or error_mark_node
- Backend::typed_identifier receiver;
+ Backend::typed_identifier receiver ("", NULL_TREE, UNKNOWN_LOCATION);
tree compiled_fn_type = Backend::function_type (
receiver, {}, {Backend::typed_identifier ("_", const_type, locus)}, NULL,
locus);
- tree fndecl = Backend::function (compiled_fn_type, ident, "", 0, locus);
+ tree fndecl
+ = Backend::function (compiled_fn_type, ident, tl::nullopt, 0, locus);
TREE_READONLY (fndecl) = 1;
tree enclosing_scope = NULL_TREE;
@@ -1011,7 +1040,7 @@ HIRCompileBase::resolve_method_address (TyTy::FnType *fntype,
tree
HIRCompileBase::unit_expression (location_t locus)
{
- tree unit_type = TyTyResolveCompile::get_unit_type ();
+ tree unit_type = TyTyResolveCompile::get_unit_type (ctx);
return Backend::constructor_expression (unit_type, false, {}, -1, locus);
}
diff --git a/gcc/rust/backend/rust-compile-base.h b/gcc/rust/backend/rust-compile-base.h
index 9328a7f..3bf26af 100644
--- a/gcc/rust/backend/rust-compile-base.h
+++ b/gcc/rust/backend/rust-compile-base.h
@@ -29,7 +29,17 @@ class HIRCompileBase
public:
virtual ~HIRCompileBase () {}
- static tree address_expression (tree expr, location_t locus);
+ static tree address_expression (tree expr, location_t locus,
+ tree ptrty = NULL_TREE);
+
+ static tree compile_constant_expr (
+ Context *ctx, HirId coercion_id, TyTy::BaseType *resolved_type,
+ TyTy::BaseType *expected_type,
+ const Resolver::CanonicalPath &canonical_path, HIR::Expr &const_value_expr,
+ location_t locus, location_t expr_locus);
+
+ static tree query_compile_const_expr (Context *ctx, TyTy::BaseType *expr_ty,
+ HIR::Expr &const_value_expr);
protected:
HIRCompileBase (Context *ctx) : ctx (ctx) {}
@@ -46,7 +56,7 @@ protected:
TyTy::BaseType *expected, location_t lvalue_locus,
location_t rvalue_locus);
- tree coerce_to_dyn_object (tree compiled_ref, const TyTy::BaseType *actual,
+ tree coerce_to_dyn_object (tree compiled_ref, TyTy::BaseType *actual,
const TyTy::DynamicObjectType *ty,
location_t locus);
@@ -96,7 +106,8 @@ protected:
HIR::Expr &const_value_expr, location_t locus,
location_t expr_locus);
- tree compile_function (const std::string &fn_name, HIR::SelfParam &self_param,
+ tree compile_function (bool is_root_item, const std::string &fn_name,
+ tl::optional<HIR::SelfParam> &self_param,
std::vector<HIR::FunctionParam> &function_params,
const HIR::FunctionQualifiers &qualifiers,
HIR::Visibility &visibility, AST::AttrVec &outer_attrs,
@@ -104,7 +115,7 @@ protected:
const Resolver::CanonicalPath &canonical_path,
TyTy::FnType *fntype);
- static tree unit_expression (location_t locus);
+ tree unit_expression (location_t locus);
void setup_fndecl (tree fndecl, bool is_main_entry_point, bool is_generic_fn,
HIR::Visibility &visibility,
diff --git a/gcc/rust/backend/rust-compile-block.cc b/gcc/rust/backend/rust-compile-block.cc
index f844a27..03c36d2 100644
--- a/gcc/rust/backend/rust-compile-block.cc
+++ b/gcc/rust/backend/rust-compile-block.cc
@@ -19,6 +19,7 @@
#include "rust-compile-block.h"
#include "rust-compile-stmt.h"
#include "rust-compile-expr.h"
+#include "rust-hir-expr.h"
namespace Rust {
namespace Compile {
diff --git a/gcc/rust/backend/rust-compile-block.h b/gcc/rust/backend/rust-compile-block.h
index 37e3980..f84bace 100644
--- a/gcc/rust/backend/rust-compile-block.h
+++ b/gcc/rust/backend/rust-compile-block.h
@@ -20,6 +20,7 @@
#define RUST_COMPILE_BLOCK
#include "rust-compile-base.h"
+#include "rust-hir-expr.h"
#include "rust-hir-visitor.h"
namespace Rust {
@@ -83,6 +84,8 @@ public:
void visit (HIR::MethodCallExpr &) override {}
void visit (HIR::FieldAccessExpr &) override {}
void visit (HIR::BlockExpr &) override {}
+ void visit (HIR::AnonConst &) override {}
+ void visit (HIR::ConstBlock &) override {}
void visit (HIR::ContinueExpr &) override {}
void visit (HIR::BreakExpr &) override {}
void visit (HIR::RangeFromToExpr &) override {}
@@ -100,6 +103,8 @@ public:
void visit (HIR::AwaitExpr &) override {}
void visit (HIR::AsyncBlockExpr &) override {}
void visit (HIR::InlineAsm &) override {}
+ void visit (HIR::LlvmInlineAsm &) override {}
+ void visit (HIR::OffsetOf &) override {}
private:
CompileConditionalBlocks (Context *ctx, Bvariable *result)
@@ -137,6 +142,12 @@ public:
translated = CompileBlock::compile (expr, ctx, result);
}
+ void visit (HIR::ConstBlock &expr) override
+ {
+ rust_unreachable ();
+ // translated = CompileExpr::compile (expr, ctx, result);
+ }
+
// Empty visit for unused Expression HIR nodes.
void visit (HIR::PathInExpression &) override {}
void visit (HIR::QualifiedPathInExpression &) override {}
@@ -182,6 +193,9 @@ public:
void visit (HIR::AwaitExpr &) override {}
void visit (HIR::AsyncBlockExpr &) override {}
void visit (HIR::InlineAsm &) override {}
+ void visit (HIR::LlvmInlineAsm &) override {}
+ void visit (HIR::OffsetOf &) override {}
+ void visit (HIR::AnonConst &) override {}
private:
CompileExprWithBlock (Context *ctx, Bvariable *result)
diff --git a/gcc/rust/backend/rust-compile-context.cc b/gcc/rust/backend/rust-compile-context.cc
index 86f0894..349d492 100644
--- a/gcc/rust/backend/rust-compile-context.cc
+++ b/gcc/rust/backend/rust-compile-context.cc
@@ -22,9 +22,18 @@
namespace Rust {
namespace Compile {
+Context *
+Context::get ()
+{
+ static Context *instance;
+ if (instance == nullptr)
+ instance = new Context ();
+
+ return instance;
+}
+
Context::Context ()
- : resolver (Resolver::Resolver::get ()),
- tyctx (Resolver::TypeCheckContext::get ()),
+ : tyctx (Resolver::TypeCheckContext::get ()),
mappings (Analysis::Mappings::get ()), mangler (Mangler ())
{
setup_builtins ();
@@ -70,7 +79,8 @@ Context::type_hasher (tree type)
hstate.add_object (TYPE_HASH (TYPE_OFFSET_BASETYPE (type)));
break;
- case ARRAY_TYPE: {
+ case ARRAY_TYPE:
+ {
if (TYPE_DOMAIN (type))
hstate.add_object (TYPE_HASH (TYPE_DOMAIN (type)));
if (!AGGREGATE_TYPE_P (TREE_TYPE (type)))
@@ -81,7 +91,8 @@ Context::type_hasher (tree type)
}
break;
- case INTEGER_TYPE: {
+ case INTEGER_TYPE:
+ {
tree t = TYPE_MAX_VALUE (type);
if (!t)
t = TYPE_MIN_VALUE (type);
@@ -91,7 +102,8 @@ Context::type_hasher (tree type)
}
case REAL_TYPE:
- case FIXED_POINT_TYPE: {
+ case FIXED_POINT_TYPE:
+ {
unsigned prec = TYPE_PRECISION (type);
hstate.add_object (prec);
break;
@@ -103,7 +115,8 @@ Context::type_hasher (tree type)
case RECORD_TYPE:
case UNION_TYPE:
- case QUAL_UNION_TYPE: {
+ case QUAL_UNION_TYPE:
+ {
for (tree t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
{
hashval_t name_hash = IDENTIFIER_HASH_VALUE (DECL_NAME (t));
@@ -118,7 +131,8 @@ Context::type_hasher (tree type)
break;
case REFERENCE_TYPE:
- case POINTER_TYPE: {
+ case POINTER_TYPE:
+ {
hashval_t type_hash = type_hasher (TREE_TYPE (type));
hstate.add_object (type_hash);
}
diff --git a/gcc/rust/backend/rust-compile-context.h b/gcc/rust/backend/rust-compile-context.h
index a446388..e98bbc2 100644
--- a/gcc/rust/backend/rust-compile-context.h
+++ b/gcc/rust/backend/rust-compile-context.h
@@ -34,6 +34,10 @@ namespace Compile {
struct fncontext
{
+ fncontext (tree fndecl, ::Bvariable *ret_addr, TyTy::BaseType *retty)
+ : fndecl (fndecl), ret_addr (ret_addr), retty (retty)
+ {}
+
tree fndecl;
::Bvariable *ret_addr;
TyTy::BaseType *retty;
@@ -49,7 +53,7 @@ struct CustomDeriveInfo
class Context
{
public:
- Context ();
+ static Context *get ();
void setup_builtins ();
@@ -72,7 +76,10 @@ public:
return it->second;
compiled_type_map.insert ({h, type});
- push_type (type);
+
+ if (TYPE_NAME (type) != NULL)
+ push_type (type);
+
return type;
}
@@ -87,7 +94,6 @@ public:
return type;
}
- Resolver::Resolver *get_resolver () { return resolver; }
Resolver::TypeCheckContext *get_tyctx () { return tyctx; }
Analysis::Mappings &get_mappings () { return mappings; }
@@ -152,7 +158,7 @@ public:
if (it == mono_fns.end ())
mono_fns[dId] = {};
- mono_fns[dId].push_back ({ref, fn});
+ mono_fns[dId].emplace_back (ref, fn);
}
void insert_closure_decl (const TyTy::ClosureType *ref, tree fn)
@@ -162,7 +168,7 @@ public:
if (it == mono_closure_fns.end ())
mono_closure_fns[dId] = {};
- mono_closure_fns[dId].push_back ({ref, fn});
+ mono_closure_fns[dId].emplace_back (ref, fn);
}
tree lookup_closure_decl (const TyTy::ClosureType *ref)
@@ -277,7 +283,7 @@ public:
void push_fn (tree fn, ::Bvariable *ret_addr, TyTy::BaseType *retty)
{
- fn_stack.push_back (fncontext{fn, ret_addr, retty});
+ fn_stack.emplace_back (fn, ret_addr, retty);
}
void pop_fn () { fn_stack.pop_back (); }
@@ -316,7 +322,13 @@ public:
void push_loop_context (Bvariable *var) { loop_value_stack.push_back (var); }
- Bvariable *peek_loop_context () { return loop_value_stack.back (); }
+ bool have_loop_context () const { return !loop_value_stack.empty (); }
+
+ Bvariable *peek_loop_context ()
+ {
+ rust_assert (!loop_value_stack.empty ());
+ return loop_value_stack.back ();
+ }
Bvariable *pop_loop_context ()
{
@@ -330,7 +342,11 @@ public:
loop_begin_labels.push_back (label);
}
- tree peek_loop_begin_label () { return loop_begin_labels.back (); }
+ tree peek_loop_begin_label ()
+ {
+ rust_assert (!loop_begin_labels.empty ());
+ return loop_begin_labels.back ();
+ }
tree pop_loop_begin_label ()
{
@@ -388,7 +404,8 @@ public:
}
private:
- Resolver::Resolver *resolver;
+ Context ();
+
Resolver::TypeCheckContext *tyctx;
Analysis::Mappings &mappings;
Mangler mangler;
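
[Editor's note: the two hunks above turn `Context` into a lazily constructed singleton: the constructor becomes private and callers go through a static `get ()`. A minimal sketch of that shape, with illustrative names rather than the real compiler members:]

#include <iostream>

class Context
{
public:
  // Lazily allocates the single instance on first use and returns it on
  // every subsequent call; the object deliberately lives for the whole
  // compilation, so it is never freed.
  static Context *get ()
  {
    static Context *instance = nullptr;
    if (instance == nullptr)
      instance = new Context ();
    return instance;
  }

  int next_id () { return counter++; }

private:
  Context () : counter (0) {} // callers must go through get ()

  int counter;
};

int main ()
{
  // Both calls observe the same instance.
  std::cout << Context::get ()->next_id () << "\n"; // prints 0
  std::cout << Context::get ()->next_id () << "\n"; // prints 1
}
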
diff --git a/gcc/rust/backend/rust-compile-expr.cc b/gcc/rust/backend/rust-compile-expr.cc
index 21f4ca1..9a9c315 100644
--- a/gcc/rust/backend/rust-compile-expr.cc
+++ b/gcc/rust/backend/rust-compile-expr.cc
@@ -17,6 +17,8 @@
// <http://www.gnu.org/licenses/>.
#include "rust-compile-expr.h"
+#include "rust-backend.h"
+#include "rust-compile-type.h"
#include "rust-compile-struct-field-expr.h"
#include "rust-compile-pattern.h"
#include "rust-compile-resolve-path.h"
@@ -30,8 +32,11 @@
#include "realmpfr.h"
#include "convert.h"
#include "print-tree.h"
+#include "rust-hir-expr.h"
#include "rust-system.h"
+#include "rust-tree.h"
#include "rust-tyty.h"
+#include "tree-core.h"
namespace Rust {
namespace Compile {
@@ -170,7 +175,7 @@ CompileExpr::visit (HIR::ArithmeticOrLogicalExpr &expr)
}
auto receiver_tmp = NULL_TREE;
- auto receiver
+ Bvariable *receiver
= Backend::temporary_variable (ctx->peek_fn ().fndecl, NULL_TREE,
TREE_TYPE (lhs), lhs, true,
expr.get_locus (), &receiver_tmp);
@@ -209,7 +214,7 @@ CompileExpr::visit (HIR::CompoundAssignmentExpr &expr)
if (ctx->in_fn () && !ctx->const_context_p ())
{
auto tmp = NULL_TREE;
- auto receiver
+ Bvariable *receiver
= Backend::temporary_variable (ctx->peek_fn ().fndecl, NULL_TREE,
TREE_TYPE (lhs), lhs, true,
expr.get_locus (), &tmp);
@@ -368,6 +373,38 @@ CompileExpr::visit (HIR::InlineAsm &expr)
}
void
+CompileExpr::visit (HIR::LlvmInlineAsm &expr)
+{
+ CompileLlvmAsm asm_codegen (ctx);
+ ctx->add_statement (asm_codegen.tree_codegen_asm (expr));
+}
+
+void
+CompileExpr::visit (HIR::OffsetOf &expr)
+{
+ TyTy::BaseType *type = nullptr;
+ if (!ctx->get_tyctx ()->lookup_type (
+ expr.get_type ().get_mappings ().get_hirid (), &type))
+ {
+ translated = error_mark_node;
+ return;
+ }
+
+ auto compiled_ty = TyTyResolveCompile::compile (ctx, type);
+
+ rust_assert (TREE_CODE (compiled_ty) == RECORD_TYPE);
+
+ // Create an identifier node for the field
+ auto field_id = Backend::get_identifier_node (expr.get_field ().as_string ());
+
+ // And now look it up and get its value for `byte_position`
+ auto field = Backend::lookup_field (compiled_ty, field_id);
+ auto field_value = TREE_VALUE (field);
+
+ translated = byte_position (field_value);
+}
+
+void
CompileExpr::visit (HIR::IfExprConseqElse &expr)
{
TyTy::BaseType *if_type = nullptr;
@@ -434,6 +471,18 @@ CompileExpr::visit (HIR::BlockExpr &expr)
}
void
+CompileExpr::visit (HIR::AnonConst &expr)
+{
+ expr.get_inner_expr ().accept_vis (*this);
+}
+
+void
+CompileExpr::visit (HIR::ConstBlock &expr)
+{
+ expr.get_const_expr ().accept_vis (*this);
+}
+
+void
CompileExpr::visit (HIR::UnsafeBlockExpr &expr)
{
expr.get_block_expr ().accept_vis (*this);
@@ -464,6 +513,8 @@ CompileExpr::visit (HIR::StructExprStructFields &struct_expr)
rust_error_at (struct_expr.get_locus (), "unknown type");
return;
}
+ if (!tyty->is<TyTy::ADTType> ())
+ return;
// it must be an ADT
rust_assert (tyty->get_kind () == TyTy::TypeKind::ADT);
@@ -662,6 +713,15 @@ void
CompileExpr::visit (HIR::LoopExpr &expr)
{
TyTy::BaseType *block_tyty = nullptr;
+ fncontext fnctx = ctx->peek_fn ();
+ if (ctx->const_context_p () && !DECL_DECLARED_CONSTEXPR_P (fnctx.fndecl))
+ {
+ rich_location r (line_table, expr.get_locus ());
+ rust_error_at (r, ErrorCode::E0658,
+ "%<loop%> is not allowed in const context");
+ return;
+ }
+
if (!ctx->get_tyctx ()->lookup_type (expr.get_mappings ().get_hirid (),
&block_tyty))
{
@@ -669,7 +729,6 @@ CompileExpr::visit (HIR::LoopExpr &expr)
return;
}
- fncontext fnctx = ctx->peek_fn ();
tree enclosing_scope = ctx->peek_enclosing_scope ();
tree block_type = TyTyResolveCompile::compile (ctx, block_tyty);
@@ -694,7 +753,8 @@ CompileExpr::visit (HIR::LoopExpr &expr)
loop_label.get_lifetime ().get_mappings ().get_hirid (), label);
}
- tree loop_begin_label = Backend::label (fnctx.fndecl, "", expr.get_locus ());
+ tree loop_begin_label
+ = Backend::label (fnctx.fndecl, tl::nullopt, expr.get_locus ());
tree loop_begin_label_decl
= Backend::label_definition_statement (loop_begin_label);
ctx->add_statement (loop_begin_label_decl);
@@ -736,7 +796,8 @@ CompileExpr::visit (HIR::WhileLoopExpr &expr)
start_location, end_location);
ctx->push_block (loop_block);
- tree loop_begin_label = Backend::label (fnctx.fndecl, "", expr.get_locus ());
+ tree loop_begin_label
+ = Backend::label (fnctx.fndecl, tl::nullopt, expr.get_locus ());
tree loop_begin_label_decl
= Backend::label_definition_statement (loop_begin_label);
ctx->add_statement (loop_begin_label_decl);
@@ -767,6 +828,10 @@ CompileExpr::visit (HIR::BreakExpr &expr)
{
tree compiled_expr = CompileExpr::Compile (expr.get_expr (), ctx);
+ translated = error_mark_node;
+ if (!ctx->have_loop_context ())
+ return;
+
Bvariable *loop_result_holder = ctx->peek_loop_context ();
tree result_reference
= Backend::var_expression (loop_result_holder,
@@ -780,25 +845,16 @@ CompileExpr::visit (HIR::BreakExpr &expr)
if (expr.has_label ())
{
- NodeId resolved_node_id = UNKNOWN_NODEID;
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- if (auto id
- = nr_ctx.lookup (expr.get_label ().get_mappings ().get_nodeid ()))
- resolved_node_id = *id;
- }
- else
+ NodeId resolved_node_id;
+ if (auto id
+ = nr_ctx.lookup (expr.get_label ().get_mappings ().get_nodeid ()))
{
- NodeId tmp = UNKNOWN_NODEID;
- if (ctx->get_resolver ()->lookup_resolved_label (
- expr.get_label ().get_mappings ().get_nodeid (), &tmp))
- resolved_node_id = tmp;
+ resolved_node_id = *id;
}
-
- if (resolved_node_id == UNKNOWN_NODEID)
+ else
{
rust_error_at (
expr.get_label ().get_locus (),
@@ -839,29 +895,23 @@ CompileExpr::visit (HIR::BreakExpr &expr)
void
CompileExpr::visit (HIR::ContinueExpr &expr)
{
+ translated = error_mark_node;
+ if (!ctx->have_loop_context ())
+ return;
+
tree label = ctx->peek_loop_begin_label ();
if (expr.has_label ())
{
- NodeId resolved_node_id = UNKNOWN_NODEID;
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- if (auto id
- = nr_ctx.lookup (expr.get_label ().get_mappings ().get_nodeid ()))
- resolved_node_id = *id;
- }
- else
+ NodeId resolved_node_id;
+ if (auto id
+ = nr_ctx.lookup (expr.get_label ().get_mappings ().get_nodeid ()))
{
- NodeId tmp = UNKNOWN_NODEID;
-
- if (ctx->get_resolver ()->lookup_resolved_label (
- expr.get_label ().get_mappings ().get_nodeid (), &tmp))
- resolved_node_id = tmp;
+ resolved_node_id = *id;
}
-
- if (resolved_node_id == UNKNOWN_NODEID)
+ else
{
rust_error_at (
expr.get_label ().get_locus (),
@@ -905,7 +955,8 @@ CompileExpr::visit (HIR::BorrowExpr &expr)
&tyty))
return;
- translated = address_expression (main_expr, expr.get_locus ());
+ tree expected_type = TyTyResolveCompile::compile (ctx, tyty);
+ translated = address_expression (main_expr, expr.get_locus (), expected_type);
}
void
@@ -1122,9 +1173,8 @@ CompileExpr::visit (HIR::MatchExpr &expr)
// setup the end label so the cases can exit properly
tree fndecl = fnctx.fndecl;
location_t end_label_locus = expr.get_locus (); // FIXME
- tree end_label
- = Backend::label (fndecl, "" /* empty creates an artificial label */,
- end_label_locus);
+ // tl::nullopt creates an artificial label
+ tree end_label = Backend::label (fndecl, tl::nullopt, end_label_locus);
tree end_label_decl_statement
= Backend::label_definition_statement (end_label);
@@ -1317,6 +1367,28 @@ CompileExpr::visit (HIR::CallExpr &expr)
};
auto fn_address = CompileExpr::Compile (expr.get_fnexpr (), ctx);
+ if (ctx->const_context_p ())
+ {
+ if (!FUNCTION_POINTER_TYPE_P (TREE_TYPE (fn_address)))
+ {
+ rust_error_at (expr.get_locus (),
+ "calls in constants are limited to constant "
+ "functions, tuple structs and tuple variants");
+ return;
+ }
+
+ if (TREE_CODE (fn_address) == ADDR_EXPR)
+ {
+ tree fndecl = TREE_OPERAND (fn_address, 0);
+ if (!DECL_DECLARED_CONSTEXPR_P (fndecl))
+ {
+ rust_error_at (expr.get_locus (),
+ "calls in constants are limited to constant "
+ "functions, tuple structs and tuple variants");
+ return;
+ }
+ }
+ }
// is this a closure call?
bool possible_trait_call
@@ -1583,37 +1655,39 @@ CompileExpr::compile_integer_literal (const HIR::LiteralExpr &expr,
const TyTy::BaseType *tyty)
{
rust_assert (expr.get_lit_type () == HIR::Literal::INT);
- const auto literal_value = expr.get_literal ();
-
+ const auto &literal_value = expr.get_literal ();
tree type = TyTyResolveCompile::compile (ctx, tyty);
+ std::string s = literal_value.as_string ();
+ s.erase (std::remove (s.begin (), s.end (), '_'), s.end ());
+
+ int base = 0;
mpz_t ival;
- if (mpz_init_set_str (ival, literal_value.as_string ().c_str (), 10) != 0)
+ if (mpz_init_set_str (ival, s.c_str (), base) != 0)
{
- rust_error_at (expr.get_locus (), "bad number in literal");
+ rust_error_at (expr.get_locus (), "failed to load number literal");
return error_mark_node;
}
+ if (expr.is_negative ())
+ mpz_neg (ival, ival);
- mpz_t type_min;
- mpz_t type_max;
+ mpz_t type_min, type_max;
mpz_init (type_min);
mpz_init (type_max);
get_type_static_bounds (type, type_min, type_max);
- if (expr.is_negative ())
- {
- mpz_neg (ival, ival);
- }
if (mpz_cmp (ival, type_min) < 0 || mpz_cmp (ival, type_max) > 0)
{
rust_error_at (expr.get_locus (),
"integer overflows the respective type %qs",
tyty->get_name ().c_str ());
+ mpz_clear (type_min);
+ mpz_clear (type_max);
+ mpz_clear (ival);
return error_mark_node;
}
tree result = wide_int_to_tree (type, wi::from_mpz (type, ival, true));
-
mpz_clear (type_min);
mpz_clear (type_max);
mpz_clear (ival);
@@ -1638,6 +1712,8 @@ CompileExpr::compile_float_literal (const HIR::LiteralExpr &expr,
rust_error_at (expr.get_locus (), "bad number in literal");
return error_mark_node;
}
+ if (expr.is_negative ())
+ mpfr_neg (fval, fval, MPFR_RNDN);
// taken from:
// see go/gofrontend/expressions.cc:check_float_type
@@ -1875,7 +1951,8 @@ CompileExpr::visit (HIR::ArrayExpr &expr)
HIR::ArrayElems &elements = expr.get_internal_elements ();
switch (elements.get_array_expr_type ())
{
- case HIR::ArrayElems::ArrayExprType::VALUES: {
+ case HIR::ArrayElems::ArrayExprType::VALUES:
+ {
HIR::ArrayElemsValues &elems
= static_cast<HIR::ArrayElemsValues &> (elements);
translated
@@ -1902,6 +1979,14 @@ CompileExpr::array_value_expr (location_t expr_locus,
for (auto &elem : elems.get_values ())
{
tree translated_expr = CompileExpr::Compile (*elem, ctx);
+ if (translated_expr == error_mark_node)
+ {
+ rich_location r (line_table, expr_locus);
+ r.add_fixit_replace (elem->get_locus (), "not a value");
+ rust_error_at (r, ErrorCode::E0423, "expected value");
+ return error_mark_node;
+ }
+
constructor.push_back (translated_expr);
indexes.push_back (i++);
}
@@ -1927,13 +2012,25 @@ CompileExpr::array_copied_expr (location_t expr_locus,
return error_mark_node;
}
- ctx->push_const_context ();
- tree capacity_expr = CompileExpr::Compile (elems.get_num_copies_expr (), ctx);
- ctx->pop_const_context ();
+ auto capacity_ty = array_tyty.get_capacity ();
- if (!TREE_CONSTANT (capacity_expr))
+ // Check if capacity is a const type
+ if (capacity_ty->get_kind () != TyTy::TypeKind::CONST)
{
- rust_error_at (expr_locus, "non const num copies %qT", array_type);
+ rust_error_at (array_tyty.get_locus (),
+ "array capacity is not a const type");
+ return error_mark_node;
+ }
+
+ auto *capacity_const = capacity_ty->as_const_type ();
+
+ rust_assert (capacity_const->const_kind ()
+ == TyTy::BaseConstType::ConstKind::Value);
+ auto &capacity_value = *static_cast<TyTy::ConstValueType *> (capacity_const);
+ auto cap_tree = capacity_value.get_value ();
+ if (error_operand_p (cap_tree) || !TREE_CONSTANT (cap_tree))
+ {
+ rust_error_at (expr_locus, "non const num copies %qT", cap_tree);
return error_mark_node;
}
@@ -1956,8 +2053,12 @@ CompileExpr::array_copied_expr (location_t expr_locus,
if (ctx->const_context_p ())
{
size_t idx = 0;
+
std::vector<unsigned long> indexes;
std::vector<tree> constructor;
+
+ indexes.reserve (len);
+ constructor.reserve (len);
for (unsigned HOST_WIDE_INT i = 0; i < len; i++)
{
constructor.push_back (translated_expr);
@@ -1982,9 +2083,9 @@ CompileExpr::array_copied_expr (location_t expr_locus,
ctx->push_block (init_block);
tree tmp;
- tree stmts = Backend::array_initializer (fndecl, init_block, array_type,
- capacity_expr, translated_expr,
- &tmp, expr_locus);
+ tree stmts
+ = Backend::array_initializer (fndecl, init_block, array_type, cap_tree,
+ translated_expr, &tmp, expr_locus);
ctx->add_statement (stmts);
tree block = ctx->pop_block ();
@@ -2003,13 +2104,17 @@ HIRCompileBase::resolve_adjustements (
tree e = expression;
for (auto &adjustment : adjustments)
{
+ if (e == error_mark_node)
+ return error_mark_node;
+
switch (adjustment.get_type ())
{
case Resolver::Adjustment::AdjustmentType::ERROR:
return error_mark_node;
case Resolver::Adjustment::AdjustmentType::IMM_REF:
- case Resolver::Adjustment::AdjustmentType::MUT_REF: {
+ case Resolver::Adjustment::AdjustmentType::MUT_REF:
+ {
if (!RS_DST_FLAG (TREE_TYPE (e)))
{
e = address_expression (e, locus);
@@ -2128,11 +2233,10 @@ HIRCompileBase::resolve_unsized_dyn_adjustment (
tree rvalue = expression;
location_t rvalue_locus = locus;
- const TyTy::BaseType *actual = adjustment.get_actual ();
- const TyTy::BaseType *expected = adjustment.get_expected ();
+ auto actual = adjustment.get_actual ();
+ auto expected = adjustment.get_expected ();
- const TyTy::DynamicObjectType *dyn
- = static_cast<const TyTy::DynamicObjectType *> (expected);
+ const auto dyn = static_cast<const TyTy::DynamicObjectType *> (expected);
rust_debug ("resolve_unsized_dyn_adjustment actual={%s} dyn={%s}",
actual->debug_str ().c_str (), dyn->debug_str ().c_str ());
@@ -2451,23 +2555,12 @@ CompileExpr::generate_closure_function (HIR::ClosureExpr &expr,
if (is_block_expr)
{
auto body_mappings = function_body.get_mappings ();
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- auto candidate = nr_ctx.values.to_rib (body_mappings.get_nodeid ());
+ auto candidate = nr_ctx.values.to_rib (body_mappings.get_nodeid ());
- rust_assert (candidate.has_value ());
- }
- else
- {
- Resolver::Rib *rib = nullptr;
- bool ok
- = ctx->get_resolver ()->find_name_rib (body_mappings.get_nodeid (),
- &rib);
- rust_assert (ok);
- }
+ rust_assert (candidate.has_value ());
}
tree enclosing_scope = NULL_TREE;
@@ -2547,15 +2640,15 @@ CompileExpr::generate_closure_fntype (HIR::ClosureExpr &expr,
TyTy::TypeBoundPredicateItem item = TyTy::TypeBoundPredicateItem::error ();
if (predicate.get_name ().compare ("FnOnce") == 0)
{
- item = predicate.lookup_associated_item ("call_once");
+ item = predicate.lookup_associated_item ("call_once").value ();
}
else if (predicate.get_name ().compare ("FnMut") == 0)
{
- item = predicate.lookup_associated_item ("call_mut");
+ item = predicate.lookup_associated_item ("call_mut").value ();
}
else if (predicate.get_name ().compare ("Fn") == 0)
{
- item = predicate.lookup_associated_item ("call");
+ item = predicate.lookup_associated_item ("call").value ();
}
else
{
diff --git a/gcc/rust/backend/rust-compile-expr.h b/gcc/rust/backend/rust-compile-expr.h
index dc78dee..b8b4e8d 100644
--- a/gcc/rust/backend/rust-compile-expr.h
+++ b/gcc/rust/backend/rust-compile-expr.h
@@ -48,6 +48,8 @@ public:
void visit (HIR::IfExpr &expr) override;
void visit (HIR::IfExprConseqElse &expr) override;
void visit (HIR::BlockExpr &expr) override;
+ void visit (HIR::AnonConst &expr) override;
+ void visit (HIR::ConstBlock &expr) override;
void visit (HIR::UnsafeBlockExpr &expr) override;
void visit (HIR::StructExprStruct &struct_expr) override;
void visit (HIR::StructExprStructFields &struct_expr) override;
@@ -69,6 +71,8 @@ public:
void visit (HIR::RangeFromToInclExpr &expr) override;
void visit (HIR::ClosureExpr &expr) override;
void visit (HIR::InlineAsm &expr) override;
+ void visit (HIR::LlvmInlineAsm &expr) override;
+ void visit (HIR::OffsetOf &expr) override;
// TODO
void visit (HIR::ErrorPropagationExpr &) override {}
diff --git a/gcc/rust/backend/rust-compile-extern.h b/gcc/rust/backend/rust-compile-extern.h
index bacd1c0..a78f9ee 100644
--- a/gcc/rust/backend/rust-compile-extern.h
+++ b/gcc/rust/backend/rust-compile-extern.h
@@ -24,6 +24,8 @@
#include "rust-compile-type.h"
#include "rust-diagnostics.h"
#include "rust-hir-full-decls.h"
+#include "rust-attributes.h"
+#include "rust-attribute-values.h"
namespace Rust {
namespace Compile {
@@ -34,16 +36,10 @@ class CompileExternItem : public HIRCompileBase,
public:
static tree compile (HIR::ExternalItem *item, Context *ctx,
TyTy::BaseType *concrete = nullptr,
- bool is_query_mode = false,
location_t ref_locus = UNDEF_LOCATION)
{
CompileExternItem compiler (ctx, concrete, ref_locus);
item->accept_vis (compiler);
-
- if (is_query_mode && compiler.reference == error_mark_node)
- rust_internal_error_at (ref_locus, "failed to compile extern item: %s",
- item->as_string ().c_str ());
-
return compiler.reference;
}
@@ -63,8 +59,7 @@ public:
rust_assert (ok);
std::string name = item.get_item_name ().as_string ();
- // FIXME this is assuming C ABI
- std::string asm_name = name;
+ GGC::Ident asm_name = get_link_name (item);
tree type = TyTyResolveCompile::compile (ctx, resolved_type);
bool is_external = true;
@@ -130,16 +125,7 @@ public:
tree compiled_fn_type = TyTyResolveCompile::compile (ctx, fntype);
std::string ir_symbol_name = function.get_item_name ().as_string ();
- std::string asm_name = function.get_item_name ().as_string ();
- if (fntype->get_abi () == ABI::RUST)
- {
- // then we need to get the canonical path of it and mangle it
- auto canonical_path = ctx->get_mappings ().lookup_canonical_path (
- function.get_mappings ().get_nodeid ());
-
- ir_symbol_name = canonical_path->get () + fntype->subst_as_string ();
- asm_name = ctx->mangle_item (fntype, *canonical_path);
- }
+ GGC::Ident asm_name = get_link_name (function);
const unsigned int flags = Backend::function_is_declaration;
tree fndecl = Backend::function (compiled_fn_type, ir_symbol_name, asm_name,
@@ -164,6 +150,36 @@ private:
ref_locus (ref_locus)
{}
+ template <typename T> static GGC::Ident get_link_name (T &obj)
+ {
+ AST::Attribute *use_attr = nullptr;
+
+ for (auto &attr : obj.get_outer_attrs ())
+ {
+ if (attr.get_path ().as_string () == Values::Attributes::LINK_NAME)
+ {
+ // later attributes override earlier ones
+ // TODO: add warning -- should duplicate
+ // attributes be folded elsewhere?
+ use_attr = &attr;
+ }
+ }
+
+ if (use_attr)
+ {
+ auto link_name
+ = Analysis::Attributes::extract_string_literal (*use_attr);
+
+ if (!link_name.has_value ())
+ rust_error_at (use_attr->get_locus (),
+ "malformed %<link_name%> attribute input");
+ else
+ return *link_name;
+ }
+
+ return obj.get_item_name ();
+ }
+
TyTy::BaseType *concrete;
tree reference;
location_t ref_locus;
diff --git a/gcc/rust/backend/rust-compile-implitem.cc b/gcc/rust/backend/rust-compile-implitem.cc
index 71b3e8d..63df2f5 100644
--- a/gcc/rust/backend/rust-compile-implitem.cc
+++ b/gcc/rust/backend/rust-compile-implitem.cc
@@ -27,22 +27,11 @@ CompileTraitItem::visit (HIR::TraitItemConst &constant)
rust_assert (concrete != nullptr);
TyTy::BaseType *resolved_type = concrete;
- tl::optional<Resolver::CanonicalPath> canonical_path;
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
-
- canonical_path = nr_ctx.values.to_canonical_path (
- constant.get_mappings ().get_nodeid ());
- }
- else
- {
- canonical_path = ctx->get_mappings ().lookup_canonical_path (
- constant.get_mappings ().get_nodeid ());
- }
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- rust_assert (canonical_path);
+ Resolver::CanonicalPath canonical_path
+ = nr_ctx.to_canonical_path (constant.get_mappings ().get_nodeid ());
HIR::Expr &const_value_expr = constant.get_expr ();
TyTy::BaseType *expr_type = nullptr;
@@ -52,7 +41,7 @@ CompileTraitItem::visit (HIR::TraitItemConst &constant)
tree const_expr
= compile_constant_item (constant.get_mappings ().get_hirid (), expr_type,
- resolved_type, *canonical_path, const_value_expr,
+ resolved_type, canonical_path, const_value_expr,
constant.get_locus (),
const_value_expr.get_locus ());
ctx->push_const (const_expr);
@@ -96,32 +85,21 @@ CompileTraitItem::visit (HIR::TraitItemFunc &func)
fntype->override_context ();
}
- tl::optional<Resolver::CanonicalPath> canonical_path;
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
-
- canonical_path
- = nr_ctx.values.to_canonical_path (func.get_mappings ().get_nodeid ());
- }
- else
- {
- canonical_path = ctx->get_mappings ().lookup_canonical_path (
- func.get_mappings ().get_nodeid ());
- }
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- rust_assert (canonical_path);
+ Resolver::CanonicalPath canonical_path
+ = nr_ctx.to_canonical_path (func.get_mappings ().get_nodeid ());
// FIXME: How do we get the proper visibility here?
auto vis = HIR::Visibility (HIR::Visibility::VisType::PUBLIC);
HIR::TraitFunctionDecl &function = func.get_decl ();
tree fndecl
- = compile_function (function.get_function_name ().as_string (),
+ = compile_function (false, function.get_function_name ().as_string (),
function.get_self (), function.get_function_params (),
function.get_qualifiers (), vis,
func.get_outer_attrs (), func.get_locus (),
- &func.get_block_expr (), *canonical_path, fntype);
+ &func.get_block_expr (), canonical_path, fntype);
reference = address_expression (fndecl, ref_locus);
}
diff --git a/gcc/rust/backend/rust-compile-implitem.h b/gcc/rust/backend/rust-compile-implitem.h
index d2ef989..2d18dbf 100644
--- a/gcc/rust/backend/rust-compile-implitem.h
+++ b/gcc/rust/backend/rust-compile-implitem.h
@@ -30,16 +30,10 @@ class CompileInherentImplItem : public CompileItem
public:
static tree Compile (HIR::ImplItem *item, Context *ctx,
TyTy::BaseType *concrete = nullptr,
- bool is_query_mode = false,
location_t ref_locus = UNDEF_LOCATION)
{
CompileInherentImplItem compiler (ctx, concrete, ref_locus);
item->accept_vis (compiler);
-
- if (is_query_mode && compiler.reference == error_mark_node)
- rust_internal_error_at (ref_locus, "failed to compile impl item: %s",
- item->as_string ().c_str ());
-
return compiler.reference;
}
diff --git a/gcc/rust/backend/rust-compile-intrinsic.cc b/gcc/rust/backend/rust-compile-intrinsic.cc
index fb0c661..450a869 100644
--- a/gcc/rust/backend/rust-compile-intrinsic.cc
+++ b/gcc/rust/backend/rust-compile-intrinsic.cc
@@ -32,8 +32,7 @@
// declaration taken from "stringpool.h"
// the get_identifier macro causes compilation issues
-extern tree
-get_identifier (const char *);
+extern tree get_identifier (const char *);
namespace Rust {
namespace Compile {
@@ -70,28 +69,19 @@ check_for_basic_integer_type (const std::string &intrinsic_str,
return is_basic_integer;
}
-static tree
-offset_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-sizeof_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-transmute_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-rotate_handler (Context *ctx, TyTy::FnType *fntype, tree_code op);
-static tree
-wrapping_op_handler_inner (Context *ctx, TyTy::FnType *fntype, tree_code op);
-static tree
-op_with_overflow_inner (Context *ctx, TyTy::FnType *fntype, tree_code op);
-static tree
-uninit_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-move_val_init_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-assume_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-discriminant_value_handler (Context *ctx, TyTy::FnType *fntype);
-static tree
-variant_count_handler (Context *ctx, TyTy::FnType *fntype);
+static tree offset_handler (Context *ctx, TyTy::FnType *fntype);
+static tree sizeof_handler (Context *ctx, TyTy::FnType *fntype);
+static tree transmute_handler (Context *ctx, TyTy::FnType *fntype);
+static tree rotate_handler (Context *ctx, TyTy::FnType *fntype, tree_code op);
+static tree wrapping_op_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ tree_code op);
+static tree op_with_overflow_inner (Context *ctx, TyTy::FnType *fntype,
+ tree_code op);
+static tree uninit_handler (Context *ctx, TyTy::FnType *fntype);
+static tree move_val_init_handler (Context *ctx, TyTy::FnType *fntype);
+static tree assume_handler (Context *ctx, TyTy::FnType *fntype);
+static tree discriminant_value_handler (Context *ctx, TyTy::FnType *fntype);
+static tree variant_count_handler (Context *ctx, TyTy::FnType *fntype);
enum class Prefetch
{
@@ -99,8 +89,8 @@ enum class Prefetch
Write
};
-static tree
-prefetch_data_handler (Context *ctx, TyTy::FnType *fntype, Prefetch kind);
+static tree prefetch_data_handler (Context *ctx, TyTy::FnType *fntype,
+ Prefetch kind);
static inline tree
rotate_left_handler (Context *ctx, TyTy::FnType *fntype)
@@ -140,10 +130,10 @@ prefetch_write_data (Context *ctx, TyTy::FnType *fntype)
return prefetch_data_handler (ctx, fntype, Prefetch::Write);
}
-static tree
-atomic_store_handler_inner (Context *ctx, TyTy::FnType *fntype, int ordering);
-static tree
-atomic_load_handler_inner (Context *ctx, TyTy::FnType *fntype, int ordering);
+static tree atomic_store_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ int ordering);
+static tree atomic_load_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ int ordering);
static inline std::function<tree (Context *, TyTy::FnType *)>
atomic_store_handler (int ordering)
@@ -161,8 +151,8 @@ atomic_load_handler (int ordering)
};
}
-static inline tree
-unchecked_op_inner (Context *ctx, TyTy::FnType *fntype, tree_code op);
+static inline tree unchecked_op_inner (Context *ctx, TyTy::FnType *fntype,
+ tree_code op);
const static std::function<tree (Context *, TyTy::FnType *)>
unchecked_op_handler (tree_code op)
@@ -172,8 +162,8 @@ unchecked_op_handler (tree_code op)
};
}
-static inline tree
-copy_handler_inner (Context *ctx, TyTy::FnType *fntype, bool overlaps);
+static inline tree copy_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ bool overlaps);
const static std::function<tree (Context *, TyTy::FnType *)>
copy_handler (bool overlaps)
@@ -183,8 +173,8 @@ copy_handler (bool overlaps)
};
}
-static inline tree
-expect_handler_inner (Context *ctx, TyTy::FnType *fntype, bool likely);
+static inline tree expect_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ bool likely);
const static std::function<tree (Context *, TyTy::FnType *)>
expect_handler (bool likely)
@@ -194,8 +184,8 @@ expect_handler (bool likely)
};
}
-static tree
-try_handler_inner (Context *ctx, TyTy::FnType *fntype, bool is_new_api);
+static tree try_handler_inner (Context *ctx, TyTy::FnType *fntype,
+ bool is_new_api);
const static std::function<tree (Context *, TyTy::FnType *)>
try_handler (bool is_new_api)
@@ -447,8 +437,8 @@ sizeof_handler (Context *ctx, TyTy::FnType *fntype)
// get the template parameter type tree fn size_of<T>();
rust_assert (fntype->get_num_substitutions () == 1);
auto &param_mapping = fntype->get_substs ().at (0);
- const TyTy::ParamType *param_tyty = param_mapping.get_param_ty ();
- TyTy::BaseType *resolved_tyty = param_tyty->resolve ();
+ const auto param_tyty = param_mapping.get_param_ty ();
+ auto resolved_tyty = param_tyty->resolve ();
tree template_parameter_type
= TyTyResolveCompile::compile (ctx, resolved_tyty);
@@ -653,8 +643,8 @@ op_with_overflow_inner (Context *ctx, TyTy::FnType *fntype, tree_code op)
rust_assert (fntype->get_num_substitutions () == 1);
auto &param_mapping = fntype->get_substs ().at (0);
- const TyTy::ParamType *param_tyty = param_mapping.get_param_ty ();
- TyTy::BaseType *resolved_tyty = param_tyty->resolve ();
+ const auto param_tyty = param_mapping.get_param_ty ();
+ auto resolved_tyty = param_tyty->resolve ();
tree template_parameter_type
= TyTyResolveCompile::compile (ctx, resolved_tyty);
@@ -1089,8 +1079,8 @@ uninit_handler (Context *ctx, TyTy::FnType *fntype)
// get the template parameter type tree fn uninit<T>();
rust_assert (fntype->get_num_substitutions () == 1);
auto &param_mapping = fntype->get_substs ().at (0);
- const TyTy::ParamType *param_tyty = param_mapping.get_param_ty ();
- TyTy::BaseType *resolved_tyty = param_tyty->resolve ();
+ const auto param_tyty = param_mapping.get_param_ty ();
+ auto resolved_tyty = param_tyty->resolve ();
tree template_parameter_type
= TyTyResolveCompile::compile (ctx, resolved_tyty);
@@ -1154,8 +1144,8 @@ move_val_init_handler (Context *ctx, TyTy::FnType *fntype)
// get the template parameter type tree fn size_of<T>();
rust_assert (fntype->get_num_substitutions () == 1);
auto &param_mapping = fntype->get_substs ().at (0);
- const TyTy::ParamType *param_tyty = param_mapping.get_param_ty ();
- TyTy::BaseType *resolved_tyty = param_tyty->resolve ();
+ auto param_tyty = param_mapping.get_param_ty ();
+ auto resolved_tyty = param_tyty->resolve ();
tree template_parameter_type
= TyTyResolveCompile::compile (ctx, resolved_tyty);
@@ -1322,7 +1312,7 @@ try_handler_inner (Context *ctx, TyTy::FnType *fntype, bool is_new_api)
if (is_new_api)
{
- auto ret_type = TyTyResolveCompile::get_unit_type ();
+ auto ret_type = TyTyResolveCompile::get_unit_type (ctx);
auto ret_expr = Backend::constructor_expression (ret_type, false, {}, -1,
UNDEF_LOCATION);
normal_return_stmt
diff --git a/gcc/rust/backend/rust-compile-item.cc b/gcc/rust/backend/rust-compile-item.cc
index 7ce9848..d908710 100644
--- a/gcc/rust/backend/rust-compile-item.cc
+++ b/gcc/rust/backend/rust-compile-item.cc
@@ -50,33 +50,21 @@ CompileItem::visit (HIR::StaticItem &var)
tree type = TyTyResolveCompile::compile (ctx, resolved_type);
- tl::optional<Resolver::CanonicalPath> canonical_path;
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
-
- canonical_path
- = nr_ctx.values.to_canonical_path (var.get_mappings ().get_nodeid ());
- }
- else
- {
- canonical_path = ctx->get_mappings ().lookup_canonical_path (
- var.get_mappings ().get_nodeid ());
- }
-
- rust_assert (canonical_path.has_value ());
+ Resolver::CanonicalPath canonical_path
+ = nr_ctx.to_canonical_path (var.get_mappings ().get_nodeid ());
ctx->push_const_context ();
tree value
= compile_constant_item (var.get_mappings ().get_hirid (), expr_type,
- resolved_type, *canonical_path, const_value_expr,
+ resolved_type, canonical_path, const_value_expr,
var.get_locus (), const_value_expr.get_locus ());
ctx->pop_const_context ();
- std::string name = canonical_path->get ();
- std::string asm_name = ctx->mangle_item (resolved_type, *canonical_path);
+ std::string name = canonical_path.get ();
+ std::string asm_name = ctx->mangle_item (resolved_type, canonical_path);
bool is_external = false;
bool is_hidden = false;
@@ -115,23 +103,22 @@ CompileItem::visit (HIR::ConstantItem &constant)
const_value_expr.get_mappings ().get_hirid (), &expr_type);
rust_assert (ok);
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
+
// canonical path
Resolver::CanonicalPath canonical_path
- = Resolver::CanonicalPath::create_empty ();
-
- if (flag_name_resolution_2_0)
+ = nr_ctx.to_canonical_path (mappings.get_nodeid ());
+ if (constant_type->is<const TyTy::FnType> ())
{
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
+ if (concrete == nullptr)
+ return;
- canonical_path
- = nr_ctx.values.to_canonical_path (mappings.get_nodeid ()).value ();
- }
- else
- {
- canonical_path = ctx->get_mappings ()
- .lookup_canonical_path (mappings.get_nodeid ())
- .value ();
+ rust_assert (concrete->get_kind () == TyTy::TypeKind::FNDEF);
+ TyTy::FnType *concrete_fnty = static_cast<TyTy::FnType *> (concrete);
+
+ concrete_fnty->override_context ();
+ constant_type = expr_type = concrete_fnty->get_return_type ();
}
ctx->push_const_context ();
@@ -210,26 +197,11 @@ CompileItem::visit (HIR::Function &function)
}
}
- Resolver::CanonicalPath canonical_path
- = Resolver::CanonicalPath::create_empty ();
-
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
-
- auto path = nr_ctx.values.to_canonical_path (
- function.get_mappings ().get_nodeid ());
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- canonical_path = path.value ();
- }
- else
- {
- auto path = ctx->get_mappings ().lookup_canonical_path (
- function.get_mappings ().get_nodeid ());
-
- canonical_path = *path;
- }
+ Resolver::CanonicalPath canonical_path
+ = nr_ctx.to_canonical_path (function.get_mappings ().get_nodeid ());
const std::string asm_name = ctx->mangle_item (fntype, canonical_path);
@@ -252,8 +224,12 @@ CompileItem::visit (HIR::Function &function)
if (function.get_qualifiers ().is_const ())
ctx->push_const_context ();
+ auto lookup_root_item = ctx->get_mappings ().lookup_hir_item (
+ function.get_mappings ().get_hirid ());
+ bool is_root_item = lookup_root_item.has_value ();
tree fndecl
- = compile_function (function.get_function_name ().as_string (),
+ = compile_function (is_root_item,
+ function.get_function_name ().as_string (),
function.get_self_param (),
function.get_function_params (),
function.get_qualifiers (), function.get_visibility (),
@@ -296,5 +272,65 @@ CompileItem::visit (HIR::Module &module)
CompileItem::compile (item.get (), ctx);
}
+void
+CompileItem::visit (HIR::TupleStruct &tuple_struct_decl)
+{
+ TyTy::BaseType *lookup = nullptr;
+ if (!ctx->get_tyctx ()->lookup_type (
+ tuple_struct_decl.get_mappings ().get_hirid (), &lookup))
+ {
+ rust_error_at (tuple_struct_decl.get_locus (), "failed to resolve type");
+ return;
+ }
+
+ if (lookup->is_concrete ())
+ TyTyResolveCompile::compile (ctx, lookup);
+}
+
+void
+CompileItem::visit (HIR::Enum &enum_decl)
+{
+ TyTy::BaseType *lookup = nullptr;
+ if (!ctx->get_tyctx ()->lookup_type (enum_decl.get_mappings ().get_hirid (),
+ &lookup))
+ {
+ rust_error_at (enum_decl.get_locus (), "failed to resolve type");
+ return;
+ }
+
+ if (lookup->is_concrete ())
+ TyTyResolveCompile::compile (ctx, lookup);
+}
+
+void
+CompileItem::visit (HIR::Union &union_decl)
+{
+ TyTy::BaseType *lookup = nullptr;
+ if (!ctx->get_tyctx ()->lookup_type (union_decl.get_mappings ().get_hirid (),
+ &lookup))
+ {
+ rust_error_at (union_decl.get_locus (), "failed to resolve type");
+ return;
+ }
+
+ if (lookup->is_concrete ())
+ TyTyResolveCompile::compile (ctx, lookup);
+}
+
+void
+CompileItem::visit (HIR::StructStruct &struct_decl)
+{
+ TyTy::BaseType *lookup = nullptr;
+ if (!ctx->get_tyctx ()->lookup_type (struct_decl.get_mappings ().get_hirid (),
+ &lookup))
+ {
+ rust_error_at (struct_decl.get_locus (), "failed to resolve type");
+ return;
+ }
+
+ if (lookup->is_concrete ())
+ TyTyResolveCompile::compile (ctx, lookup);
+}
+
} // namespace Compile
} // namespace Rust
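The new visit overloads above emit the backend layout for concrete (non-generic) ADTs as soon as the item itself is compiled, instead of deferring until first use. A minimal Rust sketch of the four item kinds covered (names invented for illustration, not taken from the patch or its testsuite):

    struct Point { x: i32, y: i32 }   // StructStruct
    struct Pair(i32, i32);            // TupleStruct
    enum Shape { Unit, Square(i32) }  // Enum
    union Bits { i: i32, f: f32 }     // Union

    fn main() {
        let p = Point { x: 1, y: 2 };
        let q = Pair(p.x, p.y);
        let s = Shape::Square(q.0);
        let b = Bits { i: 3 };
        let i = unsafe { b.i };       // reading a union field needs unsafe
        let _ = (s, i);
    }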
diff --git a/gcc/rust/backend/rust-compile-item.h b/gcc/rust/backend/rust-compile-item.h
index eccb040..56baaab 100644
--- a/gcc/rust/backend/rust-compile-item.h
+++ b/gcc/rust/backend/rust-compile-item.h
@@ -35,10 +35,6 @@ public:
{
CompileItem compiler (ctx, concrete, ref_locus);
item->accept_vis (compiler);
-
- if (compiler.reference == error_mark_node)
- rust_debug ("failed to compile item: %s", item->as_string ().c_str ());
-
return compiler.reference;
}
@@ -48,9 +44,12 @@ public:
void visit (HIR::ImplBlock &impl_block) override;
void visit (HIR::ExternBlock &extern_block) override;
void visit (HIR::Module &module) override;
+ void visit (HIR::TupleStruct &tuple_struct) override;
+ void visit (HIR::Enum &enum_decl) override;
+ void visit (HIR::Union &union_decl) override;
+ void visit (HIR::StructStruct &struct_decl) override;
// Empty visit for unused Stmt HIR nodes.
- void visit (HIR::TupleStruct &) override {}
void visit (HIR::EnumItem &) override {}
void visit (HIR::EnumItemTuple &) override {}
void visit (HIR::EnumItemStruct &) override {}
@@ -61,9 +60,6 @@ public:
void visit (HIR::ExternCrate &) override {}
void visit (HIR::UseDeclaration &) override {}
void visit (HIR::TypeAlias &) override {}
- void visit (HIR::StructStruct &) override {}
- void visit (HIR::Enum &) override {}
- void visit (HIR::Union &) override {}
void visit (HIR::Trait &) override {}
void visit (HIR::EmptyStmt &) override {}
void visit (HIR::LetStmt &) override {}
diff --git a/gcc/rust/backend/rust-compile-pattern.cc b/gcc/rust/backend/rust-compile-pattern.cc
index e83717b..af5f453 100644
--- a/gcc/rust/backend/rust-compile-pattern.cc
+++ b/gcc/rust/backend/rust-compile-pattern.cc
@@ -22,6 +22,12 @@
#include "rust-constexpr.h"
#include "rust-compile-type.h"
#include "print-tree.h"
+#include "rust-diagnostics.h"
+#include "rust-hir-pattern-abstract.h"
+#include "rust-hir-pattern.h"
+#include "rust-system.h"
+#include "rust-tyty.h"
+#include "tree.h"
namespace Rust {
namespace Compile {
@@ -81,6 +87,8 @@ CompilePatternCheckExpr::visit (HIR::LiteralPattern &pattern)
auto litexpr = std::make_unique<HIR::LiteralExpr> (
HIR::LiteralExpr (pattern.get_mappings (), pattern.get_literal (),
pattern.get_locus (), std::vector<AST::Attribute> ()));
+ if (pattern.get_has_minus ())
+ litexpr->set_negative ();
// Note: Floating point literals are currently accepted but will likely be
// forbidden in LiteralPatterns in a future version of Rust.
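get_has_minus/set_negative above carry a leading minus on a literal pattern through to the compiled comparison. Illustrative surface syntax (example invented, not from the patch):

    fn classify(x: i32) -> &'static str {
        match x {
            -1 => "minus one",        // negative literal pattern
            0 => "zero",
            _ => "something else",
        }
    }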
@@ -107,17 +115,21 @@ compile_range_pattern_bound (HIR::RangePatternBound &bound,
tree result = NULL_TREE;
switch (bound.get_bound_type ())
{
- case HIR::RangePatternBound::RangePatternBoundType::LITERAL: {
+ case HIR::RangePatternBound::RangePatternBoundType::LITERAL:
+ {
auto &ref = static_cast<HIR::RangePatternBoundLiteral &> (bound);
HIR::LiteralExpr litexpr (mappings, ref.get_literal (), locus,
std::vector<AST::Attribute> ());
+ if (ref.get_has_minus ())
+ litexpr.set_negative ();
result = CompileExpr::Compile (litexpr, ctx);
}
break;
- case HIR::RangePatternBound::RangePatternBoundType::PATH: {
+ case HIR::RangePatternBound::RangePatternBoundType::PATH:
+ {
auto &ref = static_cast<HIR::RangePatternBoundPath &> (bound);
result = ResolvePathRef::Compile (ref.get_path (), ctx);
@@ -127,7 +139,8 @@ compile_range_pattern_bound (HIR::RangePatternBound &bound,
}
break;
- case HIR::RangePatternBound::RangePatternBoundType::QUALPATH: {
+ case HIR::RangePatternBound::RangePatternBoundType::QUALPATH:
+ {
auto &ref = static_cast<HIR::RangePatternBoundQualPath &> (bound);
result = ResolvePathRef::Compile (ref.get_qualified_path (), ctx);
@@ -150,13 +163,39 @@ CompilePatternCheckExpr::visit (HIR::RangePattern &pattern)
pattern.get_mappings (),
pattern.get_locus (), ctx);
+ rust_assert (
+ (TREE_CODE (upper) == REAL_CST && TREE_CODE (lower) == REAL_CST)
+ || (TREE_CODE (upper) == INTEGER_CST && TREE_CODE (lower) == INTEGER_CST));
+
+ bool error_E0579 = false;
+ if (TREE_CODE (upper) == REAL_CST)
+ {
+ const REAL_VALUE_TYPE *upper_r = TREE_REAL_CST_PTR (upper);
+ const REAL_VALUE_TYPE *lower_r = TREE_REAL_CST_PTR (lower);
+ if (real_compare (GE_EXPR, lower_r, upper_r))
+ error_E0579 = true;
+ }
+ else if (TREE_CODE (upper) == INTEGER_CST)
+ {
+ auto upper_wi = wi::to_wide (upper).to_shwi ();
+ auto lower_wi = wi::to_wide (lower).to_shwi ();
+ if (lower_wi >= upper_wi)
+ error_E0579 = true;
+ }
+
+ if (error_E0579)
+ rust_error_at (pattern.get_locus (), ErrorCode::E0579,
+ "lower range bound must be less than upper");
+
+ ComparisonOperator upper_cmp = pattern.is_inclusive_range ()
+ ? ComparisonOperator::LESS_OR_EQUAL
+ : ComparisonOperator::LESS_THAN;
tree check_lower
= Backend::comparison_expression (ComparisonOperator::GREATER_OR_EQUAL,
match_scrutinee_expr, lower,
pattern.get_locus ());
tree check_upper
- = Backend::comparison_expression (ComparisonOperator::LESS_OR_EQUAL,
- match_scrutinee_expr, upper,
+ = Backend::comparison_expression (upper_cmp, match_scrutinee_expr, upper,
pattern.get_locus ());
check_expr = Backend::arithmetic_or_logical_expression (
ArithmeticOrLogicalOperator::BITWISE_AND, check_lower, check_upper,
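The hunk above selects LESS_THAN for exclusive range patterns and LESS_OR_EQUAL for inclusive ones, and reports E0579 for empty ranges. A hedged sketch of the corresponding source forms, assuming the frontend accepts exclusive range patterns (example invented):

    fn bucket(x: i32) -> &'static str {
        match x {
            -10..=-1 => "negative",   // inclusive bounds, with a leading minus
            0..10 => "small",         // exclusive upper bound
            _ => "large",
        }
    }
    // A reversed range such as `10..0` is rejected with E0579:
    // "lower range bound must be less than upper".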
@@ -204,6 +243,7 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
rust_assert (adt->number_of_variants () > 0);
TyTy::VariantDef *variant = nullptr;
+ tree variant_accesser_expr = nullptr;
if (adt->is_enum ())
{
// lookup the variant
@@ -218,9 +258,7 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
// find expected discriminant
// // need to access qualifier the field, if we use QUAL_UNION_TYPE this
- // // would be DECL_QUALIFIER i think. For now this will just access the
- // // first record field and its respective qualifier because it will
- // // always be set because this is all a big special union
+ // // would be DECL_QUALIFIER, I think.
HIR::Expr &discrim_expr = variant->get_discriminant ();
tree discrim_expr_node = CompileExpr::Compile (discrim_expr, ctx);
@@ -229,6 +267,14 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
= Backend::struct_field_expression (match_scrutinee_expr, 0,
pattern.get_path ().get_locus ());
+ // access variant data
+ tree scrutinee_union_expr
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ pattern.get_path ().get_locus ());
+ variant_accesser_expr
+ = Backend::struct_field_expression (scrutinee_union_expr, variant_index,
+ pattern.get_path ().get_locus ());
+
check_expr
= Backend::comparison_expression (ComparisonOperator::EQUAL,
scrutinee_expr_qualifier_expr,
@@ -240,6 +286,7 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
else
{
variant = adt->get_variants ().at (0);
+ variant_accesser_expr = match_scrutinee_expr;
check_expr = boolean_true_node;
}
@@ -248,13 +295,15 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
{
switch (field->get_item_type ())
{
- case HIR::StructPatternField::ItemType::TUPLE_PAT: {
+ case HIR::StructPatternField::ItemType::TUPLE_PAT:
+ {
// TODO
rust_unreachable ();
}
break;
- case HIR::StructPatternField::ItemType::IDENT_PAT: {
+ case HIR::StructPatternField::ItemType::IDENT_PAT:
+ {
HIR::StructPatternFieldIdentPat &ident
= static_cast<HIR::StructPatternFieldIdentPat &> (*field.get ());
@@ -263,11 +312,8 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
nullptr, &offs);
rust_assert (ok);
- // we may be offsetting by + 1 here since the first field in the
- // record is always the discriminator
- offs += adt->is_enum ();
tree field_expr
- = Backend::struct_field_expression (match_scrutinee_expr, offs,
+ = Backend::struct_field_expression (variant_accesser_expr, offs,
ident.get_locus ());
tree check_expr_sub
@@ -279,7 +325,8 @@ CompilePatternCheckExpr::visit (HIR::StructPattern &pattern)
}
break;
- case HIR::StructPatternField::ItemType::IDENT: {
+ case HIR::StructPatternField::ItemType::IDENT:
+ {
// ident pattern always matches - do nothing
}
break;
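With the payload now read through field 1 of the scrutinee plus the variant index, fields of an enum struct variant are checked inside the matching variant record instead of at an offset past the discriminant. An invented Rust input exercising both the IDENT and IDENT_PAT field forms:

    enum Message {
        Quit,
        Move { x: i32, y: i32 },
    }

    fn x_if_on_axis(m: Message) -> i32 {
        match m {
            Message::Move { x, y: 0 } => x,           // `x` is IDENT, `y: 0` is IDENT_PAT
            Message::Move { .. } | Message::Quit => 0,
        }
    }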
@@ -338,36 +385,38 @@ CompilePatternCheckExpr::visit (HIR::TupleStructPattern &pattern)
HIR::TupleStructItems &items = pattern.get_items ();
switch (items.get_item_type ())
{
- case HIR::TupleStructItems::RANGED: {
- // TODO
- rust_unreachable ();
- }
- break;
+ case HIR::TupleStructItems::HAS_REST:
+ {
+ HIR::TupleStructItemsHasRest &items_has_rest
+ = static_cast<HIR::TupleStructItemsHasRest &> (items);
+ size_t num_patterns = items_has_rest.get_lower_patterns ().size ()
+ + items_has_rest.get_upper_patterns ().size ();
- case HIR::TupleStructItems::MULTIPLE: {
- HIR::TupleStructItemsNoRange &items_no_range
- = static_cast<HIR::TupleStructItemsNoRange &> (items);
-
- rust_assert (items_no_range.get_patterns ().size ()
- == variant->num_fields ());
+ // enum cases shouldn't reach here
+ rust_assert (num_patterns <= variant->num_fields ()
+ && (!adt->is_enum ()));
size_t tuple_field_index = 0;
- for (auto &pattern : items_no_range.get_patterns ())
+ for (auto &pattern : items_has_rest.get_lower_patterns ())
{
- // find payload union field of scrutinee
- tree payload_ref
- = Backend::struct_field_expression (match_scrutinee_expr, 1,
- pattern->get_locus ());
-
- tree variant_ref
- = Backend::struct_field_expression (payload_ref, variant_index,
+ tree field_expr
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
pattern->get_locus ());
-
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern, field_expr, ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern->get_locus ());
+ }
+ tuple_field_index = variant->num_fields ()
+ - items_has_rest.get_upper_patterns ().size ();
+ for (auto &pattern : items_has_rest.get_upper_patterns ())
+ {
tree field_expr
- = Backend::struct_field_expression (variant_ref,
+ = Backend::struct_field_expression (match_scrutinee_expr,
tuple_field_index++,
pattern->get_locus ());
-
tree check_expr_sub
= CompilePatternCheckExpr::Compile (*pattern, field_expr, ctx);
check_expr = Backend::arithmetic_or_logical_expression (
@@ -376,6 +425,64 @@ CompilePatternCheckExpr::visit (HIR::TupleStructPattern &pattern)
}
}
break;
+
+ case HIR::TupleStructItems::NO_REST:
+ {
+ HIR::TupleStructItemsNoRest &items_no_range
+ = static_cast<HIR::TupleStructItemsNoRest &> (items);
+
+ rust_assert (items_no_range.get_patterns ().size ()
+ == variant->num_fields ());
+
+ if (adt->is_enum ())
+ {
+ size_t tuple_field_index = 0;
+ for (auto &pattern : items_no_range.get_patterns ())
+ {
+ // find payload union field of scrutinee
+ tree payload_ref
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ pattern->get_locus ());
+
+ tree variant_ref
+ = Backend::struct_field_expression (payload_ref,
+ variant_index,
+ pattern->get_locus ());
+
+ tree field_expr
+ = Backend::struct_field_expression (variant_ref,
+ tuple_field_index++,
+ pattern->get_locus ());
+
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern, field_expr,
+ ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern->get_locus ());
+ }
+ }
+ else
+ {
+ // For non-enum TupleStructPatterns
+ size_t tuple_field_index = 0;
+ for (auto &pattern : items_no_range.get_patterns ())
+ {
+ tree field_expr
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
+ pattern->get_locus ());
+
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern, field_expr,
+ ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern->get_locus ());
+ }
+ }
+ break;
+ }
}
}
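The HAS_REST/NO_REST split above corresponds to tuple-struct patterns with and without a `..` rest, where the rest form is only handled for non-enum tuple structs. A minimal invented sketch:

    struct Triple(i32, i32, i32);

    fn check(t: Triple) -> bool {
        match t {
            Triple(1, .., 3) => true,           // HAS_REST: lower and upper patterns
            Triple(a, b, c) => a + b + c == 0,  // NO_REST: one pattern per field
        }
    }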
@@ -386,15 +493,59 @@ CompilePatternCheckExpr::visit (HIR::TuplePattern &pattern)
switch (pattern.get_items ().get_item_type ())
{
- case HIR::TuplePatternItems::RANGED: {
- // TODO
- gcc_unreachable ();
+ case HIR::TuplePatternItems::HAS_REST:
+ {
+ auto &items
+ = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ());
+ size_t tuple_field_index = 0;
+
+ // lookup the type to find out number of fields
+ TyTy::BaseType *ty = nullptr;
+ bool ok = ctx->get_tyctx ()->lookup_type (
+ pattern.get_mappings ().get_hirid (), &ty);
+ rust_assert (ok);
+ rust_assert (ty->get_kind () == TyTy::TypeKind::TUPLE);
+
+ // compile check expr for lower patterns
+ for (auto &pat : items.get_lower_patterns ())
+ {
+ tree field_expr
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
+ pat->get_locus ());
+
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pat, field_expr, ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pat->get_locus ());
+ }
+
+ // skip the fields that are not checked
+ tuple_field_index = static_cast<TyTy::TupleType &> (*ty).num_fields ()
+ - items.get_upper_patterns ().size ();
+
+ // compile check expr for upper patterns
+ for (auto &pat : items.get_upper_patterns ())
+ {
+ tree field_expr
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
+ pat->get_locus ());
+
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pat, field_expr, ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pat->get_locus ());
+ }
}
break;
- case HIR::TuplePatternItems::MULTIPLE: {
- auto &items = static_cast<HIR::TuplePatternItemsMultiple &> (
- pattern.get_items ());
+ case HIR::TuplePatternItems::NO_REST:
+ {
+ auto &items
+ = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ());
size_t tuple_field_index = 0;
for (auto &pat : items.get_patterns ())
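The same split applies to plain tuple patterns: lower patterns index fields from the front, upper patterns from the back, using the field count recorded in the tuple type. For example (invented):

    fn ends(t: (i32, i32, i32, i32)) -> i32 {
        match t {
            (0, .., last) => last,          // HAS_REST
            (a, b, c, d) => a + b + c + d,  // NO_REST
        }
    }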
@@ -414,6 +565,216 @@ CompilePatternCheckExpr::visit (HIR::TuplePattern &pattern)
}
}
+void
+CompilePatternCheckExpr::visit (HIR::IdentifierPattern &pattern)
+{
+ if (pattern.has_subpattern ())
+ {
+ check_expr = CompilePatternCheckExpr::Compile (pattern.get_subpattern (),
+ match_scrutinee_expr, ctx);
+ }
+ else
+ {
+ check_expr = boolean_true_node;
+ }
+}
+
+void
+CompilePatternCheckExpr::visit (HIR::SlicePattern &pattern)
+{
+ check_expr = boolean_true_node;
+
+ // lookup the type
+ TyTy::BaseType *lookup = nullptr;
+ bool ok
+ = ctx->get_tyctx ()->lookup_type (pattern.get_mappings ().get_hirid (),
+ &lookup);
+ rust_assert (ok);
+
+ // the scrutinee must be an ArrayType, a SliceType, or a reference to a
+ // slice; this should already be confirmed by type checking
+ rust_assert (lookup->get_kind () == TyTy::TypeKind::ARRAY
+ || lookup->get_kind () == TyTy::TypeKind::SLICE
+ || lookup->get_kind () == TyTy::REF);
+
+ // function ptr that points to either array_index_expression or
+ // slice_index_expression depending on the scrutinee's type
+ tree (*scrutinee_index_expr_func) (tree, tree, location_t) = nullptr;
+
+ switch (lookup->get_kind ())
+ {
+ case TyTy::TypeKind::ARRAY:
+ scrutinee_index_expr_func = Backend::array_index_expression;
+ break;
+ case TyTy::TypeKind::SLICE:
+ rust_sorry_at (
+ pattern.get_locus (),
+ "SlicePattern matching against non-ref slices are not yet supported");
+ break;
+ case TyTy::TypeKind::REF:
+ {
+ rust_assert (RS_DST_FLAG_P (TREE_TYPE (match_scrutinee_expr)));
+ scrutinee_index_expr_func = Backend::slice_index_expression;
+ tree size_field
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ pattern.get_locus ());
+
+ // for slices, generate a dynamic size comparison expression tree
+ // because size checking is done at runtime.
+ switch (pattern.get_items ().get_item_type ())
+ {
+ case HIR::SlicePatternItems::ItemType::NO_REST:
+ {
+ auto &items = static_cast<HIR::SlicePatternItemsNoRest &> (
+ pattern.get_items ());
+ check_expr = Backend::comparison_expression (
+ ComparisonOperator::EQUAL, size_field,
+ build_int_cst (size_type_node, items.get_patterns ().size ()),
+ pattern.get_locus ());
+ }
+ break;
+ case HIR::SlicePatternItems::ItemType::HAS_REST:
+ {
+ auto &items = static_cast<HIR::SlicePatternItemsHasRest &> (
+ pattern.get_items ());
+ auto pattern_min_cap = items.get_lower_patterns ().size ()
+ + items.get_upper_patterns ().size ();
+ check_expr = Backend::comparison_expression (
+ ComparisonOperator::GREATER_OR_EQUAL, size_field,
+ build_int_cst (size_type_node, pattern_min_cap),
+ pattern.get_locus ());
+ }
+ break;
+ }
+ }
+ break;
+ default:
+ rust_unreachable ();
+ }
+
+ rust_assert (scrutinee_index_expr_func != nullptr);
+
+ // Generate tree to compare every element within array/slice
+ size_t element_index = 0;
+ switch (pattern.get_items ().get_item_type ())
+ {
+ case HIR::SlicePatternItems::ItemType::NO_REST:
+ {
+ auto &items
+ = static_cast<HIR::SlicePatternItemsNoRest &> (pattern.get_items ());
+ for (auto &pattern_member : items.get_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr, index_tree,
+ pattern.get_locus ());
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern_member, element_expr,
+ ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern.get_locus ());
+ }
+ break;
+ }
+ case HIR::SlicePatternItems::ItemType::HAS_REST:
+ {
+ auto &items
+ = static_cast<HIR::SlicePatternItemsHasRest &> (pattern.get_items ());
+ for (auto &pattern_member : items.get_lower_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr, index_tree,
+ pattern.get_locus ());
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern_member, element_expr,
+ ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern.get_locus ());
+ }
+
+ // handle codegen for the upper patterns differently depending on the scrutinee type
+ switch (lookup->get_kind ())
+ {
+ case TyTy::TypeKind::ARRAY:
+ {
+ // for array type scrutinee, we can simply get the capacity as a
+ // const and calculate how many elements to skip
+ auto array_ty = static_cast<TyTy::ArrayType *> (lookup);
+ auto capacity_ty = array_ty->get_capacity ();
+
+ rust_assert (capacity_ty->get_kind () == TyTy::TypeKind::CONST);
+ auto *capacity_const = capacity_ty->as_const_type ();
+ rust_assert (capacity_const->const_kind ()
+ == TyTy::BaseConstType::ConstKind::Value);
+ auto &capacity_value
+ = *static_cast<TyTy::ConstValueType *> (capacity_const);
+ auto cap_tree = capacity_value.get_value ();
+
+ rust_assert (!error_operand_p (cap_tree));
+
+ size_t cap_wi = (size_t) wi::to_wide (cap_tree).to_uhwi ();
+ element_index = cap_wi - items.get_upper_patterns ().size ();
+ for (auto &pattern_member : items.get_upper_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr,
+ index_tree,
+ pattern.get_locus ());
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern_member,
+ element_expr, ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern.get_locus ());
+ }
+ }
+ break;
+ case TyTy::TypeKind::REF:
+ {
+ // for slice type scrutinees, the size is dynamic, so the number of
+ // elements to skip is calculated at runtime
+ tree slice_size
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ pattern.get_locus ());
+ tree upper_patterns_size = Backend::size_constant_expression (
+ items.get_upper_patterns ().size ());
+ tree index_tree = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::SUBTRACT, slice_size,
+ upper_patterns_size, pattern.get_locus ());
+ for (auto &pattern_member : items.get_upper_patterns ())
+ {
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr,
+ index_tree,
+ pattern.get_locus ());
+ tree check_expr_sub
+ = CompilePatternCheckExpr::Compile (*pattern_member,
+ element_expr, ctx);
+ check_expr = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::BITWISE_AND, check_expr,
+ check_expr_sub, pattern.get_locus ());
+ index_tree = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::ADD, index_tree,
+ Backend::size_constant_expression (1),
+ pattern.get_locus ());
+ }
+ }
+ break;
+ default:
+ rust_unreachable ();
+ }
+ }
+ break;
+ }
+}
+
// setup the bindings
void
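The new SlicePattern check above has two shapes: a compile-time element count for array scrutinees, and a runtime comparison against the fat pointer's size field for &[T] scrutinees (by-value slice scrutinees still hit the sorry). A hedged, invented sketch of both:

    fn first_of_array(a: [i32; 4]) -> i32 {
        match a {
            [x, ..] => x,                       // array: length known at compile time
        }
    }

    fn describe(s: &[i32]) -> i32 {
        match s {
            [] => 0,
            [x] => *x,
            [first, .., last] => first + last,  // length checked at runtime
        }
    }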
@@ -449,23 +810,54 @@ CompilePatternBindings::visit (HIR::TupleStructPattern &pattern)
HIR::TupleStructItems &items = pattern.get_items ();
switch (items.get_item_type ())
{
- case HIR::TupleStructItems::RANGED: {
- // TODO
- rust_unreachable ();
+ case HIR::TupleStructItems::HAS_REST:
+ {
+ HIR::TupleStructItemsHasRest &items_has_rest
+ = static_cast<HIR::TupleStructItemsHasRest &> (items);
+ size_t num_patterns = items_has_rest.get_lower_patterns ().size ()
+ + items_has_rest.get_upper_patterns ().size ();
+
+ // enum cases shouldn't reach here
+ rust_assert (num_patterns <= variant->num_fields ()
+ && (!adt->is_enum ()));
+
+ size_t tuple_field_index = 0;
+ for (auto &pattern : items_has_rest.get_lower_patterns ())
+ {
+ tree binding
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
+ pattern->get_locus ());
+
+ CompilePatternBindings::Compile (*pattern, binding, ctx);
+ }
+
+ tuple_field_index = variant->num_fields ()
+ - items_has_rest.get_upper_patterns ().size ();
+
+ for (auto &pattern : items_has_rest.get_upper_patterns ())
+ {
+ tree binding
+ = Backend::struct_field_expression (match_scrutinee_expr,
+ tuple_field_index++,
+ pattern->get_locus ());
+
+ CompilePatternBindings::Compile (*pattern, binding, ctx);
+ }
}
break;
- case HIR::TupleStructItems::MULTIPLE: {
- HIR::TupleStructItemsNoRange &items_no_range
- = static_cast<HIR::TupleStructItemsNoRange &> (items);
-
- rust_assert (items_no_range.get_patterns ().size ()
+ case HIR::TupleStructItems::NO_REST:
+ {
+ HIR::TupleStructItemsNoRest &items_no_rest
+ = static_cast<HIR::TupleStructItemsNoRest &> (items);
+ rust_assert (items_no_rest.get_patterns ().size ()
== variant->num_fields ());
if (adt->is_enum ())
{
size_t tuple_field_index = 0;
- for (auto &pattern : items_no_range.get_patterns ())
+ for (auto &pattern : items_no_rest.get_patterns ())
{
tree payload_accessor_union
= Backend::struct_field_expression (match_scrutinee_expr, 1,
@@ -487,12 +879,10 @@ CompilePatternBindings::visit (HIR::TupleStructPattern &pattern)
else
{
size_t tuple_field_index = 0;
- for (auto &pattern : items_no_range.get_patterns ())
+ for (auto &pattern : items_no_rest.get_patterns ())
{
- tree variant_accessor = match_scrutinee_expr;
-
tree binding
- = Backend::struct_field_expression (variant_accessor,
+ = Backend::struct_field_expression (match_scrutinee_expr,
tuple_field_index++,
pattern->get_locus ());
@@ -504,6 +894,71 @@ CompilePatternBindings::visit (HIR::TupleStructPattern &pattern)
}
}
+tree
+CompilePatternBindings::make_struct_access (TyTy::ADTType *adt,
+ TyTy::VariantDef *variant,
+ const Identifier &ident,
+ int variant_index)
+{
+ size_t offs = 0;
+ auto ok = variant->lookup_field (ident.as_string (), nullptr, &offs);
+ rust_assert (ok);
+
+ if (adt->is_enum ())
+ {
+ tree payload_accessor_union
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ ident.get_locus ());
+
+ tree variant_accessor
+ = Backend::struct_field_expression (payload_accessor_union,
+ variant_index, ident.get_locus ());
+
+ return Backend::struct_field_expression (variant_accessor, offs,
+ ident.get_locus ());
+ }
+ else
+ {
+ tree variant_accessor = match_scrutinee_expr;
+
+ return Backend::struct_field_expression (variant_accessor, offs,
+ ident.get_locus ());
+ }
+}
+
+void
+CompilePatternBindings::handle_struct_pattern_ident (
+ HIR::StructPatternField &pat, TyTy::ADTType *adt, TyTy::VariantDef *variant,
+ int variant_index)
+{
+ HIR::StructPatternFieldIdent &ident
+ = static_cast<HIR::StructPatternFieldIdent &> (pat);
+
+ auto identifier = ident.get_identifier ();
+ tree binding = make_struct_access (adt, variant, identifier, variant_index);
+
+ ctx->insert_pattern_binding (ident.get_mappings ().get_hirid (), binding);
+}
+
+void
+CompilePatternBindings::handle_struct_pattern_ident_pat (
+ HIR::StructPatternField &pat, TyTy::ADTType *adt, TyTy::VariantDef *variant,
+ int variant_index)
+{
+ auto &pattern = static_cast<HIR::StructPatternFieldIdentPat &> (pat);
+
+ tree binding = make_struct_access (adt, variant, pattern.get_identifier (),
+ variant_index);
+ CompilePatternBindings::Compile (pattern.get_pattern (), binding, ctx);
+}
+
+void
+CompilePatternBindings::handle_struct_pattern_tuple_pat (
+ HIR::StructPatternField &pat)
+{
+ rust_unreachable ();
+}
+
void
CompilePatternBindings::visit (HIR::StructPattern &pattern)
{
@@ -531,62 +986,23 @@ CompilePatternBindings::visit (HIR::StructPattern &pattern)
rust_assert (ok);
}
- rust_assert (variant->get_variant_type ()
- == TyTy::VariantDef::VariantType::STRUCT);
+ rust_assert (
+ variant->get_variant_type () == TyTy::VariantDef::VariantType::STRUCT
+ || variant->get_variant_type () == TyTy::VariantDef::VariantType::TUPLE);
auto &struct_pattern_elems = pattern.get_struct_pattern_elems ();
for (auto &field : struct_pattern_elems.get_struct_pattern_fields ())
{
switch (field->get_item_type ())
{
- case HIR::StructPatternField::ItemType::TUPLE_PAT: {
- // TODO
- rust_unreachable ();
- }
+ case HIR::StructPatternField::ItemType::TUPLE_PAT:
+ handle_struct_pattern_tuple_pat (*field);
break;
-
- case HIR::StructPatternField::ItemType::IDENT_PAT: {
- // TODO
- rust_unreachable ();
- }
+ case HIR::StructPatternField::ItemType::IDENT_PAT:
+ handle_struct_pattern_ident_pat (*field, adt, variant, variant_index);
break;
-
- case HIR::StructPatternField::ItemType::IDENT: {
- HIR::StructPatternFieldIdent &ident
- = static_cast<HIR::StructPatternFieldIdent &> (*field.get ());
-
- size_t offs = 0;
- ok = variant->lookup_field (ident.get_identifier ().as_string (),
- nullptr, &offs);
- rust_assert (ok);
-
- tree binding = error_mark_node;
- if (adt->is_enum ())
- {
- tree payload_accessor_union
- = Backend::struct_field_expression (match_scrutinee_expr, 1,
- ident.get_locus ());
-
- tree variant_accessor
- = Backend::struct_field_expression (payload_accessor_union,
- variant_index,
- ident.get_locus ());
-
- binding
- = Backend::struct_field_expression (variant_accessor, offs,
- ident.get_locus ());
- }
- else
- {
- tree variant_accessor = match_scrutinee_expr;
- binding
- = Backend::struct_field_expression (variant_accessor, offs,
- ident.get_locus ());
- }
-
- ctx->insert_pattern_binding (ident.get_mappings ().get_hirid (),
- binding);
- }
+ case HIR::StructPatternField::ItemType::IDENT:
+ handle_struct_pattern_ident (*field, adt, variant, variant_index);
break;
}
}
@@ -605,6 +1021,12 @@ CompilePatternBindings::visit (HIR::ReferencePattern &pattern)
void
CompilePatternBindings::visit (HIR::IdentifierPattern &pattern)
{
+ if (pattern.has_subpattern ())
+ {
+ CompilePatternBindings::Compile (pattern.get_subpattern (),
+ match_scrutinee_expr, ctx);
+ }
+
if (!pattern.get_is_ref ())
{
ctx->insert_pattern_binding (pattern.get_mappings ().get_hirid (),
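has_subpattern above corresponds to `binding @ subpattern` forms: bindings are now set up both for the outer identifier and for anything bound inside the subpattern. For instance (invented):

    fn demo(p: (i32, i32)) -> i32 {
        match p {
            whole @ (0, y) => whole.0 + y,  // binds both `whole` and `y`
            (x, _) => x,
        }
    }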
@@ -631,10 +1053,11 @@ CompilePatternBindings::visit (HIR::TuplePattern &pattern)
switch (pattern.get_items ().get_item_type ())
{
- case HIR::TuplePatternItems::ItemType::RANGED: {
+ case HIR::TuplePatternItems::ItemType::HAS_REST:
+ {
size_t tuple_idx = 0;
auto &items
- = static_cast<HIR::TuplePatternItemsRanged &> (pattern.get_items ());
+ = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ());
auto &items_lower = items.get_lower_patterns ();
auto &items_upper = items.get_upper_patterns ();
@@ -674,10 +1097,11 @@ CompilePatternBindings::visit (HIR::TuplePattern &pattern)
return;
}
- case HIR::TuplePatternItems::ItemType::MULTIPLE: {
+ case HIR::TuplePatternItems::ItemType::NO_REST:
+ {
size_t tuple_idx = 0;
- auto &items = static_cast<HIR::TuplePatternItemsMultiple &> (
- pattern.get_items ());
+ auto &items
+ = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ());
for (auto &sub : items.get_patterns ())
{
@@ -695,12 +1119,156 @@ CompilePatternBindings::visit (HIR::TuplePattern &pattern)
return;
}
- default: {
+ default:
+ {
rust_unreachable ();
}
}
}
+void
+CompilePatternBindings::visit (HIR::SlicePattern &pattern)
+{
+ // lookup the type
+ TyTy::BaseType *lookup = nullptr;
+ bool ok
+ = ctx->get_tyctx ()->lookup_type (pattern.get_mappings ().get_hirid (),
+ &lookup);
+ rust_assert (ok);
+
+ rust_assert (lookup->get_kind () == TyTy::TypeKind::ARRAY
+ || lookup->get_kind () == TyTy::TypeKind::SLICE
+ || lookup->get_kind () == TyTy::REF);
+
+ // function ptr that points to either array_index_expression or
+ // slice_index_expression depending on the scrutinee's type
+ tree (*scrutinee_index_expr_func) (tree, tree, location_t) = nullptr;
+
+ switch (lookup->get_kind ())
+ {
+ case TyTy::TypeKind::ARRAY:
+ scrutinee_index_expr_func = Backend::array_index_expression;
+ break;
+ case TyTy::TypeKind::SLICE:
+ rust_sorry_at (pattern.get_locus (),
+ "SlicePattern matching against non-ref slices are "
+ "not yet supported");
+ break;
+ case TyTy::TypeKind::REF:
+ scrutinee_index_expr_func = Backend::slice_index_expression;
+ break;
+ default:
+ rust_unreachable ();
+ }
+
+ rust_assert (scrutinee_index_expr_func != nullptr);
+
+ size_t element_index = 0;
+
+ switch (pattern.get_items ().get_item_type ())
+ {
+ case HIR::SlicePatternItems::ItemType::NO_REST:
+ {
+ auto &items
+ = static_cast<HIR::SlicePatternItemsNoRest &> (pattern.get_items ());
+ for (auto &pattern_member : items.get_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr, index_tree,
+ pattern.get_locus ());
+ CompilePatternBindings::Compile (*pattern_member, element_expr,
+ ctx);
+ }
+ }
+ break;
+ case HIR::SlicePatternItems::ItemType::HAS_REST:
+ {
+ auto &items
+ = static_cast<HIR::SlicePatternItemsHasRest &> (pattern.get_items ());
+ for (auto &pattern_member : items.get_lower_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr, index_tree,
+ pattern.get_locus ());
+ CompilePatternBindings::Compile (*pattern_member, element_expr,
+ ctx);
+ }
+
+ // handle codegen for the upper patterns differently depending on the scrutinee type
+ switch (lookup->get_kind ())
+ {
+ case TyTy::TypeKind::ARRAY:
+ {
+ auto array_ty = static_cast<TyTy::ArrayType *> (lookup);
+ auto capacity_ty = array_ty->get_capacity ();
+
+ rust_assert (capacity_ty->get_kind () == TyTy::TypeKind::CONST);
+ auto *capacity_const = capacity_ty->as_const_type ();
+ rust_assert (capacity_const->const_kind ()
+ == TyTy::BaseConstType::ConstKind::Value);
+ auto &capacity_value
+ = *static_cast<TyTy::ConstValueType *> (capacity_const);
+ auto cap_tree = capacity_value.get_value ();
+
+ rust_assert (!error_operand_p (cap_tree));
+
+ size_t cap_wi = (size_t) wi::to_wide (cap_tree).to_uhwi ();
+ element_index = cap_wi - items.get_upper_patterns ().size ();
+ for (auto &pattern_member : items.get_upper_patterns ())
+ {
+ tree index_tree
+ = Backend::size_constant_expression (element_index++);
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr,
+ index_tree,
+ pattern.get_locus ());
+ CompilePatternBindings::Compile (*pattern_member,
+ element_expr, ctx);
+ }
+ }
+ break;
+ case TyTy::TypeKind::SLICE:
+ rust_sorry_at (pattern.get_locus (),
+ "SlicePattern matching against non-ref slices are "
+ "not yet supported");
+ break;
+ case TyTy::TypeKind::REF:
+ {
+ tree slice_size
+ = Backend::struct_field_expression (match_scrutinee_expr, 1,
+ pattern.get_locus ());
+ tree upper_patterns_size = Backend::size_constant_expression (
+ items.get_upper_patterns ().size ());
+ tree index_tree = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::SUBTRACT, slice_size,
+ upper_patterns_size, pattern.get_locus ());
+ for (auto &pattern_member : items.get_upper_patterns ())
+ {
+ tree element_expr
+ = scrutinee_index_expr_func (match_scrutinee_expr,
+ index_tree,
+ pattern.get_locus ());
+ CompilePatternBindings::Compile (*pattern_member,
+ element_expr, ctx);
+ index_tree = Backend::arithmetic_or_logical_expression (
+ ArithmeticOrLogicalOperator::ADD, index_tree,
+ Backend::size_constant_expression (1),
+ pattern.get_locus ());
+ }
+ }
+ break;
+ default:
+ rust_unreachable ();
+ }
+ }
+ break;
+ }
+}
+
//
void
@@ -710,6 +1278,11 @@ CompilePatternLet::visit (HIR::IdentifierPattern &pattern)
rust_assert (
ctx->lookup_var_decl (pattern.get_mappings ().get_hirid (), &var));
+ if (pattern.get_is_ref ())
+ {
+ init_expr = address_expression (init_expr, EXPR_LOCATION (init_expr));
+ }
+
auto fnctx = ctx->peek_fn ();
if (ty->is_unit ())
{
@@ -721,6 +1294,11 @@ CompilePatternLet::visit (HIR::IdentifierPattern &pattern)
}
else
{
+ if (pattern.has_subpattern ())
+ {
+ CompilePatternLet::Compile (&pattern.get_subpattern (), init_expr, ty,
+ rval_locus, ctx);
+ }
auto s = Backend::init_statement (fnctx.fndecl, var, init_expr);
ctx->add_statement (s);
}
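In CompilePatternLet, a `ref` identifier now takes the address of the initializer, and an attached subpattern is lowered against the same initializer. A small invented sketch of both forms in `let` position:

    fn main() {
        let ref a = 10;             // `a: &i32`, bound through the initializer's address
        let pair @ (x, y) = (1, 2); // identifier pattern with a tuple subpattern
        assert_eq!(*a + x + y, 13);
        let _ = pair;
    }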
@@ -744,21 +1322,65 @@ CompilePatternLet::visit (HIR::TuplePattern &pattern)
{
rust_assert (pattern.has_tuple_pattern_items ());
- tree tuple_type = TyTyResolveCompile::compile (ctx, ty);
+ bool has_by_ref = false;
+ auto check_refs
+ = [] (const std::vector<std::unique_ptr<HIR::Pattern>> &patterns) {
+ for (const auto &sub : patterns)
+ {
+ switch (sub->get_pattern_type ())
+ {
+ case HIR::Pattern::PatternType::IDENTIFIER:
+ {
+ auto id = static_cast<HIR::IdentifierPattern *> (sub.get ());
+ if (id->get_is_ref ())
+ return true;
+ break;
+ }
+ case HIR::Pattern::PatternType::REFERENCE:
+ return true;
+ default:
+ break;
+ }
+ }
+ return false;
+ };
+ switch (pattern.get_items ().get_item_type ())
+ {
+ case HIR::TuplePatternItems::ItemType::NO_REST:
+ {
+ auto &items
+ = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ());
+ has_by_ref = check_refs (items.get_patterns ());
+ break;
+ }
+ case HIR::TuplePatternItems::ItemType::HAS_REST:
+ {
+ auto &items
+ = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ());
+ has_by_ref = check_refs (items.get_lower_patterns ())
+ || check_refs (items.get_upper_patterns ());
+ break;
+ }
+ default:
+ break;
+ }
+
+ tree rhs_tuple_type = TYPE_MAIN_VARIANT (TREE_TYPE (init_expr));
tree init_stmt;
Bvariable *tmp_var
= Backend::temporary_variable (ctx->peek_fn ().fndecl, NULL_TREE,
- tuple_type, init_expr, false,
+ rhs_tuple_type, init_expr, has_by_ref,
pattern.get_locus (), &init_stmt);
tree access_expr = Backend::var_expression (tmp_var, pattern.get_locus ());
ctx->add_statement (init_stmt);
switch (pattern.get_items ().get_item_type ())
{
- case HIR::TuplePatternItems::ItemType::RANGED: {
+ case HIR::TuplePatternItems::ItemType::HAS_REST:
+ {
size_t tuple_idx = 0;
auto &items
- = static_cast<HIR::TuplePatternItemsRanged &> (pattern.get_items ());
+ = static_cast<HIR::TuplePatternItemsHasRest &> (pattern.get_items ());
auto &items_lower = items.get_lower_patterns ();
auto &items_upper = items.get_upper_patterns ();
@@ -799,10 +1421,11 @@ CompilePatternLet::visit (HIR::TuplePattern &pattern)
return;
}
- case HIR::TuplePatternItems::ItemType::MULTIPLE: {
+ case HIR::TuplePatternItems::ItemType::NO_REST:
+ {
size_t tuple_idx = 0;
- auto &items = static_cast<HIR::TuplePatternItemsMultiple &> (
- pattern.get_items ());
+ auto &items
+ = static_cast<HIR::TuplePatternItemsNoRest &> (pattern.get_items ());
for (auto &sub : items.get_patterns ())
{
@@ -821,7 +1444,8 @@ CompilePatternLet::visit (HIR::TuplePattern &pattern)
return;
}
- default: {
+ default:
+ {
rust_unreachable ();
}
}
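The has_by_ref scan above is passed to the destructuring temporary, presumably so that `ref` bindings get a stable, addressable object to borrow from rather than a plain copy. Invented input that triggers it:

    fn main() {
        let tup = (1, String::from("hi"), 2.5, 3);
        let (a, ref s, .., z) = tup;   // `ref` forces the temporary to be addressable
        assert_eq!(a + z, 4);
        assert_eq!(s.len(), 2);
    }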
diff --git a/gcc/rust/backend/rust-compile-pattern.h b/gcc/rust/backend/rust-compile-pattern.h
index c7a62fc..233799e 100644
--- a/gcc/rust/backend/rust-compile-pattern.h
+++ b/gcc/rust/backend/rust-compile-pattern.h
@@ -17,7 +17,9 @@
// <http://www.gnu.org/licenses/>.
#include "rust-compile-base.h"
+#include "rust-hir-pattern.h"
#include "rust-hir-visitor.h"
+#include "rust-tyty.h"
namespace Rust {
namespace Compile {
@@ -43,12 +45,10 @@ public:
void visit (HIR::StructPattern &) override;
void visit (HIR::TupleStructPattern &) override;
void visit (HIR::TuplePattern &) override;
+ void visit (HIR::IdentifierPattern &) override;
+ void visit (HIR::SlicePattern &) override;
// Always succeeds
- void visit (HIR::IdentifierPattern &) override
- {
- check_expr = boolean_true_node;
- }
void visit (HIR::WildcardPattern &) override
{
check_expr = boolean_true_node;
@@ -56,7 +56,6 @@ public:
// Empty visit for unused Pattern HIR nodes.
void visit (HIR::QualifiedPathInExpression &) override {}
- void visit (HIR::SlicePattern &) override {}
CompilePatternCheckExpr (Context *ctx, tree match_scrutinee_expr)
: HIRCompileBase (ctx), match_scrutinee_expr (match_scrutinee_expr),
@@ -78,11 +77,25 @@ public:
pattern.accept_vis (compiler);
}
+ tree make_struct_access (TyTy::ADTType *adt, TyTy::VariantDef *variant,
+ const Identifier &ident, int variant_index);
+
+ void handle_struct_pattern_ident (HIR::StructPatternField &pat,
+ TyTy::ADTType *adt,
+ TyTy::VariantDef *variant,
+ int variant_index);
+ void handle_struct_pattern_ident_pat (HIR::StructPatternField &pat,
+ TyTy::ADTType *adt,
+ TyTy::VariantDef *variant,
+ int variant_index);
+ void handle_struct_pattern_tuple_pat (HIR::StructPatternField &pat);
+
void visit (HIR::StructPattern &pattern) override;
void visit (HIR::TupleStructPattern &pattern) override;
void visit (HIR::ReferencePattern &pattern) override;
void visit (HIR::IdentifierPattern &) override;
void visit (HIR::TuplePattern &pattern) override;
+ void visit (HIR::SlicePattern &) override;
// Empty visit for unused Pattern HIR nodes.
void visit (HIR::AltPattern &) override {}
@@ -90,7 +103,6 @@ public:
void visit (HIR::PathInExpression &) override {}
void visit (HIR::QualifiedPathInExpression &) override {}
void visit (HIR::RangePattern &) override {}
- void visit (HIR::SlicePattern &) override {}
void visit (HIR::WildcardPattern &) override {}
protected:
diff --git a/gcc/rust/backend/rust-compile-resolve-path.cc b/gcc/rust/backend/rust-compile-resolve-path.cc
index 2b6880c..5f30662 100644
--- a/gcc/rust/backend/rust-compile-resolve-path.cc
+++ b/gcc/rust/backend/rust-compile-resolve-path.cc
@@ -97,15 +97,20 @@ ResolvePathRef::attempt_constructor_expression_lookup (
// this can only be for discriminant variants the others are built up
// using call-expr or struct-init
- rust_assert (variant->get_variant_type ()
- == TyTy::VariantDef::VariantType::NUM);
+ if (variant->get_variant_type () != TyTy::VariantDef::VariantType::NUM)
+ {
+ rust_error_at (expr_locus, "variant expected constructor call");
+ return error_mark_node;
+ }
// we need the actual gcc type
tree compiled_adt_type = TyTyResolveCompile::compile (ctx, adt);
// make the ctor for the union
HIR::Expr &discrim_expr = variant->get_discriminant ();
+ ctx->push_const_context ();
tree discrim_expr_node = CompileExpr::Compile (discrim_expr, ctx);
+ ctx->pop_const_context ();
tree folded_discrim_expr = fold_expr (discrim_expr_node);
tree qualifier = folded_discrim_expr;
@@ -127,10 +132,8 @@ ResolvePathRef::resolve_with_node_id (
tl::optional<HirId> hid
= ctx->get_mappings ().lookup_node_to_hir (resolved_node_id);
if (!hid.has_value ())
- {
- rust_error_at (expr_locus, "reverse call path lookup failure");
- return error_mark_node;
- }
+ return error_mark_node;
+
auto ref = hid.value ();
// might be a constant
@@ -184,18 +187,32 @@ ResolvePathRef::resolve_with_node_id (
}
}
+ // possibly a const expr value
+ if (lookup->get_kind () == TyTy::TypeKind::CONST)
+ {
+ auto d = lookup->destructure ();
+ rust_assert (d->get_kind () == TyTy::TypeKind::CONST);
+ auto c = d->as_const_type ();
+ rust_assert (c->const_kind () == TyTy::BaseConstType::ConstKind::Value);
+ auto val = static_cast<TyTy::ConstValueType *> (c);
+ return val->get_value ();
+ }
+
// Handle unit struct
+ tree resolved_item = error_mark_node;
if (lookup->get_kind () == TyTy::TypeKind::ADT)
- return attempt_constructor_expression_lookup (lookup, ctx, mappings,
- expr_locus);
+ resolved_item
+ = attempt_constructor_expression_lookup (lookup, ctx, mappings,
+ expr_locus);
+
+ if (!error_operand_p (resolved_item))
+ return resolved_item;
// let the query system figure it out
- tree resolved_item = query_compile (ref, lookup, final_segment, mappings,
- expr_locus, is_qualified_path);
+ resolved_item = query_compile (ref, lookup, final_segment, mappings,
+ expr_locus, is_qualified_path);
if (resolved_item != error_mark_node)
- {
- TREE_USED (resolved_item) = 1;
- }
+ TREE_USED (resolved_item) = 1;
return resolved_item;
}
@@ -207,36 +224,24 @@ ResolvePathRef::resolve (const HIR::PathIdentSegment &final_segment,
{
TyTy::BaseType *lookup = nullptr;
bool ok = ctx->get_tyctx ()->lookup_type (mappings.get_hirid (), &lookup);
- rust_assert (ok);
+ if (!ok)
+ return error_mark_node;
// need to look up the reference for this identifier
// this can fail because it might be a Constructor for something
// in that case the caller should attempt ResolvePathType::Compile
- NodeId ref_node_id = UNKNOWN_NODEID;
- if (flag_name_resolution_2_0)
- {
- auto &nr_ctx
- = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
-
- auto resolved = nr_ctx.lookup (mappings.get_nodeid ());
+ auto &nr_ctx
+ = Resolver2_0::ImmutableNameResolutionContext::get ().resolver ();
- if (!resolved)
- return attempt_constructor_expression_lookup (lookup, ctx, mappings,
- expr_locus);
+ auto resolved = nr_ctx.lookup (mappings.get_nodeid ());
- ref_node_id = *resolved;
- }
- else
- {
- if (!ctx->get_resolver ()->lookup_resolved_name (mappings.get_nodeid (),
- &ref_node_id))
- return attempt_constructor_expression_lookup (lookup, ctx, mappings,
- expr_locus);
- }
+ if (!resolved)
+ return attempt_constructor_expression_lookup (lookup, ctx, mappings,
+ expr_locus);
return resolve_with_node_id (final_segment, mappings, expr_locus,
- is_qualified_path, ref_node_id);
+ is_qualified_path, *resolved);
}
tree
@@ -259,10 +264,10 @@ HIRCompileBase::query_compile (HirId ref, TyTy::BaseType *lookup,
HIR::ExternalItem *resolved_extern_item = hir_extern_item->first;
if (!lookup->has_substitutions_defined ())
return CompileExternItem::compile (resolved_extern_item, ctx, nullptr,
- true, expr_locus);
+ expr_locus);
else
return CompileExternItem::compile (resolved_extern_item, ctx, lookup,
- true, expr_locus);
+ expr_locus);
}
else
{
@@ -284,10 +289,10 @@ HIRCompileBase::query_compile (HirId ref, TyTy::BaseType *lookup,
{
if (!lookup->has_substitutions_defined ())
return CompileInherentImplItem::Compile (resolved_item->first, ctx,
- nullptr, true, expr_locus);
+ nullptr, expr_locus);
else
return CompileInherentImplItem::Compile (resolved_item->first, ctx,
- lookup, true, expr_locus);
+ lookup, expr_locus);
}
else if (auto trait_item
= ctx->get_mappings ().lookup_hir_trait_item (ref))
@@ -301,6 +306,27 @@ HIRCompileBase::query_compile (HirId ref, TyTy::BaseType *lookup,
trait->get_mappings ().get_defid (), &trait_ref);
rust_assert (ok);
+ if (trait_item.value ()->get_item_kind ()
+ == HIR::TraitItem::TraitItemKind::CONST)
+ {
+ auto &c
+ = *static_cast<HIR::TraitItemConst *> (trait_item.value ());
+ if (!c.has_expr ())
+ {
+ rich_location r (line_table, expr_locus);
+ r.add_range (trait->get_locus ());
+ r.add_range (c.get_locus ());
+ rust_error_at (r, "no default expression on trait constant");
+ return error_mark_node;
+ }
+
+ return CompileExpr::Compile (c.get_expr (), ctx);
+ }
+
+ if (trait_item.value ()->get_item_kind ()
+ != HIR::TraitItem::TraitItemKind::FUNC)
+ return error_mark_node;
+
// the type resolver can only resolve type bounds to their trait
// item so its up to us to figure out if this path should resolve
// to an trait-impl-block-item or if it can be defaulted to the
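The added branch resolves a path to a trait constant by compiling its default expression when the impl does not override it, and reports an error when no default exists. A sketch of the accepted case (invented):

    trait Zeroed {
        const ZERO: i32 = 0;   // default expression used when the impl omits it
    }

    struct S;
    impl Zeroed for S {}

    fn main() {
        assert_eq!(<S as Zeroed>::ZERO, 0);
    }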
@@ -313,11 +339,18 @@ HIRCompileBase::query_compile (HirId ref, TyTy::BaseType *lookup,
rust_assert (lookup->is<TyTy::FnType> ());
auto fn = lookup->as<TyTy::FnType> ();
rust_assert (fn->get_num_type_params () > 0);
- auto &self = fn->get_substs ().at (0);
- auto receiver = self.get_param_ty ();
+ TyTy::SubstitutionParamMapping &self = fn->get_substs ().at (0);
+ TyTy::BaseGeneric *receiver = self.get_param_ty ();
+ TyTy::BaseType *r = receiver;
+ if (!receiver->can_resolve ())
+ {
+ bool ok
+ = ctx->get_tyctx ()->lookup_type (receiver->get_ref (), &r);
+ rust_assert (ok);
+ }
+
auto candidates
- = Resolver::PathProbeImplTrait::Probe (receiver, final_segment,
- trait_ref);
+ = Resolver::PathProbeImplTrait::Probe (r, final_segment, trait_ref);
if (candidates.size () == 0)
{
// this means we are defaulting back to the trait_item if
@@ -349,11 +382,10 @@ HIRCompileBase::query_compile (HirId ref, TyTy::BaseType *lookup,
if (!lookup->has_substitutions_defined ())
return CompileInherentImplItem::Compile (impl_item, ctx,
- nullptr, true,
- expr_locus);
+ nullptr, expr_locus);
else
return CompileInherentImplItem::Compile (impl_item, ctx, lookup,
- true, expr_locus);
+ expr_locus);
}
}
}
diff --git a/gcc/rust/backend/rust-compile-stmt.cc b/gcc/rust/backend/rust-compile-stmt.cc
index a4b5a98..b520baf 100644
--- a/gcc/rust/backend/rust-compile-stmt.cc
+++ b/gcc/rust/backend/rust-compile-stmt.cc
@@ -58,6 +58,9 @@ CompileStmt::visit (HIR::LetStmt &stmt)
return;
}
+ rust_debug_loc (stmt.get_locus (), " -> LetStmt %s",
+ ty->as_string ().c_str ());
+
// setup var decl nodes
fncontext fnctx = ctx->peek_fn ();
tree fndecl = fnctx.fndecl;
diff --git a/gcc/rust/backend/rust-compile-type.cc b/gcc/rust/backend/rust-compile-type.cc
index d8af1d1..5b00afa 100644
--- a/gcc/rust/backend/rust-compile-type.cc
+++ b/gcc/rust/backend/rust-compile-type.cc
@@ -17,11 +17,11 @@
// <http://www.gnu.org/licenses/>.
#include "rust-compile-type.h"
-#include "rust-compile-expr.h"
#include "rust-constexpr.h"
-#include "rust-gcc.h"
+#include "rust-compile-base.h"
#include "tree.h"
+#include "fold-const.h"
#include "stor-layout.h"
namespace Rust {
@@ -81,13 +81,22 @@ TyTyResolveCompile::get_implicit_enumeral_node_type (TyTy::BaseType *repr)
}
tree
-TyTyResolveCompile::get_unit_type ()
+TyTyResolveCompile::get_unit_type (Context *ctx)
{
static tree unit_type;
if (unit_type == nullptr)
{
+ auto cn = ctx->get_mappings ().get_current_crate ();
+ auto &c = ctx->get_mappings ().get_ast_crate (cn);
+ location_t locus = BUILTINS_LOCATION;
+ if (c.items.size () > 0)
+ {
+ auto &item = c.items[0];
+ locus = item->get_locus ();
+ }
+
auto unit_type_node = Backend::struct_type ({});
- unit_type = Backend::named_type ("()", unit_type_node, BUILTINS_LOCATION);
+ unit_type = Backend::named_type ("()", unit_type_node, locus);
}
return unit_type;
}
@@ -112,6 +121,13 @@ TyTyResolveCompile::visit (const TyTy::InferType &type)
if (orig == lookup)
{
+ TyTy::BaseType *def = nullptr;
+ if (type.default_type (&def))
+ {
+ translated = TyTyResolveCompile::compile (ctx, def);
+ return;
+ }
+
translated = error_mark_node;
return;
}
@@ -120,7 +136,31 @@ TyTyResolveCompile::visit (const TyTy::InferType &type)
}
void
-TyTyResolveCompile::visit (const TyTy::ParamType &)
+TyTyResolveCompile::visit (const TyTy::ParamType &type)
+{
+ translated = error_mark_node;
+}
+
+void
+TyTyResolveCompile::visit (const TyTy::ConstParamType &type)
+{
+ translated = error_mark_node;
+}
+
+void
+TyTyResolveCompile::visit (const TyTy::ConstValueType &type)
+{
+ translated = error_mark_node;
+}
+
+void
+TyTyResolveCompile::visit (const TyTy::ConstInferType &type)
+{
+ translated = error_mark_node;
+}
+
+void
+TyTyResolveCompile::visit (const TyTy::ConstErrorType &type)
{
translated = error_mark_node;
}
@@ -164,8 +204,8 @@ TyTyResolveCompile::visit (const TyTy::ClosureType &type)
// this should be based on the closure move-ability
tree decl_type = TyTyResolveCompile::compile (ctx, lookup);
tree capture_type = build_reference_type (decl_type);
- fields.push_back (Backend::typed_identifier (mappings_name, capture_type,
- type.get_ident ().locus));
+ fields.emplace_back (mappings_name, capture_type,
+ type.get_ident ().locus);
}
tree type_record = Backend::struct_type (fields);
@@ -180,7 +220,7 @@ TyTyResolveCompile::visit (const TyTy::ClosureType &type)
void
TyTyResolveCompile::visit (const TyTy::FnType &type)
{
- Backend::typed_identifier receiver;
+ Backend::typed_identifier receiver ("", NULL_TREE, UNKNOWN_LOCATION);
std::vector<Backend::typed_identifier> parameters;
std::vector<Backend::typed_identifier> results;
@@ -195,8 +235,7 @@ TyTyResolveCompile::visit (const TyTy::FnType &type)
auto ret = TyTyResolveCompile::compile (ctx, hir_type, trait_object_mode);
location_t return_type_locus
= ctx->get_mappings ().lookup_location (hir_type->get_ref ());
- results.push_back (
- Backend::typed_identifier ("_", ret, return_type_locus));
+ results.emplace_back ("_", ret, return_type_locus);
}
for (auto &param_pair : type.get_params ())
@@ -205,11 +244,10 @@ TyTyResolveCompile::visit (const TyTy::FnType &type)
auto compiled_param_type
= TyTyResolveCompile::compile (ctx, param_tyty, trait_object_mode);
- auto compiled_param = Backend::typed_identifier (
- param_pair.get_pattern ().as_string (), compiled_param_type,
- ctx->get_mappings ().lookup_location (param_tyty->get_ref ()));
-
- parameters.push_back (compiled_param);
+ parameters.emplace_back (param_pair.get_pattern ().as_string (),
+ compiled_param_type,
+ ctx->get_mappings ().lookup_location (
+ param_tyty->get_ref ()));
}
if (!type.is_variadic ())
@@ -255,10 +293,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type)
tree compiled_field_ty
= TyTyResolveCompile::compile (ctx, field->get_field_type ());
- Backend::typed_identifier f (field->get_name (), compiled_field_ty,
- ctx->get_mappings ().lookup_location (
- type.get_ty_ref ()));
- fields.push_back (std::move (f));
+ fields.emplace_back (field->get_name (), compiled_field_ty,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
}
type_record = type.is_union () ? Backend::union_type (fields, false)
@@ -335,10 +372,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type)
== TyTy::VariantDef::VariantType::TUPLE)
field_name = "__" + field->get_name ();
- Backend::typed_identifier f (
- field_name, compiled_field_ty,
- ctx->get_mappings ().lookup_location (type.get_ty_ref ()));
- fields.push_back (std::move (f));
+ fields.emplace_back (field_name, compiled_field_ty,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
}
tree variant_record = Backend::struct_type (fields);
@@ -360,10 +396,9 @@ TyTyResolveCompile::visit (const TyTy::ADTType &type)
TyTy::VariantDef *variant = type.get_variants ().at (i++);
std::string implicit_variant_name = variant->get_identifier ();
- Backend::typed_identifier f (implicit_variant_name, variant_record,
- ctx->get_mappings ().lookup_location (
- type.get_ty_ref ()));
- enum_fields.push_back (std::move (f));
+ enum_fields.emplace_back (implicit_variant_name, variant_record,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
}
//
@@ -421,7 +456,7 @@ TyTyResolveCompile::visit (const TyTy::TupleType &type)
{
if (type.num_fields () == 0)
{
- translated = get_unit_type ();
+ translated = get_unit_type (ctx);
return;
}
@@ -438,14 +473,13 @@ TyTyResolveCompile::visit (const TyTy::TupleType &type)
// this, rather than simply emitting the integer, is that this
// approach makes it simpler to use a C-only debugger, or
// GDB's C mode, when debugging Rust.
- Backend::typed_identifier f ("__" + std::to_string (i), compiled_field_ty,
- ctx->get_mappings ().lookup_location (
- type.get_ty_ref ()));
- fields.push_back (std::move (f));
+ fields.emplace_back ("__" + std::to_string (i), compiled_field_ty,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
}
tree struct_type_record = Backend::struct_type (fields);
- translated = Backend::named_type (type.as_string (), struct_type_record,
+ translated = Backend::named_type (type.get_name (), struct_type_record,
type.get_ident ().locus);
}
@@ -454,14 +488,29 @@ TyTyResolveCompile::visit (const TyTy::ArrayType &type)
{
tree element_type
= TyTyResolveCompile::compile (ctx, type.get_element_type ());
+ auto const_capacity = type.get_capacity ();
+
+ // Check if capacity is a const type
+ if (const_capacity->get_kind () != TyTy::TypeKind::CONST)
+ {
+ rust_error_at (type.get_locus (), "array capacity is not a const type");
+ translated = error_mark_node;
+ return;
+ }
+
+ auto *capacity_const = const_capacity->as_const_type ();
- ctx->push_const_context ();
- tree capacity_expr = CompileExpr::Compile (type.get_capacity_expr (), ctx);
- ctx->pop_const_context ();
+ rust_assert (capacity_const->const_kind ()
+ == TyTy::BaseConstType::ConstKind::Value);
+ auto &capacity_value = *static_cast<TyTy::ConstValueType *> (capacity_const);
+ auto folded_capacity_expr = capacity_value.get_value ();
- tree folded_capacity_expr = fold_expr (capacity_expr);
+ // build_index_type takes the maximum index, which is one less than
+ // the length.
+ tree index_type_tree = build_index_type (
+ fold_build2 (MINUS_EXPR, sizetype, folded_capacity_expr, size_one_node));
- translated = Backend::array_type (element_type, folded_capacity_expr);
+ translated = build_array_type (element_type, index_type_tree, false);
}
void
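Array lengths are now taken from the type system's folded CONST capacity value rather than re-compiling the capacity expression, with the GCC index type built from length minus one. Invented example whose capacity must already be folded to a value by this point:

    const N: usize = 2 + 2;

    fn main() {
        let a: [u8; N] = [1, 2, 3, 4];  // capacity resolved to the const value 4
        assert_eq!(a.len(), N);
    }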
@@ -714,7 +763,7 @@ TyTyResolveCompile::visit (const TyTy::StrType &type)
void
TyTyResolveCompile::visit (const TyTy::NeverType &)
{
- translated = get_unit_type ();
+ translated = get_unit_type (ctx);
}
void
@@ -734,7 +783,9 @@ TyTyResolveCompile::visit (const TyTy::DynamicObjectType &type)
void
TyTyResolveCompile::visit (const TyTy::OpaqueType &type)
{
- translated = error_mark_node;
+ rust_assert (type.can_resolve ());
+ auto underlying = type.resolve ();
+ translated = TyTyResolveCompile::compile (ctx, underlying, trait_object_mode);
}
tree
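OpaqueType now compiles to its resolved underlying type instead of error_mark_node, which is what `impl Trait` in return position lowers to. A minimal invented sketch:

    fn answer() -> impl Copy {
        42_i32                 // the opaque type resolves to i32 and is compiled as such
    }

    fn main() {
        let x = answer();      // `x` has the opaque type; only the `Copy` bound is visible
        let _copy = x;
    }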
@@ -747,17 +798,15 @@ TyTyResolveCompile::create_dyn_obj_record (const TyTy::DynamicObjectType &type)
tree uint = Backend::integer_type (true, Backend::get_pointer_size ());
tree uintptr_ty = build_pointer_type (uint);
- Backend::typed_identifier f ("pointer", uintptr_ty,
- ctx->get_mappings ().lookup_location (
- type.get_ty_ref ()));
- fields.push_back (std::move (f));
+ fields.emplace_back ("pointer", uintptr_ty,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
tree vtable_size = build_int_cst (size_type_node, items.size ());
tree vtable_type = Backend::array_type (uintptr_ty, vtable_size);
- Backend::typed_identifier vtf ("vtable", vtable_type,
- ctx->get_mappings ().lookup_location (
- type.get_ty_ref ()));
- fields.push_back (std::move (vtf));
+ fields.emplace_back ("vtable", vtable_type,
+ ctx->get_mappings ().lookup_location (
+ type.get_ty_ref ()));
tree record = Backend::struct_type (fields);
RS_DST_FLAG (record) = 1;
diff --git a/gcc/rust/backend/rust-compile-type.h b/gcc/rust/backend/rust-compile-type.h
index bc611cf..d6c3259 100644
--- a/gcc/rust/backend/rust-compile-type.h
+++ b/gcc/rust/backend/rust-compile-type.h
@@ -30,7 +30,7 @@ public:
static tree compile (Context *ctx, const TyTy::BaseType *ty,
bool trait_object_mode = false);
- static tree get_unit_type ();
+ static tree get_unit_type (Context *ctx);
void visit (const TyTy::InferType &) override;
void visit (const TyTy::ADTType &) override;
@@ -50,6 +50,10 @@ public:
void visit (const TyTy::ReferenceType &) override;
void visit (const TyTy::PointerType &) override;
void visit (const TyTy::ParamType &) override;
+ void visit (const TyTy::ConstParamType &) override;
+ void visit (const TyTy::ConstValueType &) override;
+ void visit (const TyTy::ConstInferType &) override;
+ void visit (const TyTy::ConstErrorType &) override;
void visit (const TyTy::StrType &) override;
void visit (const TyTy::NeverType &) override;
void visit (const TyTy::PlaceholderType &) override;
diff --git a/gcc/rust/backend/rust-compile-var-decl.h b/gcc/rust/backend/rust-compile-var-decl.h
index 4c46a7b..1f306ad 100644
--- a/gcc/rust/backend/rust-compile-var-decl.h
+++ b/gcc/rust/backend/rust-compile-var-decl.h
@@ -64,30 +64,79 @@ public:
ctx->insert_var_decl (stmt_id, var);
vars.push_back (var);
+
+ if (pattern.has_subpattern ())
+ {
+ auto subpattern_vars
+ = CompileVarDecl::compile (fndecl, translated_type,
+ &pattern.get_subpattern (), ctx);
+ vars.insert (vars.end (), subpattern_vars.begin (),
+ subpattern_vars.end ());
+ }
}
void visit (HIR::TuplePattern &pattern) override
{
+ rust_assert (TREE_CODE (translated_type) == RECORD_TYPE);
switch (pattern.get_items ().get_item_type ())
{
- case HIR::TuplePatternItems::ItemType::MULTIPLE: {
- rust_assert (TREE_CODE (translated_type) == RECORD_TYPE);
- auto &items = static_cast<HIR::TuplePatternItemsMultiple &> (
+ case HIR::TuplePatternItems::ItemType::NO_REST:
+ {
+ auto &items_no_rest = static_cast<HIR::TuplePatternItemsNoRest &> (
+ pattern.get_items ());
+
+ tree field = TYPE_FIELDS (translated_type);
+ for (auto &sub : items_no_rest.get_patterns ())
+ {
+ gcc_assert (field != NULL_TREE);
+ tree sub_ty = TREE_TYPE (field);
+ CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx);
+ field = DECL_CHAIN (field);
+ }
+ }
+ break;
+
+ case HIR::TuplePatternItems::ItemType::HAS_REST:
+ {
+ auto &items_has_rest = static_cast<HIR::TuplePatternItemsHasRest &> (
pattern.get_items ());
- size_t offs = 0;
- for (auto &sub : items.get_patterns ())
+ // count total fields in translated_type
+ size_t total_fields = 0;
+ for (tree t = TYPE_FIELDS (translated_type); t; t = DECL_CHAIN (t))
+ {
+ total_fields++;
+ }
+
+ // process lower patterns
+ tree field = TYPE_FIELDS (translated_type);
+ for (auto &sub : items_has_rest.get_lower_patterns ())
+ {
+ gcc_assert (field != NULL_TREE);
+ tree sub_ty = TREE_TYPE (field);
+ CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx);
+ field = DECL_CHAIN (field);
+ }
+
+ // process upper patterns
+ if (!items_has_rest.get_upper_patterns ().empty ())
{
- tree sub_ty = error_mark_node;
- tree field = TYPE_FIELDS (translated_type);
- for (size_t i = 0; i < offs; i++)
+ size_t upper_start
+ = total_fields - items_has_rest.get_upper_patterns ().size ();
+ field = TYPE_FIELDS (translated_type);
+ for (size_t i = 0; i < upper_start; i++)
{
field = DECL_CHAIN (field);
gcc_assert (field != NULL_TREE);
}
- sub_ty = TREE_TYPE (field);
- CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx);
- offs++;
+
+ for (auto &sub : items_has_rest.get_upper_patterns ())
+ {
+ gcc_assert (field != NULL_TREE);
+ tree sub_ty = TREE_TYPE (field);
+ CompileVarDecl::compile (fndecl, sub_ty, sub.get (), ctx);
+ field = DECL_CHAIN (field);
+ }
}
}
break;
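The HAS_REST branch above binds the leading sub-patterns to the first record fields, skips the fields covered by `..`, and binds the trailing sub-patterns starting at total_fields minus the number of upper patterns. Below is a standalone model of that index arithmetic only; the names are illustrative and nothing here uses the real HIR or GCC field chains.

// Standalone sketch, not gccrs code: the field-skipping logic for a tuple
// pattern with a `..` rest.  Field and pattern names are illustrative.
#include <cassert>
#include <cstdio>
#include <string>
#include <vector>

static void
bind_with_rest (const std::vector<std::string> &fields,
                const std::vector<std::string> &lower,
                const std::vector<std::string> &upper)
{
  assert (lower.size () + upper.size () <= fields.size ());

  // Lower patterns bind the leading fields, in order.
  for (size_t i = 0; i < lower.size (); i++)
    std::printf ("%s <- %s\n", lower[i].c_str (), fields[i].c_str ());

  // Upper patterns start at total_fields - upper.size(), mirroring the
  // upper_start computation in the hunk above.
  size_t upper_start = fields.size () - upper.size ();
  for (size_t i = 0; i < upper.size (); i++)
    std::printf ("%s <- %s\n", upper[i].c_str (),
                 fields[upper_start + i].c_str ());
}

int main ()
{
  // Roughly what `let (a, b, .., y, z) = t;` does for a five-field tuple.
  bind_with_rest ({"__0", "__1", "__2", "__3", "__4"}, {"a", "b"},
                  {"y", "z"});
  return 0;
}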
diff --git a/gcc/rust/backend/rust-compile.cc b/gcc/rust/backend/rust-compile.cc
index 7b00066..40f16e4 100644
--- a/gcc/rust/backend/rust-compile.cc
+++ b/gcc/rust/backend/rust-compile.cc
@@ -183,8 +183,7 @@ HIRCompileBase::coercion_site1 (tree rvalue, TyTy::BaseType *rval,
}
tree
-HIRCompileBase::coerce_to_dyn_object (tree compiled_ref,
- const TyTy::BaseType *actual,
+HIRCompileBase::coerce_to_dyn_object (tree compiled_ref, TyTy::BaseType *actual,
const TyTy::DynamicObjectType *ty,
location_t locus)
{
@@ -201,9 +200,7 @@ HIRCompileBase::coerce_to_dyn_object (tree compiled_ref,
// __trait_object_ptr
// [list of function ptrs]
- std::vector<std::pair<Resolver::TraitReference *, HIR::ImplBlock *>>
- probed_bounds_for_receiver = Resolver::TypeBoundsProbe::Probe (actual);
-
+ auto probed_bounds_for_receiver = Resolver::TypeBoundsProbe::Probe (actual);
tree address_of_compiled_ref = null_pointer_node;
if (!actual->is_unit ())
address_of_compiled_ref = address_expression (compiled_ref, locus);
@@ -241,12 +238,13 @@ HIRCompileBase::compute_address_for_trait_item (
&receiver_bounds,
const TyTy::BaseType *receiver, const TyTy::BaseType *root, location_t locus)
{
- TyTy::TypeBoundPredicateItem predicate_item
+ tl::optional<TyTy::TypeBoundPredicateItem> predicate_item
= predicate->lookup_associated_item (ref->get_identifier ());
- rust_assert (!predicate_item.is_error ());
+ rust_assert (predicate_item.has_value ());
// This is the expected end type
- TyTy::BaseType *trait_item_type = predicate_item.get_tyty_for_receiver (root);
+ TyTy::BaseType *trait_item_type
+ = predicate_item->get_tyty_for_receiver (root);
rust_assert (trait_item_type->get_kind () == TyTy::TypeKind::FNDEF);
TyTy::FnType *trait_item_fntype
= static_cast<TyTy::FnType *> (trait_item_type);
@@ -338,7 +336,7 @@ HIRCompileBase::compute_address_for_trait_item (
}
return CompileInherentImplItem::Compile (associated_function, ctx,
- lookup_fntype, true, locus);
+ lookup_fntype, locus);
}
// we can only compile trait-items with a body
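The compute_address_for_trait_item change above is a mechanical switch from an is_error() sentinel object to tl::optional: the lookup either yields an item or nothing, and the caller asserts presence before dereferencing. A minimal standalone illustration of the same shape with std::optional and invented names, not the real TyTy API:

// Standalone sketch, not gccrs code: report "not found" through an
// optional instead of an error-state object.
#include <cassert>
#include <map>
#include <optional>
#include <string>

struct PredicateItem
{
  std::string name;
};

static std::optional<PredicateItem>
lookup_associated_item (const std::map<std::string, PredicateItem> &items,
                        const std::string &ident)
{
  auto it = items.find (ident);
  if (it == items.end ())
    return std::nullopt; // previously: a PredicateItem with is_error () == true
  return it->second;
}

int main ()
{
  std::map<std::string, PredicateItem> trait = {{"fmt", {"fmt"}}};
  auto item = lookup_associated_item (trait, "fmt");
  assert (item.has_value ()); // mirrors rust_assert (predicate_item.has_value ())
  return item->name == "fmt" ? 0 : 1;
}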
diff --git a/gcc/rust/backend/rust-constexpr.cc b/gcc/rust/backend/rust-constexpr.cc
index 2f2bbbd..e32ba3a 100644
--- a/gcc/rust/backend/rust-constexpr.cc
+++ b/gcc/rust/backend/rust-constexpr.cc
@@ -68,32 +68,24 @@ literal_type_p (tree t)
return false;
}
-static bool
-verify_constant (tree, bool, bool *, bool *);
-
-static HOST_WIDE_INT
-find_array_ctor_elt (tree ary, tree dindex, bool insert = false);
-static int
-array_index_cmp (tree key, tree index);
-static bool
-potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
- tsubst_flags_t flags, tree *jump_target);
-bool
-potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
- tsubst_flags_t flags);
-tree
-unshare_constructor (tree t MEM_STAT_DECL);
-void
-maybe_save_constexpr_fundef (tree fun);
-
-static bool
-returns (tree *jump_target);
-static bool
-breaks (tree *jump_target);
-static bool
-continues (tree *jump_target);
-static bool
-switches (tree *jump_target);
+static bool verify_constant (tree, bool, bool *, bool *);
+
+static HOST_WIDE_INT find_array_ctor_elt (tree ary, tree dindex,
+ bool insert = false);
+static int array_index_cmp (tree key, tree index);
+static bool potential_constant_expression_1 (tree t, bool want_rval,
+ bool strict, bool now,
+ tsubst_flags_t flags,
+ tree *jump_target);
+bool potential_constant_expression_1 (tree t, bool want_rval, bool strict,
+ bool now, tsubst_flags_t flags);
+tree unshare_constructor (tree t MEM_STAT_DECL);
+void maybe_save_constexpr_fundef (tree fun);
+
+static bool returns (tree *jump_target);
+static bool breaks (tree *jump_target);
+static bool continues (tree *jump_target);
+static bool switches (tree *jump_target);
struct constexpr_global_ctx
{
@@ -109,12 +101,54 @@ struct constexpr_global_ctx
auto_vec<tree, 16> heap_vars;
/* Cleanups that need to be evaluated at the end of CLEANUP_POINT_EXPR. */
vec<tree> *cleanups;
+ /* If non-null, only allow modification of existing values of the variables
+ in this set. Set by modifiable_tracker, below. */
+ hash_set<tree> *modifiable;
/* Number of heap VAR_DECL deallocations. */
unsigned heap_dealloc_count;
/* Constructor. */
constexpr_global_ctx ()
: constexpr_ops_count (0), cleanups (NULL), heap_dealloc_count (0)
{}
+
+ tree get_value (tree t)
+ {
+ if (tree *p = values.get (t))
+ if (*p != void_node)
+ return *p;
+ return NULL_TREE;
+ }
+ tree *get_value_ptr (tree t, bool initializing)
+ {
+ if (modifiable && !modifiable->contains (t))
+ return nullptr;
+ if (tree *p = values.get (t))
+ {
+ if (*p != void_node)
+ return p;
+ else if (initializing)
+ {
+ *p = NULL_TREE;
+ return p;
+ }
+ }
+ return nullptr;
+ }
+ void put_value (tree t, tree v)
+ {
+ bool already_in_map = values.put (t, v);
+ if (!already_in_map && modifiable)
+ modifiable->add (t);
+ }
+ void destroy_value (tree t)
+ {
+ if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL
+ || TREE_CODE (t) == RESULT_DECL)
+ values.put (t, void_node);
+ else
+ values.remove (t);
+ }
+ void clear_value (tree t) { values.remove (t); }
};
/* In constexpr.cc */
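The new constexpr_global_ctx helpers above centralize access to the values map: destroy_value leaves a void_node tombstone for decls, get_value treats that tombstone as "no usable value", put_value records which entries were created while a modifiable set is active, and get_value_ptr refuses to hand out writable slots for anything outside that set. A rough standalone model of the tombstone and creation-tracking behaviour, using plain standard containers rather than GCC's hash tables and trees:

// Standalone sketch, not GCC/gccrs code: tombstoned values plus a
// "modifiable" set that limits which entries may be written.
#include <cassert>
#include <string>
#include <unordered_map>
#include <unordered_set>

struct ValueMap
{
  static constexpr const char *tombstone = "<destroyed>"; // plays void_node

  std::unordered_map<std::string, std::string> values;
  std::unordered_set<std::string> *modifiable = nullptr;

  const std::string *get_value (const std::string &k) const
  {
    auto it = values.find (k);
    if (it != values.end () && it->second != tombstone)
      return &it->second;
    return nullptr;
  }

  std::string *get_value_ptr (const std::string &k)
  {
    if (modifiable && !modifiable->count (k))
      return nullptr; // writes restricted to entries this region may touch
    auto it = values.find (k);
    if (it != values.end () && it->second != tombstone)
      return &it->second;
    return nullptr;
  }

  void put_value (const std::string &k, const std::string &v)
  {
    bool already_in_map = values.count (k) != 0;
    values[k] = v;
    if (!already_in_map && modifiable)
      modifiable->insert (k); // remember entries created under tracking
  }

  void destroy_value (const std::string &k) { values[k] = tombstone; }
};

int main ()
{
  ValueMap m;
  m.put_value ("x", "1");
  m.destroy_value ("x");
  assert (m.get_value ("x") == nullptr); // destroyed, not merely missing

  std::unordered_set<std::string> tracked;
  m.modifiable = &tracked;
  m.put_value ("y", "2");
  assert (m.get_value_ptr ("y") != nullptr); // created under tracking
  assert (m.get_value_ptr ("x") == nullptr); // outside the modifiable set
  return 0;
}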
@@ -463,60 +497,62 @@ save_fundef_copy (tree fun, tree copy)
*slot = copy;
}
-static tree
-constant_value_1 (tree decl, bool strict_p, bool return_aggregate_cst_ok_p,
- bool unshare_p);
-tree
-decl_constant_value (tree decl, bool unshare_p);
+static tree constant_value_1 (tree decl, bool strict_p,
+ bool return_aggregate_cst_ok_p, bool unshare_p);
+static tree decl_really_constant_value (tree decl, bool unshare_p /*= true*/);
+tree decl_constant_value (tree decl, bool unshare_p);
-static void
-non_const_var_error (location_t loc, tree r);
+static void non_const_var_error (location_t loc, tree r);
-static tree
-eval_constant_expression (const constexpr_ctx *ctx, tree, bool, bool *, bool *,
- tree * = NULL);
+static tree eval_constant_expression (const constexpr_ctx *ctx, tree, bool,
+ bool *, bool *, tree *jump_target);
-static tree
-constexpr_fn_retval (const constexpr_ctx *ctx, tree r);
+static tree constexpr_fn_retval (const constexpr_ctx *ctx, tree r);
-static tree
-eval_store_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *);
+static tree eval_store_expression (const constexpr_ctx *ctx, tree r, bool,
+ bool *, bool *, tree *jump_target);
-static tree
-eval_call_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *);
+static tree eval_call_expression (const constexpr_ctx *ctx, tree r, bool,
+ bool *, bool *, tree *);
-static tree
-eval_binary_expression (const constexpr_ctx *ctx, tree r, bool, bool *, bool *);
+static tree eval_binary_expression (const constexpr_ctx *ctx, tree r, bool,
+ bool *, bool *, tree *jump_target);
-static tree
-get_function_named_in_call (tree t);
+static tree get_function_named_in_call (tree t);
-static tree
-eval_statement_list (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
- bool *overflow_p, tree *jump_target);
-static tree
-extract_string_elt (tree string, unsigned chars_per_elt, unsigned index);
+static tree eval_statement_list (const constexpr_ctx *ctx, tree t,
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target);
+static tree extract_string_elt (tree string, unsigned chars_per_elt,
+ unsigned index);
-static tree
-eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p,
- tree *jump_target);
-
-static tree
-eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p);
+static tree eval_conditional_expression (const constexpr_ctx *ctx, tree t,
+ bool lval, bool *non_constant_p,
+ bool *overflow_p, tree *jump_target);
-static tree
-eval_loop_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
- bool *overflow_p, tree *jump_target);
+static tree eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval,
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target);
-static tree
-eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
- bool *overflow_p, tree *jump_target);
-
-static tree
-eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/,
- bool *non_constant_p, bool *overflow_p);
+static tree eval_loop_expr (const constexpr_ctx *ctx, tree t,
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target);
+
+static tree eval_switch_expr (const constexpr_ctx *ctx, tree t,
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target);
+
+static tree eval_unary_expression (const constexpr_ctx *ctx, tree t,
+ bool /*lval*/, bool *non_constant_p,
+ bool *overflow_p, tree *jump_target);
+static bool eval_check_shift_p (location_t loc, const constexpr_ctx *ctx,
+ enum tree_code code, tree type, tree lhs,
+ tree rhs);
+static tree fold_pointer_plus_expression (const constexpr_ctx *ctx, tree t,
+ tree lhs, tree rhs,
+ bool *non_constant_p,
+ bool *overflow_p, tree *jump_target);
+static tree maybe_fold_addr_pointer_plus (tree t);
/* Variables and functions to manage constexpr call expansion context.
These do not need to be marked for PCH or GC. */
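Most of the churn in the declarations above, and in the call sites that follow, is mechanical: every evaluator now takes a tree *jump_target out-parameter so that a break/continue/goto/switch target discovered deep inside a sub-expression unwinds evaluation of everything enclosing it. The standalone sketch below shows the shape of that protocol with a made-up Expr type; it is not the GCC evaluator.

// Standalone sketch, not GCC code: a jump_target out-parameter that makes
// nested evaluation bail out as soon as any sub-expression sets it.
#include <cstdio>
#include <string>
#include <vector>

struct Expr
{
  int value;
  std::string jumps_to;          // non-empty models a BREAK_STMT/goto target
  std::vector<Expr> subexprs;
};

static int
eval (const Expr &e, std::string *jump_target)
{
  if (!jump_target->empty ())
    return 0;                    // already unwinding towards a label
  if (!e.jumps_to.empty ())
    {
      *jump_target = e.jumps_to; // start unwinding here
      return 0;
    }
  int sum = e.value;
  for (const Expr &sub : e.subexprs)
    {
      sum += eval (sub, jump_target);
      if (!jump_target->empty ())
        return sum;              // a sub-expression jumped: stop early
    }
  return sum;
}

int main ()
{
  Expr seq{0, "", {}};
  seq.subexprs.push_back (Expr{1, "", {}});
  seq.subexprs.push_back (Expr{0, "break", {}});
  seq.subexprs.push_back (Expr{100, "", {}}); // never evaluated

  std::string jump_target;
  std::printf ("sum=%d jump=%s\n", eval (seq, &jump_target),
               jump_target.c_str ());
  return 0;
}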
@@ -578,8 +614,9 @@ fold_expr (tree expr)
bool non_constant_p = false;
bool overflow_p = false;
+ tree jump_target = NULL_TREE;
tree folded = eval_constant_expression (&ctx, expr, false, &non_constant_p,
- &overflow_p);
+ &overflow_p, &jump_target);
rust_assert (folded != NULL_TREE);
// more logic here to possibly port
@@ -605,13 +642,13 @@ same_type_ignoring_tlq_and_bounds_p (tree type1, tree type2)
otherwise return NULL_TREE. */
static tree
-union_active_member (const constexpr_ctx *ctx, tree t)
+union_active_member (const constexpr_ctx *ctx, tree t, tree *jump_target)
{
constexpr_ctx new_ctx = *ctx;
new_ctx.quiet = true;
bool non_constant_p = false, overflow_p = false;
tree ctor = eval_constant_expression (&new_ctx, t, false, &non_constant_p,
- &overflow_p);
+ &overflow_p, jump_target);
if (TREE_CODE (ctor) == CONSTRUCTOR && CONSTRUCTOR_NELTS (ctor) == 1
&& CONSTRUCTOR_ELT (ctor, 0)->index
&& TREE_CODE (CONSTRUCTOR_ELT (ctor, 0)->index) == FIELD_DECL)
@@ -623,7 +660,8 @@ union_active_member (const constexpr_ctx *ctx, tree t)
static tree
fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
- tree op, unsigned HOST_WIDE_INT off, bool *empty_base)
+ tree op, unsigned HOST_WIDE_INT off, bool *empty_base,
+ tree *jump_target)
{
tree optype = TREE_TYPE (op);
unsigned HOST_WIDE_INT const_nunits;
@@ -670,7 +708,8 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
tree index = size_int (idx + tree_to_uhwi (min_val));
op = build4_loc (loc, ARRAY_REF, TREE_TYPE (optype), op, index,
NULL_TREE, NULL_TREE);
- return fold_indirect_ref_1 (ctx, loc, type, op, rem, empty_base);
+ return fold_indirect_ref_1 (ctx, loc, type, op, rem, empty_base,
+ jump_target);
}
}
/* ((foo *)&struct_with_foo_field)[x] => COMPONENT_REF */
@@ -679,7 +718,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
{
if (TREE_CODE (optype) == UNION_TYPE)
/* For unions prefer the currently active member. */
- if (tree field = union_active_member (ctx, op))
+ if (tree field = union_active_member (ctx, op, jump_target))
{
unsigned HOST_WIDE_INT el_sz
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (field)));
@@ -688,7 +727,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field,
NULL_TREE);
if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop, off,
- empty_base))
+ empty_base, jump_target))
return ret;
}
}
@@ -707,8 +746,9 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
{
tree cop = build3 (COMPONENT_REF, TREE_TYPE (field), op, field,
NULL_TREE);
- if (tree ret = fold_indirect_ref_1 (ctx, loc, type, cop,
- off - upos, empty_base))
+ if (tree ret
+ = fold_indirect_ref_1 (ctx, loc, type, cop, off - upos,
+ empty_base, jump_target))
return ret;
}
}
@@ -737,7 +777,7 @@ fold_indirect_ref_1 (const constexpr_ctx *ctx, location_t loc, tree type,
static tree
rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
- tree op0, bool *empty_base)
+ tree op0, bool *empty_base, tree *jump_target)
{
tree sub = op0;
tree subtype;
@@ -796,7 +836,8 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
tree off = integer_zero_node;
canonicalize_obj_off (op, off);
gcc_assert (integer_zerop (off));
- return fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base);
+ return fold_indirect_ref_1 (ctx, loc, type, op, 0, empty_base,
+ jump_target);
}
}
else if (TREE_CODE (sub) == POINTER_PLUS_EXPR
@@ -811,7 +852,7 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
tree obj = TREE_OPERAND (op00, 0);
canonicalize_obj_off (obj, off);
return fold_indirect_ref_1 (ctx, loc, type, obj, tree_to_uhwi (off),
- empty_base);
+ empty_base, jump_target);
}
}
/* *(foo *)fooarrptr => (*fooarrptr)[0] */
@@ -820,8 +861,8 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
{
tree type_domain;
tree min_val = size_zero_node;
- tree newsub
- = rs_fold_indirect_ref (ctx, loc, TREE_TYPE (subtype), sub, NULL);
+ tree newsub = rs_fold_indirect_ref (ctx, loc, TREE_TYPE (subtype), sub,
+ NULL, jump_target);
if (newsub)
sub = newsub;
else
@@ -840,7 +881,7 @@ rs_fold_indirect_ref (const constexpr_ctx *ctx, location_t loc, tree type,
static tree
rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p, tree *jump_target)
{
tree orig_op0 = TREE_OPERAND (t, 0);
bool empty_base = false;
@@ -858,13 +899,13 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval,
/* First try to simplify it directly. */
tree r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t),
- orig_op0, &empty_base);
+ orig_op0, &empty_base, jump_target);
if (!r)
{
/* If that didn't work, evaluate the operand first. */
- tree op0
- = eval_constant_expression (ctx, orig_op0,
- /*lval*/ false, non_constant_p, overflow_p);
+ tree op0 = eval_constant_expression (ctx, orig_op0,
+ /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
/* Don't VERIFY_CONSTANT here. */
if (*non_constant_p)
return t;
@@ -878,7 +919,7 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval,
}
r = rs_fold_indirect_ref (ctx, EXPR_LOCATION (t), TREE_TYPE (t), op0,
- &empty_base);
+ &empty_base, jump_target);
if (r == NULL_TREE)
{
/* We couldn't fold to a constant value. Make sure it's not
@@ -907,7 +948,8 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval,
}
}
- r = eval_constant_expression (ctx, r, lval, non_constant_p, overflow_p);
+ r = eval_constant_expression (ctx, r, lval, non_constant_p, overflow_p,
+ jump_target);
if (*non_constant_p)
return t;
@@ -933,17 +975,17 @@ rs_eval_indirect_ref (const constexpr_ctx *ctx, tree t, bool lval,
static tree
eval_logical_expression (const constexpr_ctx *ctx, tree t, tree bailout_value,
tree continue_value, bool lval, bool *non_constant_p,
- bool *overflow_p)
+ bool *overflow_p, tree *jump_target)
{
tree r;
tree lhs = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
VERIFY_CONSTANT (lhs);
if (tree_int_cst_equal (lhs, bailout_value))
return lhs;
gcc_assert (tree_int_cst_equal (lhs, continue_value));
r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
VERIFY_CONSTANT (r);
return r;
}
@@ -1235,7 +1277,8 @@ get_or_insert_ctor_field (tree ctor, tree index, int pos_hint = -1)
/* We fell off the end of the CONSTRUCTOR, so insert a new
entry at the end. */
- insert : {
+ insert:
+ {
constructor_elt ce = {index, NULL_TREE};
vec_safe_insert (CONSTRUCTOR_ELTS (ctor), idx, ce);
@@ -1258,19 +1301,20 @@ get_or_insert_ctor_field (tree ctor, tree index, int pos_hint = -1)
static tree
eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
- tree arg1
- = eval_constant_expression (ctx, TREE_OPERAND (t, 0),
- /*lval*/ false, non_constant_p, overflow_p);
+ tree arg1 = eval_constant_expression (ctx, TREE_OPERAND (t, 0),
+ /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
VERIFY_CONSTANT (arg1);
- tree arg2
- = eval_constant_expression (ctx, TREE_OPERAND (t, 1),
- /*lval*/ false, non_constant_p, overflow_p);
+ tree arg2 = eval_constant_expression (ctx, TREE_OPERAND (t, 1),
+ /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
VERIFY_CONSTANT (arg2);
- tree arg3
- = eval_constant_expression (ctx, TREE_OPERAND (t, 2),
- /*lval*/ false, non_constant_p, overflow_p);
+ tree arg3 = eval_constant_expression (ctx, TREE_OPERAND (t, 2),
+ /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
VERIFY_CONSTANT (arg3);
location_t loc = EXPR_LOCATION (t);
tree type = TREE_TYPE (t);
@@ -1295,7 +1339,7 @@ eval_vector_conditional_expression (const constexpr_ctx *ctx, tree t,
static tree
eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p, tree *jump_target)
{
vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
bool changed = false;
@@ -1346,8 +1390,9 @@ eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval,
the active union member now so that we can later detect and diagnose
if its initializer attempts to activate another member. */
get_or_insert_ctor_field (ctx->ctor, index);
- tree elt = eval_constant_expression (&new_ctx, value, lval,
- non_constant_p, overflow_p);
+ tree elt
+ = eval_constant_expression (&new_ctx, value, lval, non_constant_p,
+ overflow_p, jump_target);
/* Don't VERIFY_CONSTANT here. */
if (ctx->quiet && *non_constant_p)
break;
@@ -1416,7 +1461,8 @@ eval_bare_aggregate (const constexpr_ctx *ctx, tree t, bool lval,
static tree
cxx_eval_trinary_expression (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
int i;
tree args[3];
@@ -1424,8 +1470,9 @@ cxx_eval_trinary_expression (const constexpr_ctx *ctx, tree t, bool lval,
for (i = 0; i < 3; i++)
{
- args[i] = eval_constant_expression (ctx, TREE_OPERAND (t, i), lval,
- non_constant_p, overflow_p);
+ args[i]
+ = eval_constant_expression (ctx, TREE_OPERAND (t, i), lval,
+ non_constant_p, overflow_p, jump_target);
VERIFY_CONSTANT (args[i]);
}
@@ -1568,10 +1615,10 @@ free_constructor (tree t)
}
}
-static tree
-eval_and_check_array_index (const constexpr_ctx *ctx, tree t,
- bool allow_one_past, bool *non_constant_p,
- bool *overflow_p);
+static tree eval_and_check_array_index (const constexpr_ctx *ctx, tree t,
+ bool allow_one_past,
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target);
// forked from gcc/cp/constexpr.cc cxx_eval_array_reference
@@ -1580,11 +1627,11 @@ eval_and_check_array_index (const constexpr_ctx *ctx, tree t,
static tree
eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p, tree *jump_target)
{
tree oldary = TREE_OPERAND (t, 0);
- tree ary
- = eval_constant_expression (ctx, oldary, lval, non_constant_p, overflow_p);
+ tree ary = eval_constant_expression (ctx, oldary, lval, non_constant_p,
+ overflow_p, jump_target);
if (*non_constant_p)
return t;
if (!lval && TREE_CODE (ary) == VIEW_CONVERT_EXPR
@@ -1593,8 +1640,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval,
ary = TREE_OPERAND (ary, 0);
tree oldidx = TREE_OPERAND (t, 1);
- tree index
- = eval_and_check_array_index (ctx, t, lval, non_constant_p, overflow_p);
+ tree index = eval_and_check_array_index (ctx, t, lval, non_constant_p,
+ overflow_p, jump_target);
if (*non_constant_p)
return t;
@@ -1696,7 +1743,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval,
new_ctx.ctor = build_constructor (elem_type, NULL);
ctx = &new_ctx;
}
- t = eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p);
+ t = eval_constant_expression (ctx, val, lval, non_constant_p, overflow_p,
+ jump_target);
if (!SCALAR_TYPE_P (elem_type) && t != ctx->ctor)
free_constructor (ctx->ctor);
return t;
@@ -1709,7 +1757,8 @@ eval_array_reference (const constexpr_ctx *ctx, tree t, bool lval,
static tree
eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
unsigned HOST_WIDE_INT i;
tree field;
@@ -1717,7 +1766,7 @@ eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval,
tree part = TREE_OPERAND (t, 1);
tree orig_whole = TREE_OPERAND (t, 0);
tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
if (INDIRECT_REF_P (whole) && integer_zerop (TREE_OPERAND (whole, 0)))
{
if (!ctx->quiet)
@@ -1806,8 +1855,8 @@ eval_component_reference (const constexpr_ctx *ctx, tree t, bool lval,
// back to handle this to assign suitable value to value before sending it in
// eval_constant_expression below
// value = build_value_init (TREE_TYPE (t), tf_warning_or_error);
- return eval_constant_expression (ctx, value, lval, non_constant_p,
- overflow_p);
+ return eval_constant_expression (ctx, value, lval, non_constant_p, overflow_p,
+ jump_target);
}
/* Subroutine of cxx_eval_statement_list. Determine whether the statement
@@ -1866,7 +1915,7 @@ label_matches (const constexpr_ctx *ctx, tree *jump_target, tree stmt)
static tree
eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
bool *non_constant_p, bool *overflow_p,
- tree *jump_target /* = NULL */)
+ tree *jump_target)
{
if (jump_target && *jump_target)
{
@@ -1901,6 +1950,9 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
location_t loc = EXPR_LOCATION (t);
+ if (t == NULL_TREE)
+ return NULL_TREE;
+
if (CONSTANT_CLASS_P (t))
{
if (TREE_OVERFLOW (t))
@@ -1934,33 +1986,44 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
{
r = DECL_VALUE_EXPR (t);
return eval_constant_expression (ctx, r, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
}
- /* fall through */
- case CONST_DECL: {
- /* We used to not check lval for CONST_DECL, but darwin.cc uses
- CONST_DECL for aggregate constants. */
- if (lval)
- return t;
- else if (t == ctx->object)
- return ctx->ctor;
- if (VAR_P (t))
- if (tree *p = ctx->global->values.get (t))
- if (*p != NULL_TREE)
- {
- r = *p;
- break;
- }
+ /* fall through */
+ case CONST_DECL:
+ /* We used to not check lval for CONST_DECL, but darwin.cc uses
+ CONST_DECL for aggregate constants. */
+ if (lval)
+ return t;
+ else if (t == ctx->object)
+ return ctx->ctor;
+ if (VAR_P (t))
+ {
+ if (tree v = ctx->global->get_value (t))
+ {
+ r = v;
+ break;
+ }
+ }
+ if (COMPLETE_TYPE_P (TREE_TYPE (t))
+ && is_really_empty_class (TREE_TYPE (t), /*ignore_vptr*/ false))
+ {
+ /* If the class is empty, we aren't actually loading anything. */
+ r = build_constructor (TREE_TYPE (t), NULL);
+ TREE_CONSTANT (r) = true;
+ }
+ else if (ctx->strict)
+ r = decl_really_constant_value (t, /*unshare_p=*/false);
+ else
r = decl_constant_value (t, /*unshare_p=*/false);
- if (TREE_CODE (r) == TARGET_EXPR
- && TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR)
- r = TARGET_EXPR_INITIAL (r);
- if (DECL_P (r))
- {
+ if (TREE_CODE (r) == TARGET_EXPR
+ && TREE_CODE (TARGET_EXPR_INITIAL (r)) == CONSTRUCTOR)
+ r = TARGET_EXPR_INITIAL (r);
+ if (DECL_P (r) && !(VAR_P (t) && TYPE_REF_P (TREE_TYPE (t))))
+ {
+ if (!ctx->quiet)
non_const_var_error (loc, r);
- return r;
- }
- }
+ *non_constant_p = true;
+ }
break;
case PARM_DECL:
@@ -2026,7 +2089,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
case LTGT_EXPR:
case RANGE_EXPR:
case COMPLEX_EXPR:
- r = eval_binary_expression (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_binary_expression (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
/* fold can introduce non-IF versions of these; still treat them as
@@ -2035,17 +2099,18 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
case TRUTH_ANDIF_EXPR:
r = eval_logical_expression (ctx, t, boolean_false_node,
boolean_true_node, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
break;
case TRUTH_OR_EXPR:
case TRUTH_ORIF_EXPR:
r = eval_logical_expression (ctx, t, boolean_true_node,
boolean_false_node, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
break;
- case TARGET_EXPR: {
+ case TARGET_EXPR:
+ {
tree type = TREE_TYPE (t);
if (!literal_type_p (type))
@@ -2090,7 +2155,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
/* Pass false for 'lval' because this indicates
initialization of a temporary. */
r = eval_constant_expression (ctx, TREE_OPERAND (t, 1), false,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
if (*non_constant_p)
break;
/* Adjust the type of the result to the type of the temporary. */
@@ -2107,13 +2172,14 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
break;
case CALL_EXPR:
- r = eval_call_expression (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_call_expression (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case RETURN_EXPR:
if (TREE_OPERAND (t, 0) != NULL_TREE)
r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
/* FALLTHRU */
case BREAK_STMT:
case CONTINUE_STMT:
@@ -2129,7 +2195,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
}
break;
- case DECL_EXPR: {
+ case DECL_EXPR:
+ {
r = DECL_EXPR_DECL (t);
if (AGGREGATE_TYPE_P (TREE_TYPE (r)) || VECTOR_TYPE_P (TREE_TYPE (r)))
@@ -2145,7 +2212,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (tree init = DECL_INITIAL (r))
{
init = eval_constant_expression (ctx, init, false, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
/* Don't share a CONSTRUCTOR that might be changed. */
init = unshare_constructor (init);
/* Remember that a constant object's constructor has already
@@ -2166,12 +2233,13 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
constant without its operand being, and vice versa. */
case MEM_REF:
case INDIRECT_REF:
- r = rs_eval_indirect_ref (ctx, t, lval, non_constant_p, overflow_p);
+ r = rs_eval_indirect_ref (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case VEC_PERM_EXPR:
- r = cxx_eval_trinary_expression (ctx, t, lval, non_constant_p,
- overflow_p);
+ r = cxx_eval_trinary_expression (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case PAREN_EXPR:
@@ -2179,11 +2247,12 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
/* A PAREN_EXPR resulting from __builtin_assoc_barrier has no effect in
constant expressions since it's unaffected by -fassociative-math. */
r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
break;
case MODIFY_EXPR:
- r = eval_store_expression (ctx, t, false, non_constant_p, overflow_p);
+ r = eval_store_expression (ctx, t, false, non_constant_p, overflow_p,
+ jump_target);
break;
case STATEMENT_LIST:
@@ -2199,12 +2268,14 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
case OBJ_TYPE_REF:
/* Virtual function lookup. We don't need to do anything fancy. */
return eval_constant_expression (ctx, OBJ_TYPE_REF_EXPR (t), lval,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
- case EXIT_EXPR: {
+ case EXIT_EXPR:
+ {
tree cond = TREE_OPERAND (t, 0);
- cond = eval_constant_expression (ctx, cond, /*lval*/ false,
- non_constant_p, overflow_p);
+ cond
+ = eval_constant_expression (ctx, cond, /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
VERIFY_CONSTANT (cond);
if (integer_nonzerop (cond))
*jump_target = t;
@@ -2234,7 +2305,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
else
{
r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), false,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p,
+ jump_target);
if (*non_constant_p)
break;
ctx->global->values.put (t, r);
@@ -2243,11 +2315,12 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
}
break;
- case ADDR_EXPR: {
+ case ADDR_EXPR:
+ {
tree oldop = TREE_OPERAND (t, 0);
tree op = eval_constant_expression (ctx, oldop,
/*lval*/ true, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
/* Don't VERIFY_CONSTANT here. */
if (*non_constant_p)
return t;
@@ -2261,7 +2334,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
break;
}
- case COMPOUND_EXPR: {
+ case COMPOUND_EXPR:
+ {
/* check_return_expr sometimes wraps a TARGET_EXPR in a
COMPOUND_EXPR; don't get confused. Also handle EMPTY_CLASS_EXPR
introduced by build_call_a. */
@@ -2291,7 +2365,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (lval)
{
r = eval_constant_expression (ctx, TREE_OPERAND (t, 0), lval,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p,
+ jump_target);
if (r == error_mark_node)
;
else if (r == TREE_OPERAND (t, 0))
@@ -2310,7 +2385,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
case BIT_NOT_EXPR:
case TRUTH_NOT_EXPR:
case FIXED_CONVERT_EXPR:
- r = eval_unary_expression (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_unary_expression (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case LOOP_EXPR:
@@ -2325,7 +2401,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
break;
case ARRAY_REF:
- r = eval_array_reference (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_array_reference (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case COMPONENT_REF:
@@ -2339,11 +2416,13 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
*non_constant_p = true;
return t;
}
- r = eval_component_reference (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_component_reference (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case BIT_FIELD_REF:
- r = eval_bit_field_ref (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_bit_field_ref (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
case COND_EXPR:
@@ -2388,7 +2467,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
case VEC_COND_EXPR:
r = eval_vector_conditional_expression (ctx, t, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
break;
case TRY_CATCH_EXPR:
@@ -2401,7 +2480,8 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
non_constant_p, overflow_p, jump_target);
break;
- case CLEANUP_POINT_EXPR: {
+ case CLEANUP_POINT_EXPR:
+ {
auto_vec<tree, 2> cleanups;
vec<tree> *prev_cleanups = ctx->global->cleanups;
ctx->global->cleanups = &cleanups;
@@ -2413,7 +2493,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
/* Evaluate the cleanups. */
FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup)
eval_constant_expression (ctx, cleanup, false, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
}
break;
@@ -2423,7 +2503,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (!*non_constant_p)
/* Also evaluate the cleanup. */
eval_constant_expression (ctx, TREE_OPERAND (t, 1), true,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
break;
case CONSTRUCTOR:
@@ -2435,17 +2515,19 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (TREE_CONSTANT (t))
return fold (t);
}
- r = eval_bare_aggregate (ctx, t, lval, non_constant_p, overflow_p);
+ r = eval_bare_aggregate (ctx, t, lval, non_constant_p, overflow_p,
+ jump_target);
break;
/* FALLTHROUGH. */
case NOP_EXPR:
case CONVERT_EXPR:
- case VIEW_CONVERT_EXPR: {
+ case VIEW_CONVERT_EXPR:
+ {
tree oldop = TREE_OPERAND (t, 0);
tree op = eval_constant_expression (ctx, oldop, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
if (*non_constant_p)
return t;
tree type = TREE_TYPE (t);
@@ -2576,7 +2658,7 @@ eval_constant_expression (const constexpr_ctx *ctx, tree t, bool lval,
return ctor;
else
return eval_constant_expression (ctx, ctor, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
}
/* A placeholder without a referent. We can get here when
checking whether NSDMIs are noexcept, or in massage_init_elt;
@@ -2637,7 +2719,8 @@ is_empty_field (tree decl)
static tree
eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
constexpr_ctx new_ctx = *ctx;
@@ -2658,7 +2741,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (!SCALAR_TYPE_P (type))
new_ctx.ctor = new_ctx.object = NULL_TREE;
init = eval_constant_expression (&new_ctx, init, false, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
if (*non_constant_p)
return t;
}
@@ -2670,7 +2753,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
as a whole; otherwise, only evaluate the innermost piece to avoid
building up unnecessary *_REFs. */
target = eval_constant_expression (ctx, target, true, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
evaluated = true;
if (*non_constant_p)
return t;
@@ -2688,7 +2771,8 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
{
case BIT_FIELD_REF:
case COMPONENT_REF:
- case ARRAY_REF: {
+ case ARRAY_REF:
+ {
tree ob = TREE_OPERAND (probe, 0);
tree elt = TREE_OPERAND (probe, 1);
if (TREE_CODE (elt) == FIELD_DECL /*&& DECL_MUTABLE_P (elt)*/)
@@ -2697,10 +2781,9 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
}
if (TREE_CODE (probe) == ARRAY_REF)
{
- // TODO
- rust_unreachable ();
- // elt = eval_and_check_array_index (ctx, probe, false,
- // non_constant_p, overflow_p);
+ elt = eval_and_check_array_index (ctx, probe, false,
+ non_constant_p, overflow_p,
+ jump_target);
if (*non_constant_p)
return t;
}
@@ -2724,8 +2807,9 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
object = probe;
else
{
- probe = eval_constant_expression (ctx, probe, true,
- non_constant_p, overflow_p);
+ probe
+ = eval_constant_expression (ctx, probe, true, non_constant_p,
+ overflow_p, jump_target);
evaluated = true;
if (*non_constant_p)
return t;
@@ -2916,7 +3000,7 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (tree tinit = TARGET_EXPR_INITIAL (init))
init = tinit;
init = eval_constant_expression (&new_ctx, init, false, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
/* The hash table might have moved since the get earlier, and the
initializer might have mutated the underlying CONSTRUCTORs, so we must
recompute VALP. */
@@ -3018,22 +3102,115 @@ eval_store_expression (const constexpr_ctx *ctx, tree t, bool lval,
Like cxx_eval_unary_expression, except for binary expressions. */
static tree
eval_binary_expression (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
+ tree r = NULL_TREE;
tree orig_lhs = TREE_OPERAND (t, 0);
tree orig_rhs = TREE_OPERAND (t, 1);
tree lhs, rhs;
-
lhs = eval_constant_expression (ctx, orig_lhs, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
+ /* Don't VERIFY_CONSTANT here, it's unnecessary and will break pointer
+ subtraction. */
+ if (*non_constant_p)
+ return t;
+ if (*jump_target)
+ return NULL_TREE;
+
rhs = eval_constant_expression (ctx, orig_rhs, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
+ if (*non_constant_p)
+ return t;
+ if (*jump_target)
+ return NULL_TREE;
location_t loc = EXPR_LOCATION (t);
enum tree_code code = TREE_CODE (t);
tree type = TREE_TYPE (t);
- return fold_binary_loc (loc, code, type, lhs, rhs);
+ if (code == EQ_EXPR || code == NE_EXPR)
+ {
+ bool is_code_eq = (code == EQ_EXPR);
+
+ if (TREE_CODE (lhs) == PTRMEM_CST && TREE_CODE (rhs) == PTRMEM_CST)
+ {
+ tree lmem = PTRMEM_CST_MEMBER (lhs);
+ tree rmem = PTRMEM_CST_MEMBER (rhs);
+ bool eq = false;
+ if (TREE_CODE (lmem) == TREE_CODE (rmem)
+ && TREE_CODE (lmem) == FIELD_DECL
+ && TREE_CODE (DECL_CONTEXT (lmem)) == UNION_TYPE
+ && same_type_p (DECL_CONTEXT (lmem), DECL_CONTEXT (rmem)))
+ /* If both refer to (possibly different) members of the same union
+ (12.3), they compare equal. */
+ eq = true;
+ // else
+ // eq = cp_tree_equal (lhs, rhs);
+ r = constant_boolean_node (eq == is_code_eq, type);
+ }
+ else if ((TREE_CODE (lhs) == PTRMEM_CST || TREE_CODE (rhs) == PTRMEM_CST)
+ && (null_member_pointer_value_p (lhs)
+ || null_member_pointer_value_p (rhs)))
+ r = constant_boolean_node (!is_code_eq, type);
+ }
+ if (r == NULL_TREE && TREE_CODE_CLASS (code) == tcc_comparison
+ && POINTER_TYPE_P (TREE_TYPE (lhs)))
+ {
+ if (tree lhso = maybe_fold_addr_pointer_plus (lhs))
+ lhs = fold_convert (TREE_TYPE (lhs), lhso);
+ if (tree rhso = maybe_fold_addr_pointer_plus (rhs))
+ rhs = fold_convert (TREE_TYPE (rhs), rhso);
+ }
+ if (code == POINTER_PLUS_EXPR && !*non_constant_p && integer_zerop (lhs)
+ && !integer_zerop (rhs))
+ {
+ if (!ctx->quiet)
+ error ("arithmetic involving a null pointer in %qE", lhs);
+ *non_constant_p = true;
+ return t;
+ }
+ else if (code == POINTER_PLUS_EXPR)
+ {
+ r = fold_pointer_plus_expression (ctx, t, lhs, rhs, non_constant_p,
+ overflow_p, jump_target);
+ if (*jump_target)
+ return NULL_TREE;
+ }
+
+ if (r == NULL_TREE)
+ {
+ r = fold_binary_loc (loc, code, type, lhs, rhs);
+ }
+
+ if (r == NULL_TREE && (code == LSHIFT_EXPR || code == RSHIFT_EXPR)
+ && TREE_CODE (lhs) == INTEGER_CST && TREE_CODE (rhs) == INTEGER_CST
+ && wi::neg_p (wi::to_wide (rhs)))
+ {
+ /* For diagnostics and -fpermissive emulate previous behavior of
+ handling shifts by negative amount. */
+ tree nrhs = const_unop (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
+ if (nrhs)
+ r = fold_binary_loc (loc,
+ code == LSHIFT_EXPR ? RSHIFT_EXPR : LSHIFT_EXPR,
+ type, lhs, nrhs);
+ }
+
+ if (r == NULL_TREE)
+ {
+ if (lhs == orig_lhs && rhs == orig_rhs)
+ r = t;
+ else
+ r = build2_loc (loc, code, type, lhs, rhs);
+ }
+ else if (eval_check_shift_p (loc, ctx, code, type, lhs, rhs))
+ *non_constant_p = true;
+ /* Don't VERIFY_CONSTANT if this might be dealing with a pointer to
+ a local array in a constexpr function. */
+ bool ptr = INDIRECT_TYPE_P (TREE_TYPE (lhs));
+ if (!ptr)
+ VERIFY_CONSTANT (r);
+ return r;
}
/* Helper function of cxx_bind_parameters_in_call. Return non-NULL
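One non-mechanical addition in the eval_binary_expression hunk above is the fallback for constant shifts by a negative amount: when fold_binary_loc declines to fold them, the evaluator retries with the opposite shift direction and the negated count, preserving the historical behaviour for diagnostics and -fpermissive. A simplified standalone illustration on fixed-width integers, assuming the count's magnitude stays below the width:

// Standalone sketch, not GCC code: emulate `lhs << rhs` with rhs < 0 by
// shifting the other way, as the permissive fallback above does.
// Assumes |rhs| < 32 so the shifts themselves are well defined.
#include <cassert>
#include <cstdint>

static uint32_t
permissive_shift_left (uint32_t lhs, int32_t rhs)
{
  if (rhs >= 0)
    return lhs << rhs; // the ordinary fold
  return lhs >> -rhs;  // LSHIFT_EXPR with a negative count -> RSHIFT_EXPR
}

int main ()
{
  assert (permissive_shift_left (0x80u, 3) == 0x400u);
  assert (permissive_shift_left (0x80u, -3) == 0x10u);
  return 0;
}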
@@ -3071,7 +3248,7 @@ addr_of_non_const_var (tree *tp, int *walk_subtrees, void *data)
static tree
rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun,
bool *non_constant_p, bool *overflow_p,
- bool *non_constant_args)
+ bool *non_constant_args, tree *jump_target)
{
const int nargs = call_expr_nargs (t);
tree parms = DECL_ARGUMENTS (fun);
@@ -3095,7 +3272,7 @@ rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun,
such as this, but here we do the elision differently: we keep the
TARGET_EXPR, and use its CONSTRUCTOR as the value of the parm. */
arg = eval_constant_expression (ctx, x, /*lval=*/false, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
/* Don't VERIFY_CONSTANT here. */
if (*non_constant_p && ctx->quiet)
break;
@@ -3161,7 +3338,8 @@ rs_bind_parameters_in_call (const constexpr_ctx *ctx, tree t, tree fun,
static tree
eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
- bool lval, bool *non_constant_p, bool *overflow_p)
+ bool lval, bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
const int nargs = call_expr_nargs (t);
tree *args = (tree *) alloca (nargs * sizeof (tree));
@@ -3264,8 +3442,9 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
|| potential_constant_expression (arg))
{
bool dummy1 = false, dummy2 = false;
- arg
- = eval_constant_expression (&new_ctx, arg, false, &dummy1, &dummy2);
+ tree dummy_jump_target = NULL_TREE;
+ arg = eval_constant_expression (&new_ctx, arg, false, &dummy1,
+ &dummy2, &dummy_jump_target);
}
if (bi_const_p)
@@ -3361,7 +3540,7 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
}
return eval_constant_expression (&new_ctx, new_call, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
}
// Subroutine of cxx_eval_constant_expression.
@@ -3369,7 +3548,7 @@ eval_builtin_function_call (const constexpr_ctx *ctx, tree t, tree fun,
// evaluation.
static tree
eval_call_expression (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p, tree *jump_target)
{
location_t loc = EXPR_LOCATION (t);
tree fun = get_function_named_in_call (t);
@@ -3398,12 +3577,12 @@ eval_call_expression (const constexpr_ctx *ctx, tree t, bool lval,
if (fndecl_built_in_p (fun))
return eval_builtin_function_call (ctx, t, fun, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
bool non_constant_args = false;
new_call.bindings
= rs_bind_parameters_in_call (ctx, t, fun, non_constant_p, overflow_p,
- &non_constant_args);
+ &non_constant_args, jump_target);
/* We build up the bindings list before we know whether we already have this
call cached. If we don't end up saving these bindings, ggc_free them when
@@ -3942,7 +4121,8 @@ constexpr_fn_retval (const constexpr_ctx *ctx, tree body)
{
switch (TREE_CODE (body))
{
- case STATEMENT_LIST: {
+ case STATEMENT_LIST:
+ {
tree expr = NULL_TREE;
for (tree stmt : tsi_range (body))
{
@@ -3960,13 +4140,15 @@ constexpr_fn_retval (const constexpr_ctx *ctx, tree body)
return expr;
}
- case RETURN_EXPR: {
+ case RETURN_EXPR:
+ {
bool non_constant_p = false;
bool overflow_p = false;
return eval_constant_expression (ctx, body, false, &non_constant_p,
- &overflow_p);
+ &overflow_p, NULL);
}
- case DECL_EXPR: {
+ case DECL_EXPR:
+ {
tree decl = DECL_EXPR_DECL (body);
if (TREE_CODE (decl) == USING_DECL
/* Accept __func__, __FUNCTION__, and __PRETTY_FUNCTION__. */
@@ -3978,7 +4160,8 @@ constexpr_fn_retval (const constexpr_ctx *ctx, tree body)
case CLEANUP_POINT_EXPR:
return constexpr_fn_retval (ctx, TREE_OPERAND (body, 0));
- case BIND_EXPR: {
+ case BIND_EXPR:
+ {
tree b = BIND_EXPR_BODY (body);
return constexpr_fn_retval (ctx, b);
}
@@ -4026,6 +4209,17 @@ constant_value_1 (tree decl, bool, bool, bool unshare_p)
return unshare_p ? unshare_expr (decl) : decl;
}
+/* Like scalar_constant_value, but can also return aggregate initializers.
+ If UNSHARE_P, return an unshared copy of the initializer. */
+
+tree
+decl_really_constant_value (tree decl, bool unshare_p /*= true*/)
+{
+ return constant_value_1 (decl, /*strict_p=*/true,
+ /*return_aggregate_cst_ok_p=*/true,
+ /*unshare_p=*/unshare_p);
+}
+
// A more relaxed version of decl_really_constant_value, used by the
// common C/C++ code.
tree
@@ -4039,15 +4233,38 @@ decl_constant_value (tree decl, bool unshare_p)
static void
non_const_var_error (location_t loc, tree r)
{
- error_at (loc,
- "the value of %qD is not usable in a constant "
- "expression",
- r);
+ tree type = TREE_TYPE (r);
+
/* Avoid error cascade. */
if (DECL_INITIAL (r) == error_mark_node)
return;
-
- // more in cp/constexpr.cc
+ if (DECL_DECLARED_CONSTEXPR_P (r))
+ inform (DECL_SOURCE_LOCATION (r), "%qD used in its own initializer", r);
+ else if (INTEGRAL_OR_ENUMERATION_TYPE_P (type))
+ {
+ if (!DECL_INITIAL (r) || !TREE_CONSTANT (DECL_INITIAL (r))
+ || !DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (r))
+ inform (DECL_SOURCE_LOCATION (r),
+ "%qD was not initialized with a constant "
+ "expression",
+ r);
+ else
+ gcc_unreachable ();
+ }
+ else if (TYPE_REF_P (type))
+ inform (DECL_SOURCE_LOCATION (r),
+ "%qD was not initialized with a constant "
+ "expression",
+ r);
+ else
+ {
+ if (!DECL_DECLARED_CONSTEXPR_P (r))
+ inform (DECL_SOURCE_LOCATION (r), "%qD was not declared %<constexpr%>",
+ r);
+ else
+ inform (DECL_SOURCE_LOCATION (r),
+ "%qD does not have integral or enumeration type", r);
+ }
}
static tree
@@ -4138,7 +4355,8 @@ array_index_cmp (tree key, tree index)
{
case INTEGER_CST:
return tree_int_cst_compare (key, index);
- case RANGE_EXPR: {
+ case RANGE_EXPR:
+ {
tree lo = TREE_OPERAND (index, 0);
tree hi = TREE_OPERAND (index, 1);
if (tree_int_cst_lt (key, lo))
@@ -4323,7 +4541,7 @@ verify_constant (tree t, bool allow_non_constant, bool *non_constant_p,
if (!*non_constant_p && !reduced_constant_expression_p (t) && t != void_node)
{
if (!allow_non_constant)
- error ("%q+E is not a constant expression", t);
+ error_at (EXPR_LOCATION (t), "is not a constant expression");
*non_constant_p = true;
}
if (TREE_OVERFLOW_P (t))
@@ -4428,7 +4646,8 @@ diag_array_subscript (location_t loc, const constexpr_ctx *ctx, tree array,
static tree
get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
tree nelts;
if (TREE_CODE (type) == ARRAY_TYPE)
@@ -4444,8 +4663,8 @@ get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type,
rust_unreachable ();
/* For VLAs, the number of elements won't be an integer constant. */
- nelts
- = eval_constant_expression (ctx, nelts, false, non_constant_p, overflow_p);
+ nelts = eval_constant_expression (ctx, nelts, false, non_constant_p,
+ overflow_p, jump_target);
return nelts;
}
@@ -4457,13 +4676,13 @@ get_array_or_vector_nelts (const constexpr_ctx *ctx, tree type,
static tree
eval_and_check_array_index (const constexpr_ctx *ctx, tree t,
bool allow_one_past, bool *non_constant_p,
- bool *overflow_p)
+ bool *overflow_p, tree *jump_target)
{
location_t loc = rs_expr_loc_or_input_loc (t);
tree ary = TREE_OPERAND (t, 0);
t = TREE_OPERAND (t, 1);
tree index = eval_constant_expression (ctx, t, allow_one_past, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
VERIFY_CONSTANT (index);
if (!tree_fits_shwi_p (index) || tree_int_cst_sgn (index) < 0)
@@ -4474,7 +4693,7 @@ eval_and_check_array_index (const constexpr_ctx *ctx, tree t,
}
tree nelts = get_array_or_vector_nelts (ctx, TREE_TYPE (ary), non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
VERIFY_CONSTANT (nelts);
if (allow_one_past ? !tree_int_cst_le (index, nelts)
: !tree_int_cst_lt (index, nelts))
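eval_and_check_array_index, now threaded with jump_target above, enforces the usual constexpr subscript rules: the index must be a non-negative constant, below the element count for an access, or at most equal to it when only a one-past-the-end address is being formed. A standalone restatement of that check with plain integers, not GCC trees:

// Standalone sketch, not GCC code: constexpr array-index validation.
#include <cassert>
#include <cstdint>
#include <optional>

static bool
index_ok (std::optional<int64_t> index, uint64_t nelts, bool allow_one_past)
{
  if (!index || *index < 0)
    return false; // not a usable constant index
  uint64_t i = static_cast<uint64_t> (*index);
  return allow_one_past ? i <= nelts : i < nelts;
}

int main ()
{
  assert (index_ok (3, 4, false));
  assert (!index_ok (4, 4, false)); // out of bounds for an access
  assert (index_ok (4, 4, true));   // fine as a one-past-the-end address
  assert (!index_ok (std::nullopt, 4, true));
  return 0;
}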
@@ -4684,9 +4903,9 @@ eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval,
bool *non_constant_p, bool *overflow_p,
tree *jump_target)
{
- tree val
- = eval_constant_expression (ctx, TREE_OPERAND (t, 0),
- /*lval*/ false, non_constant_p, overflow_p);
+ tree val = eval_constant_expression (ctx, TREE_OPERAND (t, 0),
+ /*lval*/ false, non_constant_p,
+ overflow_p, jump_target);
VERIFY_CONSTANT (val);
if (TREE_CODE (t) == IF_STMT && IF_STMT_CONSTEVAL_P (t))
{
@@ -4723,14 +4942,14 @@ eval_conditional_expression (const constexpr_ctx *ctx, tree t, bool lval,
static tree
eval_bit_field_ref (const constexpr_ctx *ctx, tree t, bool lval,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p, tree *jump_target)
{
tree orig_whole = TREE_OPERAND (t, 0);
tree retval, fldval, utype, mask;
bool fld_seen = false;
HOST_WIDE_INT istart, isize;
tree whole = eval_constant_expression (ctx, orig_whole, lval, non_constant_p,
- overflow_p);
+ overflow_p, jump_target);
tree start, field, value;
unsigned HOST_WIDE_INT i;
@@ -4961,8 +5180,8 @@ eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
{
tree cond
= TREE_CODE (t) == SWITCH_STMT ? SWITCH_STMT_COND (t) : SWITCH_COND (t);
- cond
- = eval_constant_expression (ctx, cond, false, non_constant_p, overflow_p);
+ cond = eval_constant_expression (ctx, cond, false, non_constant_p, overflow_p,
+ jump_target);
VERIFY_CONSTANT (cond);
*jump_target = cond;
@@ -4996,12 +5215,13 @@ eval_switch_expr (const constexpr_ctx *ctx, tree t, bool *non_constant_p,
static tree
eval_unary_expression (const constexpr_ctx *ctx, tree t, bool /*lval*/,
- bool *non_constant_p, bool *overflow_p)
+ bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
{
tree r;
tree orig_arg = TREE_OPERAND (t, 0);
tree arg = eval_constant_expression (ctx, orig_arg, /*lval*/ false,
- non_constant_p, overflow_p);
+ non_constant_p, overflow_p, jump_target);
VERIFY_CONSTANT (arg);
location_t loc = EXPR_LOCATION (t);
enum tree_code code = TREE_CODE (t);
@@ -5137,7 +5357,9 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
if (manifestly_const_eval)
instantiate_constexpr_fns (r);
- r = eval_constant_expression (&ctx, r, false, &non_constant_p, &overflow_p);
+ tree jump_target = NULL_TREE;
+ r = eval_constant_expression (&ctx, r, false, &non_constant_p, &overflow_p,
+ &jump_target);
if (!constexpr_dtor)
verify_constant (r, allow_non_constant, &non_constant_p, &overflow_p);
@@ -5149,7 +5371,7 @@ cxx_eval_outermost_constant_expr (tree t, bool allow_non_constant,
/* Evaluate the cleanups. */
FOR_EACH_VEC_ELT_REVERSE (cleanups, i, cleanup)
eval_constant_expression (&ctx, cleanup, false, &non_constant_p,
- &overflow_p);
+ &overflow_p, NULL);
/* Mutable logic is a bit tricky: we want to allow initialization of
constexpr variables with mutable members, but we can't copy those
@@ -5909,7 +6131,6 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
/* A pointer-to-member constant. */
return true;
- // handle_addr_expr:
#if 0
/* FIXME adjust when issue 1197 is fully resolved. For now don't do
any checking here, as we might dereference the pointer later. If
@@ -5945,7 +6166,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
case BIT_FIELD_REF:
return RECUR (TREE_OPERAND (t, 0), want_rval);
- case INDIRECT_REF: {
+ case INDIRECT_REF:
+ {
tree x = TREE_OPERAND (t, 0);
STRIP_NOPS (x);
return RECUR (x, rval);
@@ -6216,7 +6438,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
case INIT_EXPR:
return RECUR (TREE_OPERAND (t, 1), rval);
- case CONSTRUCTOR: {
+ case CONSTRUCTOR:
+ {
vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
constructor_elt *ce;
for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
@@ -6225,7 +6448,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
return true;
}
- case TREE_LIST: {
+ case TREE_LIST:
+ {
gcc_assert (TREE_PURPOSE (t) == NULL_TREE || DECL_P (TREE_PURPOSE (t)));
if (!RECUR (TREE_VALUE (t), want_rval))
return false;
@@ -6240,7 +6464,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
case ROUND_DIV_EXPR:
case TRUNC_MOD_EXPR:
case CEIL_MOD_EXPR:
- case ROUND_MOD_EXPR: {
+ case ROUND_MOD_EXPR:
+ {
tree denom = TREE_OPERAND (t, 1);
if (!RECUR (denom, rval))
return false;
@@ -6260,7 +6485,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
}
}
- case COMPOUND_EXPR: {
+ case COMPOUND_EXPR:
+ {
/* check_return_expr sometimes wraps a TARGET_EXPR in a
COMPOUND_EXPR; don't get confused. */
tree op0 = TREE_OPERAND (t, 0);
@@ -6282,7 +6508,8 @@ potential_constant_expression_1 (tree t, bool want_rval, bool strict, bool now,
case TRUTH_OR_EXPR:
case TRUTH_ORIF_EXPR:
tmp = boolean_false_node;
- truth : {
+ truth:
+ {
tree op0 = TREE_OPERAND (t, 0);
tree op1 = TREE_OPERAND (t, 1);
if (!RECUR (op0, rval))
@@ -6479,6 +6706,170 @@ fold_non_dependent_init (tree t, tsubst_flags_t /*=tf_warning_or_error*/,
return maybe_constant_init (t, object, manifestly_const_eval);
}
+/* Check whether the shift operation with code CODE and type TYPE on LHS
+ and RHS is undefined. If it is, give an error with an explanation,
+ and return true; return false otherwise. */
+
+static bool
+eval_check_shift_p (location_t loc, const constexpr_ctx *ctx,
+ enum tree_code code, tree type, tree lhs, tree rhs)
+{
+ if ((code != LSHIFT_EXPR && code != RSHIFT_EXPR)
+ || TREE_CODE (lhs) != INTEGER_CST || TREE_CODE (rhs) != INTEGER_CST)
+ return false;
+
+ tree lhstype = TREE_TYPE (lhs);
+ unsigned HOST_WIDE_INT uprec = TYPE_PRECISION (TREE_TYPE (lhs));
+
+ /* [expr.shift] The behavior is undefined if the right operand
+ is negative, or greater than or equal to the length in bits
+ of the promoted left operand. */
+ if (tree_int_cst_sgn (rhs) == -1)
+ {
+ if (!ctx->quiet)
+ permerror (loc, "right operand of shift expression %q+E is negative",
+ build2_loc (loc, code, type, lhs, rhs));
+ return (!flag_permissive || ctx->quiet);
+ }
+ if (compare_tree_int (rhs, uprec) >= 0)
+ {
+ if (!ctx->quiet)
+ permerror (loc,
+ "right operand of shift expression %q+E is greater "
+ "than or equal to the precision %wu of the left operand",
+ build2_loc (loc, code, type, lhs, rhs), uprec);
+ return (!flag_permissive || ctx->quiet);
+ }
+
+ /* The value of E1 << E2 is E1 left-shifted E2 bit positions; [...]
+ if E1 has a signed type and non-negative value, and E1 x 2^E2 is
+ representable in the corresponding unsigned type of the result type,
+ then that value, converted to the result type, is the resulting value;
+ otherwise, the behavior is undefined.
+ For C++20:
+ The value of E1 << E2 is the unique value congruent to E1 x 2^E2 modulo
+ 2^N, where N is the range exponent of the type of the result. */
+ if (code == LSHIFT_EXPR && !TYPE_OVERFLOW_WRAPS (lhstype))
+ {
+ if (tree_int_cst_sgn (lhs) == -1)
+ {
+ if (!ctx->quiet)
+ permerror (loc, "left operand of shift expression %q+E is negative",
+ build2_loc (loc, code, type, lhs, rhs));
+ return (!flag_permissive || ctx->quiet);
+ }
+ /* For signed x << y the following:
+ (unsigned) x >> ((prec (lhs) - 1) - y)
+ if > 1, is undefined. The right-hand side of this formula
+ is the highest bit of the LHS that can be set (starting from 0),
+ so that the shift doesn't overflow. We then right-shift the LHS
+ to see whether any other bit is set making the original shift
+ undefined -- the result is not representable in the corresponding
+ unsigned type. */
+ tree t = build_int_cst (unsigned_type_node, uprec - 1);
+ t = fold_build2 (MINUS_EXPR, unsigned_type_node, t, rhs);
+ tree ulhs = fold_convert (unsigned_type_for (lhstype), lhs);
+ t = fold_build2 (RSHIFT_EXPR, TREE_TYPE (ulhs), ulhs, t);
+ if (tree_int_cst_lt (integer_one_node, t))
+ {
+ if (!ctx->quiet)
+ permerror (loc, "shift expression %q+E overflows",
+ build2_loc (loc, code, type, lhs, rhs));
+ return (!flag_permissive || ctx->quiet);
+ }
+ }
+ return false;
+}
+
+/* Helper function for cxx_eval_binary_expression. Try to optimize
+ original POINTER_PLUS_EXPR T, LHS p+ RHS, return NULL_TREE if the
+ generic folding should be used. */
+
+static tree
+fold_pointer_plus_expression (const constexpr_ctx *ctx, tree t, tree lhs,
+ tree rhs, bool *non_constant_p, bool *overflow_p,
+ tree *jump_target)
+{
+ STRIP_NOPS (lhs);
+ if (TREE_CODE (lhs) != ADDR_EXPR)
+ return NULL_TREE;
+
+ lhs = TREE_OPERAND (lhs, 0);
+
+ /* &A[i] p+ j => &A[i + j] */
+ if (TREE_CODE (lhs) == ARRAY_REF
+ && TREE_CODE (TREE_OPERAND (lhs, 1)) == INTEGER_CST
+ && TREE_CODE (rhs) == INTEGER_CST && TYPE_SIZE_UNIT (TREE_TYPE (lhs))
+ && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
+ {
+ tree orig_type = TREE_TYPE (t);
+ location_t loc = EXPR_LOCATION (t);
+ tree type = TREE_TYPE (lhs);
+
+ t = fold_convert_loc (loc, ssizetype, TREE_OPERAND (lhs, 1));
+ tree nelts = array_type_nelts_top (TREE_TYPE (TREE_OPERAND (lhs, 0)));
+ nelts = eval_constant_expression (ctx, nelts, true, non_constant_p,
+ overflow_p, jump_target);
+ if (*non_constant_p)
+ return NULL_TREE;
+ if (*jump_target)
+ return NULL_TREE;
+ /* Don't fold an out-of-bound access. */
+ if (!tree_int_cst_le (t, nelts))
+ return NULL_TREE;
+ rhs = fold_convert (ssizetype, rhs);
+ /* Don't fold if rhs can't be divided exactly by TYPE_SIZE_UNIT.
+ constexpr int A[1]; ... (char *)&A[0] + 1 */
+ if (!integer_zerop (fold_build2_loc (loc, TRUNC_MOD_EXPR, sizetype, rhs,
+ TYPE_SIZE_UNIT (type))))
+ return NULL_TREE;
+ /* Make sure to treat the second operand of POINTER_PLUS_EXPR
+ as signed. */
+ rhs = fold_build2_loc (loc, EXACT_DIV_EXPR, ssizetype, rhs,
+ TYPE_SIZE_UNIT (type));
+ t = size_binop_loc (loc, PLUS_EXPR, rhs, t);
+ t = build4_loc (loc, ARRAY_REF, type, TREE_OPERAND (lhs, 0), t, NULL_TREE,
+ NULL_TREE);
+ t = build_fold_addr_expr (t);
+ t = fold_convert (orig_type, t);
+ return eval_constant_expression (ctx, t, true, non_constant_p, overflow_p,
+ jump_target);
+ }
+
+ return NULL_TREE;
+}
+
+/* Try to fold expressions like
+ (struct S *) (&a[0].D.2378 + 12)
+ into
+ &MEM <struct T> [(void *)&a + 12B]
+ This is something normally done by gimple_fold_stmt_to_constant_1
+ on GIMPLE, but is undesirable on GENERIC if we are e.g. going to
+ dereference the address because some details are lost.
+ For pointer comparisons we want such folding though so that
+ match.pd address_compare optimization works. */
+
+static tree
+maybe_fold_addr_pointer_plus (tree t)
+{
+ while (CONVERT_EXPR_P (t) && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) != POINTER_PLUS_EXPR)
+ return NULL_TREE;
+ tree op0 = TREE_OPERAND (t, 0);
+ tree op1 = TREE_OPERAND (t, 1);
+ if (TREE_CODE (op1) != INTEGER_CST)
+ return NULL_TREE;
+ while (CONVERT_EXPR_P (op0)
+ && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
+ op0 = TREE_OPERAND (op0, 0);
+ if (TREE_CODE (op0) != ADDR_EXPR)
+ return NULL_TREE;
+ op1 = fold_convert (ptr_type_node, op1);
+ tree r = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (op0)), op0, op1);
+ return build1_loc (EXPR_LOCATION (t), ADDR_EXPR, TREE_TYPE (op0), r);
+}
+
} // namespace Compile
} // namespace Rust
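
For reference, the overflow test that eval_check_shift_p applies to signed left shifts can be exercised in isolation. The following is a minimal sketch, not part of the patch above: it assumes a 32-bit int and plain host integers in place of GCC's INTEGER_CST trees, and the helper name signed_lshift_undefined plus the main driver are made up for the illustration.

#include <cassert>
#include <climits>

/* Mirror of the "(unsigned) lhs >> ((prec - 1) - rhs) > 1" test: true when
   lhs << rhs would be diagnosed for a non-wrapping signed left operand.  */
static bool
signed_lshift_undefined (int lhs, int rhs)
{
  const int prec = sizeof (int) * CHAR_BIT;   /* TYPE_PRECISION analogue.  */
  if (rhs < 0 || rhs >= prec)
    return true;                              /* Negative or too-large count.  */
  if (lhs < 0)
    return true;                              /* Negative left operand.  */
  /* lhs * 2^rhs is representable in the corresponding unsigned type iff no
     bit above position (prec - 1) - rhs is set in lhs.  */
  return ((unsigned) lhs >> ((prec - 1) - rhs)) > 1;
}

int
main ()
{
  assert (!signed_lshift_undefined (1, 31));  /* 2^31 fits in unsigned int.  */
  assert (signed_lshift_undefined (2, 31));   /* 2 * 2^31 does not.  */
  assert (signed_lshift_undefined (-1, 1));   /* Negative left operand.  */
  assert (signed_lshift_undefined (5, 32));   /* Count >= precision.  */
  return 0;
}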
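The "&A[i] p+ j => &A[i + j]" fold in fold_pointer_plus_expression can be pictured the same way. Another minimal sketch, again not part of the patch and using plain integers in place of trees: fold_array_pointer_plus and its parameters are hypothetical, and nelts stands for the element count the real code obtains from array_type_nelts_top.

#include <cstdio>

/* Given index INDEX into an array of NELTS elements of ELT_SIZE bytes and a
   byte offset, compute the folded index for &A[index] p+ byte_offset, or
   return false when the fold must be declined.  An index equal to NELTS
   (the one-past-the-end position) is still allowed, matching the
   tree_int_cst_le (t, nelts) check in the patch.  */
static bool
fold_array_pointer_plus (long index, long byte_offset, long elt_size,
                         long nelts, long *new_index)
{
  if (index > nelts)                    /* Out-of-bound access: don't fold.  */
    return false;
  if (byte_offset % elt_size != 0)      /* e.g. (char *) &A[0] + 1.  */
    return false;
  *new_index = index + byte_offset / elt_size;
  return true;
}

int
main ()
{
  long idx;
  /* Over a long A[4]: &A[1] plus two elements' worth of bytes folds to &A[3].  */
  if (fold_array_pointer_plus (1, 2 * (long) sizeof (long), sizeof (long), 4, &idx))
    std::printf ("folded to &A[%ld]\n", idx);
  /* A one-byte offset cannot be expressed as an ARRAY_REF index: declined.  */
  if (!fold_array_pointer_plus (0, 1, sizeof (long), 4, &idx))
    std::printf ("fold declined\n");
  return 0;
}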
diff --git a/gcc/rust/backend/rust-constexpr.h b/gcc/rust/backend/rust-constexpr.h
index 77a0797..27f0a2e 100644
--- a/gcc/rust/backend/rust-constexpr.h
+++ b/gcc/rust/backend/rust-constexpr.h
@@ -24,8 +24,7 @@ namespace Rust {
namespace Compile {
extern tree fold_expr (tree);
-extern void
-maybe_save_constexpr_fundef (tree fun);
+extern void maybe_save_constexpr_fundef (tree fun);
} // namespace Compile
} // namespace Rust
diff --git a/gcc/rust/backend/rust-mangle-v0.cc b/gcc/rust/backend/rust-mangle-v0.cc
index d0df4ab..f6b1a4c 100644
--- a/gcc/rust/backend/rust-mangle-v0.cc
+++ b/gcc/rust/backend/rust-mangle-v0.cc
@@ -62,9 +62,9 @@ struct V0Path
}
};
-static std::string
-v0_path (Rust::Compile::Context *ctx, const TyTy::BaseType *ty,
- const Resolver::CanonicalPath &path);
+static std::string v0_path (Rust::Compile::Context *ctx,
+ const TyTy::BaseType *ty,
+ const Resolver::CanonicalPath &path);
static std::string
v0_tuple_prefix (const TyTy::BaseType *ty)
@@ -148,7 +148,8 @@ v0_complex_type_prefix (Context *ctx, const TyTy::BaseType *ty)
// TODO: generics
switch (ty->get_kind ())
{
- case TyTy::TypeKind::ADT: {
+ case TyTy::TypeKind::ADT:
+ {
const TyTy::ADTType *adt = static_cast<const TyTy::ADTType *> (ty);
return v0_path (ctx, ty, adt->get_ident ().path);
}
@@ -387,7 +388,8 @@ v0_path (Rust::Compile::Context *ctx, const TyTy::BaseType *ty,
{
switch (impl_item->first->get_impl_item_type ())
{
- case HIR::ImplItem::FUNCTION: {
+ case HIR::ImplItem::FUNCTION:
+ {
HIR::Function *fn
= static_cast<HIR::Function *> (impl_item->first);
v0path
@@ -408,7 +410,8 @@ v0_path (Rust::Compile::Context *ctx, const TyTy::BaseType *ty,
{
switch (trait_item.value ()->get_item_kind ())
{
- case HIR::TraitItem::FUNC: {
+ case HIR::TraitItem::FUNC:
+ {
auto fn = static_cast<HIR::TraitItemFunc *> (*trait_item);
rust_unreachable ();
v0path = v0_function_path (v0path, ctx, ty,
@@ -428,7 +431,8 @@ v0_path (Rust::Compile::Context *ctx, const TyTy::BaseType *ty,
else if (auto item = mappings.lookup_hir_item (hir_id))
switch (item.value ()->get_item_kind ())
{
- case HIR::Item::ItemKind::Function: {
+ case HIR::Item::ItemKind::Function:
+ {
HIR::Function *fn = static_cast<HIR::Function *> (*item);
v0path
= v0_function_path (v0path, ctx, ty, fn->get_generic_params (),
diff --git a/gcc/rust/backend/rust-mangle.h b/gcc/rust/backend/rust-mangle.h
index 2a84b6b..418f2bd 100644
--- a/gcc/rust/backend/rust-mangle.h
+++ b/gcc/rust/backend/rust-mangle.h
@@ -49,13 +49,12 @@ private:
static enum MangleVersion version;
};
-std::string
-legacy_mangle_item (const TyTy::BaseType *ty,
- const Resolver::CanonicalPath &path);
+std::string legacy_mangle_item (const TyTy::BaseType *ty,
+ const Resolver::CanonicalPath &path);
-std::string
-v0_mangle_item (Rust::Compile::Context *ctx, const TyTy::BaseType *ty,
- const Resolver::CanonicalPath &path);
+std::string v0_mangle_item (Rust::Compile::Context *ctx,
+ const TyTy::BaseType *ty,
+ const Resolver::CanonicalPath &path);
} // namespace Compile
} // namespace Rust
diff --git a/gcc/rust/backend/rust-tree.cc b/gcc/rust/backend/rust-tree.cc
index 6cba04b..b86c3c8 100644
--- a/gcc/rust/backend/rust-tree.cc
+++ b/gcc/rust/backend/rust-tree.cc
@@ -268,7 +268,8 @@ convert_to_void (tree expr, impl_conv_void implicit)
return expr;
switch (TREE_CODE (expr))
{
- case COND_EXPR: {
+ case COND_EXPR:
+ {
/* The two parts of a cond expr might be separate lvalues. */
tree op1 = TREE_OPERAND (expr, 1);
tree op2 = TREE_OPERAND (expr, 2);
@@ -294,7 +295,8 @@ convert_to_void (tree expr, impl_conv_void implicit)
break;
}
- case COMPOUND_EXPR: {
+ case COMPOUND_EXPR:
+ {
/* The second part of a compound expr contains the value. */
tree op1 = TREE_OPERAND (expr, 1);
tree new_op1;
@@ -323,7 +325,8 @@ convert_to_void (tree expr, impl_conv_void implicit)
maybe_warn_nodiscard (expr, implicit);
break;
- case INDIRECT_REF: {
+ case INDIRECT_REF:
+ {
tree type = TREE_TYPE (expr);
int is_reference = TYPE_REF_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
int is_volatile = TYPE_VOLATILE (type);
@@ -518,7 +521,8 @@ convert_to_void (tree expr, impl_conv_void implicit)
break;
}
- case VAR_DECL: {
+ case VAR_DECL:
+ {
/* External variables might be incomplete. */
tree type = TREE_TYPE (expr);
int is_complete = COMPLETE_TYPE_P (type);
@@ -1485,7 +1489,8 @@ find_parameter_packs_r (tree *tp, int *walk_subtrees, void *data)
parameter pack. ??? Should some of these be in cp_walk_subtrees? */
switch (TREE_CODE (t))
{
- case DECL_EXPR: {
+ case DECL_EXPR:
+ {
tree decl = DECL_EXPR_DECL (t);
if (is_typedef_decl (decl))
/* Since we stop at typedefs above, we need to look through them at
@@ -1506,7 +1511,8 @@ find_parameter_packs_r (tree *tp, int *walk_subtrees, void *data)
*walk_subtrees = 0;
return NULL_TREE;
- case DECLTYPE_TYPE: {
+ case DECLTYPE_TYPE:
+ {
/* When traversing a DECLTYPE_TYPE_EXPR, we need to set
type_pack_expansion_p to false so that any placeholders
within the expression don't get marked as parameter packs. */
@@ -1970,7 +1976,8 @@ rs_tree_equal (tree t1, tree t2)
case SAVE_EXPR:
return rs_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
- case CALL_EXPR: {
+ case CALL_EXPR:
+ {
if (KOENIG_LOOKUP_P (t1) != KOENIG_LOOKUP_P (t2))
return false;
@@ -1996,7 +2003,8 @@ rs_tree_equal (tree t1, tree t2)
return true;
}
- case TARGET_EXPR: {
+ case TARGET_EXPR:
+ {
tree o1 = TREE_OPERAND (t1, 0);
tree o2 = TREE_OPERAND (t2, 0);
@@ -2067,7 +2075,8 @@ rs_tree_equal (tree t1, tree t2)
case tcc_expression:
case tcc_vl_exp:
case tcc_reference:
- case tcc_statement: {
+ case tcc_statement:
+ {
int n = rs_tree_operand_length (t1);
if (TREE_CODE_CLASS (code1) == tcc_vl_exp
&& n != TREE_OPERAND_LENGTH (t2))
@@ -2095,7 +2104,11 @@ rs_tree_equal (tree t1, tree t2)
/* TRUE iff TYPE is publicly & uniquely derived from PARENT. */
-bool publicly_uniquely_derived_p (tree, tree) { return false; }
+bool
+publicly_uniquely_derived_p (tree, tree)
+{
+ return false;
+}
// forked from gcc/cp/typeck.cc comp_except_types
@@ -3375,7 +3388,11 @@ release_tree_vector (vec<tree, va_gc> *vec)
/* As above, but also check value-dependence of the expression as a whole. */
-bool instantiation_dependent_expression_p (tree) { return false; }
+bool
+instantiation_dependent_expression_p (tree)
+{
+ return false;
+}
// forked from gcc/cp/cvt.cc cp_get_callee
@@ -3425,7 +3442,11 @@ scalarish_type_p (const_tree t)
constructors are deleted. This function implements the ABI notion of
non-trivial copy, which has diverged from the one in the standard. */
-bool type_has_nontrivial_copy_init (const_tree) { return false; }
+bool
+type_has_nontrivial_copy_init (const_tree)
+{
+ return false;
+}
// forked from gcc/cp/tree.cc build_local_temp
@@ -3448,7 +3469,11 @@ build_local_temp (tree type)
/* Returns true iff DECL is a capture proxy for a normal capture
(i.e. without explicit initializer). */
-bool is_normal_capture_proxy (tree) { return false; }
+bool
+is_normal_capture_proxy (tree)
+{
+ return false;
+}
// forked from gcc/cp/c-common.cc reject_gcc_builtin
@@ -3522,7 +3547,8 @@ is_bitfield_expr_with_lowered_type (const_tree exp)
case BIT_NOT_EXPR:
return is_bitfield_expr_with_lowered_type (TREE_OPERAND (exp, 0));
- case COMPONENT_REF: {
+ case COMPONENT_REF:
+ {
tree field;
field = TREE_OPERAND (exp, 1);
@@ -3848,16 +3874,18 @@ strip_top_quals (tree t)
/* Print an error message for invalid use of an incomplete type.
VALUE is the expression that was used (or 0 if that isn't known)
and TYPE is the type that was invalid. DIAG_KIND indicates the
- type of diagnostic (see diagnostic.def). */
+ type of diagnostic (see diagnostics/kinds.def). */
void
cxx_incomplete_type_diagnostic (location_t loc, const_tree value,
- const_tree type, diagnostic_t diag_kind)
+ const_tree type,
+ enum diagnostics::kind diag_kind)
{
// bool is_decl = false, complained = false;
- gcc_assert (diag_kind == DK_WARNING || diag_kind == DK_PEDWARN
- || diag_kind == DK_ERROR);
+ gcc_assert (diag_kind == diagnostics::kind::warning
+ || diag_kind == diagnostics::kind::pedwarn
+ || diag_kind == diagnostics::kind::error);
/* Avoid duplicate error message. */
if (TREE_CODE (type) == ERROR_MARK)
@@ -3907,7 +3935,8 @@ retry:
break;
case OFFSET_TYPE:
- bad_member : {
+ bad_member:
+ {
tree member = TREE_OPERAND (value, 1);
if (is_overloaded_fn (member))
member = get_first_fn (member);
@@ -3992,13 +4021,21 @@ decl_constant_var_p (tree decl)
/* Returns true iff DECL is a variable or function declared with an auto type
that has not yet been deduced to a real type. */
-bool undeduced_auto_decl (tree) { return false; }
+bool
+undeduced_auto_decl (tree)
+{
+ return false;
+}
// forked from gcc/cp/decl.cc require_deduced_type
/* Complain if DECL has an undeduced return type. */
-bool require_deduced_type (tree, tsubst_flags_t) { return true; }
+bool
+require_deduced_type (tree, tsubst_flags_t)
+{
+ return true;
+}
/* Return the location of a tree passed to %+ formats. */
@@ -4288,10 +4325,9 @@ struct GTY ((for_user)) source_location_table_entry
} // namespace Rust
-extern void
-gt_pch_nx (Rust::source_location_table_entry &);
-extern void
-gt_pch_nx (Rust::source_location_table_entry *, gt_pointer_operator, void *);
+extern void gt_pch_nx (Rust::source_location_table_entry &);
+extern void gt_pch_nx (Rust::source_location_table_entry *, gt_pointer_operator,
+ void *);
namespace Rust {
@@ -4419,7 +4455,8 @@ lvalue_kind (const_tree ref)
case VIEW_CONVERT_EXPR:
return lvalue_kind (TREE_OPERAND (ref, 0));
- case ARRAY_REF: {
+ case ARRAY_REF:
+ {
tree op1 = TREE_OPERAND (ref, 0);
if (TREE_CODE (TREE_TYPE (op1)) == ARRAY_TYPE)
{
@@ -4516,7 +4553,8 @@ lvalue_kind (const_tree ref)
op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1));
break;
- case COND_EXPR: {
+ case COND_EXPR:
+ {
tree op1 = TREE_OPERAND (ref, 1);
if (!op1)
op1 = TREE_OPERAND (ref, 0);
@@ -5131,7 +5169,7 @@ complete_type_or_maybe_complain (tree type, tree value, tsubst_flags_t complain)
else if (!COMPLETE_TYPE_P (type))
{
if (complain & tf_error)
- cxx_incomplete_type_diagnostic (value, type, DK_ERROR);
+ cxx_incomplete_type_diagnostic (value, type, diagnostics::kind::error);
note_failed_type_completion_for_satisfaction (type);
return NULL_TREE;
}
diff --git a/gcc/rust/backend/rust-tree.h b/gcc/rust/backend/rust-tree.h
index bb99684..9e859d4 100644
--- a/gcc/rust/backend/rust-tree.h
+++ b/gcc/rust/backend/rust-tree.h
@@ -1543,7 +1543,7 @@ extern GTY (()) tree cp_global_trees[CPTI_MAX];
#if defined ENABLE_TREE_CHECKING
#define LANG_DECL_MIN_CHECK(NODE) \
- __extension__({ \
+ __extension__ ({ \
struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
if (!LANG_DECL_HAS_MIN (NODE)) \
lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
@@ -1554,7 +1554,7 @@ extern GTY (()) tree cp_global_trees[CPTI_MAX];
template, not just on a FUNCTION_DECL. So when looking for things in
lang_decl_fn, look down through a TEMPLATE_DECL into its result. */
#define LANG_DECL_FN_CHECK(NODE) \
- __extension__({ \
+ __extension__ ({ \
struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
if (!DECL_DECLARES_FUNCTION_P (NODE) || lt->u.base.selector != lds_fn) \
lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
@@ -1562,7 +1562,7 @@ extern GTY (()) tree cp_global_trees[CPTI_MAX];
})
#define LANG_DECL_NS_CHECK(NODE) \
- __extension__({ \
+ __extension__ ({ \
struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
if (TREE_CODE (NODE) != NAMESPACE_DECL || lt->u.base.selector != lds_ns) \
lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
@@ -1570,7 +1570,7 @@ extern GTY (()) tree cp_global_trees[CPTI_MAX];
})
#define LANG_DECL_PARM_CHECK(NODE) \
- __extension__({ \
+ __extension__ ({ \
struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
if (TREE_CODE (NODE) != PARM_DECL || lt->u.base.selector != lds_parm) \
lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
@@ -1578,7 +1578,7 @@ extern GTY (()) tree cp_global_trees[CPTI_MAX];
})
#define LANG_DECL_DECOMP_CHECK(NODE) \
- __extension__({ \
+ __extension__ ({ \
struct lang_decl *lt = DECL_LANG_SPECIFIC (NODE); \
if (!VAR_P (NODE) || lt->u.base.selector != lds_decomp) \
lang_check_failed (__FILE__, __LINE__, __FUNCTION__); \
@@ -2060,8 +2060,8 @@ struct GTY (()) rust_cxx_saved_binding
// forked from gcc/cp/name-lookup.h resort_type_member_vec
/* needed for GTY annotation */
-extern void
-resort_type_member_vec (void *, void *, gt_pointer_operator, void *);
+extern void resort_type_member_vec (void *, void *, gt_pointer_operator,
+ void *);
// forked from gcc/cp/cp-tree.h saved_scope
@@ -2895,8 +2895,7 @@ enum compare_bounds_t
bounds_first
};
-extern tree
-convert_to_void (tree expr, impl_conv_void implicit);
+extern tree convert_to_void (tree expr, impl_conv_void implicit);
// The lvalue-to-rvalue conversion (7.1) is applied if and only if the
// expression is a glvalue of volatile-qualified type and it is one of the
@@ -2911,63 +2910,52 @@ convert_to_void (tree expr, impl_conv_void implicit);
// operands are one of these expressions, or
// * comma expression (8.19) where the right operand is one of these
// expressions.
-extern tree
-mark_discarded_use (tree expr);
+extern tree mark_discarded_use (tree expr);
// Mark EXP as read, not just set, for set but not used -Wunused warning
// purposes.
-extern void
-mark_exp_read (tree exp);
+extern void mark_exp_read (tree exp);
// We've seen an actual use of EXPR. Possibly replace an outer variable
// reference inside with its constant value or a lambda capture.
-extern tree
-mark_use (tree expr, bool rvalue_p, bool read_p, location_t loc,
- bool reject_builtin);
+extern tree mark_use (tree expr, bool rvalue_p, bool read_p, location_t loc,
+ bool reject_builtin);
// Called whenever the expression EXPR is used in an rvalue context.
// When REJECT_BUILTIN is true the expression is checked to make sure
// it doesn't make it possible to obtain the address of a GCC built-in
// function with no library fallback (or any of its bits, such as in
// a conversion to bool).
-extern tree
-mark_rvalue_use (tree, location_t = UNKNOWN_LOCATION,
- bool reject_builtin = true);
+extern tree mark_rvalue_use (tree, location_t = UNKNOWN_LOCATION,
+ bool reject_builtin = true);
// Called whenever an expression is used in an lvalue context.
-extern tree
-mark_lvalue_use (tree expr);
+extern tree mark_lvalue_use (tree expr);
// As above, but don't consider this use a read.
-extern tree
-mark_lvalue_use_nonread (tree expr);
+extern tree mark_lvalue_use_nonread (tree expr);
// We are using a reference VAL for its value. Bash that reference all the way
// down to its lowest form.
-extern tree
-convert_from_reference (tree val);
+extern tree convert_from_reference (tree val);
// Subroutine of convert_to_void. Warn if we're discarding something with
// attribute [[nodiscard]].
-extern void
-maybe_warn_nodiscard (tree expr, impl_conv_void implicit);
+extern void maybe_warn_nodiscard (tree expr, impl_conv_void implicit);
-extern location_t
-expr_loc_or_loc (const_tree t, location_t or_loc);
+extern location_t expr_loc_or_loc (const_tree t, location_t or_loc);
-extern location_t
-expr_loc_or_input_loc (const_tree t);
+extern location_t expr_loc_or_input_loc (const_tree t);
// FN is the callee of a CALL_EXPR or AGGR_INIT_EXPR; return the FUNCTION_DECL
// if we can.
-extern tree
-get_fndecl_from_callee (tree fn);
+extern tree get_fndecl_from_callee (tree fn);
// FIXME some helpers from HIRCompileBase could probably be moved here over time
// Return an expression for the address of BASE[INDEX], used in offset intrinsic
-extern tree
-pointer_offset_expression (tree base_tree, tree index_tree, location_t locus);
+extern tree pointer_offset_expression (tree base_tree, tree index_tree,
+ location_t locus);
/* A tree node, together with a location, so that we can track locations
(and ranges) during parsing.
@@ -2978,11 +2966,9 @@ pointer_offset_expression (tree base_tree, tree index_tree, location_t locus);
extern location_t rs_expr_location (const_tree);
-extern int
-is_empty_class (tree type);
+extern int is_empty_class (tree type);
-extern bool
-is_really_empty_class (tree, bool);
+extern bool is_really_empty_class (tree, bool);
extern bool builtin_valid_in_constant_expr_p (const_tree);
@@ -2990,15 +2976,13 @@ extern bool maybe_constexpr_fn (tree);
extern bool var_in_maybe_constexpr_fn (tree);
-extern int
-rs_type_quals (const_tree type);
+extern int rs_type_quals (const_tree type);
inline bool type_unknown_p (const_tree);
extern bool decl_maybe_constant_var_p (tree);
-extern void
-init_modules ();
+extern void init_modules ();
extern bool var_in_constexpr_fn (tree);
@@ -3006,11 +2990,9 @@ inline tree ovl_first (tree) ATTRIBUTE_PURE;
inline bool type_unknown_p (const_tree);
-extern tree
-lookup_add (tree fns, tree lookup);
+extern tree lookup_add (tree fns, tree lookup);
-extern tree
-ovl_make (tree fn, tree next = NULL_TREE);
+extern tree ovl_make (tree fn, tree next = NULL_TREE);
extern int is_overloaded_fn (tree) ATTRIBUTE_PURE;
@@ -3024,19 +3006,15 @@ extern tree make_conv_op_name (tree);
extern int type_memfn_quals (const_tree);
-struct c_fileinfo *
-get_fileinfo (const char *);
+struct c_fileinfo *get_fileinfo (const char *);
-extern tree
-cxx_make_type (enum tree_code CXX_MEM_STAT_INFO);
+extern tree cxx_make_type (enum tree_code CXX_MEM_STAT_INFO);
-extern tree
-build_cplus_array_type (tree, tree, int is_dep = -1);
+extern tree build_cplus_array_type (tree, tree, int is_dep = -1);
extern bool is_byte_access_type (tree);
-extern bool
-comptypes (tree, tree, int);
+extern bool comptypes (tree, tree, int);
extern tree canonical_eh_spec (tree);
@@ -3046,8 +3024,7 @@ extern bool rs_tree_equal (tree, tree);
extern bool compparms (const_tree, const_tree);
-extern tree
-rs_build_qualified_type_real (tree, int, tsubst_flags_t);
+extern tree rs_build_qualified_type_real (tree, int, tsubst_flags_t);
#define rs_build_qualified_type(TYPE, QUALS) \
rs_build_qualified_type_real ((TYPE), (QUALS), tf_warning_or_error)
extern bool cv_qualified_p (const_tree);
@@ -3056,21 +3033,18 @@ extern bool similar_type_p (tree, tree);
extern bool rs_tree_equal (tree, tree);
-extern bool
-vector_targets_convertible_p (const_tree t1, const_tree t2);
+extern bool vector_targets_convertible_p (const_tree t1, const_tree t2);
extern bool same_type_ignoring_top_level_qualifiers_p (tree, tree);
extern bool comp_ptr_ttypes_const (tree, tree, compare_bounds_t);
-extern tree
-get_class_binding_direct (tree, tree, bool want_type = false);
+extern tree get_class_binding_direct (tree, tree, bool want_type = false);
extern tree skip_artificial_parms_for (const_tree, tree);
-extern void
-lang_check_failed (const char *, int,
- const char *) ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
+extern void lang_check_failed (const char *, int,
+ const char *) ATTRIBUTE_NORETURN ATTRIBUTE_COLD;
extern tree default_init_uninitialized_part (tree);
@@ -3088,8 +3062,7 @@ extern tree in_class_defaulted_default_constructor (tree);
extern bool is_instantiation_of_constexpr (tree);
-extern bool
-check_for_uninitialized_const_var (tree, bool, tsubst_flags_t);
+extern bool check_for_uninitialized_const_var (tree, bool, tsubst_flags_t);
extern bool reduced_constant_expression_p (tree);
@@ -3108,19 +3081,17 @@ extern tree is_bitfield_expr_with_lowered_type (const_tree);
extern tree convert_bitfield_to_declared_type (tree);
-extern tree
-cp_fold_maybe_rvalue (tree, bool);
+extern tree cp_fold_maybe_rvalue (tree, bool);
extern tree maybe_undo_parenthesized_ref (tree);
-extern tree
-fold_offsetof (tree, tree = size_type_node, tree_code ctx = ERROR_MARK);
+extern tree fold_offsetof (tree, tree = size_type_node,
+ tree_code ctx = ERROR_MARK);
extern tree cp_truthvalue_conversion (tree, tsubst_flags_t);
-extern tree
-fold_non_dependent_expr (tree, tsubst_flags_t = tf_warning_or_error,
- bool = false, tree = NULL_TREE);
+extern tree fold_non_dependent_expr (tree, tsubst_flags_t = tf_warning_or_error,
+ bool = false, tree = NULL_TREE);
extern int char_type_p (tree);
@@ -3137,7 +3108,7 @@ extern bool reject_gcc_builtin (const_tree, location_t = UNKNOWN_LOCATION);
extern tree resolve_nondeduced_context (tree, tsubst_flags_t);
extern void cxx_incomplete_type_diagnostic (location_t, const_tree, const_tree,
- diagnostic_t);
+ enum diagnostics::kind);
extern void cxx_incomplete_type_error (location_t, const_tree, const_tree);
@@ -3163,13 +3134,11 @@ extern tree build_new_constexpr_heap_type (tree, tree, tree);
extern bool is_empty_field (tree);
-extern bool
-in_immediate_context ();
+extern bool in_immediate_context ();
extern tree cp_get_callee_fndecl_nofold (tree);
-extern bool
-cxx_mark_addressable (tree, bool = false);
+extern bool cxx_mark_addressable (tree, bool = false);
extern tree fold_builtin_source_location (location_t);
@@ -3183,25 +3152,22 @@ extern bool glvalue_p (const_tree);
extern cp_lvalue_kind lvalue_kind (const_tree);
-extern tree
-decl_constant_value (tree, bool);
+extern tree decl_constant_value (tree, bool);
extern tree lookup_enumerator (tree, tree);
-extern int
-is_class_type (tree, int);
+extern int is_class_type (tree, int);
extern tree braced_lists_to_strings (tree, tree);
-extern tree
-fold_builtin_is_pointer_inverconvertible_with_class (location_t, int, tree *);
+extern tree fold_builtin_is_pointer_inverconvertible_with_class (location_t,
+ int, tree *);
extern bool layout_compatible_type_p (tree, tree);
extern tree finish_underlying_type (tree);
-extern tree
-c_common_type_for_mode (machine_mode, int);
+extern tree c_common_type_for_mode (machine_mode, int);
extern bool std_layout_type_p (const_tree);
@@ -3213,25 +3179,21 @@ extern void note_failed_type_completion_for_satisfaction (tree);
extern tree complete_type_or_maybe_complain (tree, tree, tsubst_flags_t);
-extern bool
-next_common_initial_seqence (tree &, tree &);
+extern bool next_common_initial_seqence (tree &, tree &);
extern bool null_member_pointer_value_p (tree);
-extern tree
-fold_builtin_is_corresponding_member (location_t, int, tree *);
+extern tree fold_builtin_is_corresponding_member (location_t, int, tree *);
extern tree cp_fold_rvalue (tree);
-extern tree
-maybe_constant_value (tree, tree = NULL_TREE, bool = false);
+extern tree maybe_constant_value (tree, tree = NULL_TREE, bool = false);
extern tree lvalue_type (tree);
extern void lvalue_error (location_t, enum lvalue_use);
-extern tree
-cp_fold_maybe_rvalue (tree, bool);
+extern tree cp_fold_maybe_rvalue (tree, bool);
extern tree get_first_fn (tree) ATTRIBUTE_PURE;
@@ -3253,13 +3215,12 @@ enum
ce_exact
};
-extern tree
-rs_build_qualified_type_real (tree, int, tsubst_flags_t);
+extern tree rs_build_qualified_type_real (tree, int, tsubst_flags_t);
#define rs_build_qualified_type(TYPE, QUALS) \
rs_build_qualified_type_real ((TYPE), (QUALS), tf_warning_or_error)
-extern tree
-rs_walk_subtrees (tree *, int *, walk_tree_fn, void *, hash_set<tree> *);
+extern tree rs_walk_subtrees (tree *, int *, walk_tree_fn, void *,
+ hash_set<tree> *);
#define rs_walk_tree(tp, func, data, pset) \
walk_tree_1 (tp, func, data, pset, rs_walk_subtrees)
#define rs_walk_tree_without_duplicates(tp, func, data) \
@@ -3351,11 +3312,9 @@ gnu_vector_type_p (const_tree type)
return TREE_CODE (type) == VECTOR_TYPE && !TYPE_INDIVISIBLE_P (type);
}
-extern vec<tree, va_gc> *
-make_tree_vector (void);
+extern vec<tree, va_gc> *make_tree_vector (void);
-extern void
-release_tree_vector (vec<tree, va_gc> *);
+extern void release_tree_vector (vec<tree, va_gc> *);
/* Simplified unique_ptr clone to release a tree vec on exit. */
@@ -3373,7 +3332,7 @@ public:
releasing_vec &operator= (const releasing_vec &);
vec_t &operator* () const { return *v; }
- vec_t *operator-> () const { return v; }
+ vec_t *operator->() const { return v; }
vec_t *get () const { return v; }
operator vec_t * () const { return v; }
vec_t **operator& () { return &v; }
@@ -3430,7 +3389,7 @@ null_node_p (const_tree expr)
inline void
cxx_incomplete_type_diagnostic (const_tree value, const_tree type,
- diagnostic_t diag_kind)
+ enum diagnostics::kind diag_kind)
{
cxx_incomplete_type_diagnostic (rs_expr_loc_or_input_loc (value), value, type,
diag_kind);
@@ -3439,11 +3398,10 @@ cxx_incomplete_type_diagnostic (const_tree value, const_tree type,
inline void
cxx_incomplete_type_error (const_tree value, const_tree type)
{
- cxx_incomplete_type_diagnostic (value, type, DK_ERROR);
+ cxx_incomplete_type_diagnostic (value, type, diagnostics::kind::error);
}
-extern location_t
-location_of (tree t);
+extern location_t location_of (tree t);
/* Helpers for IMPLICIT_RVALUE_P to look through automatic dereference. */
@@ -3465,23 +3423,18 @@ set_implicit_rvalue_p (tree ot)
}
namespace Compile {
-extern tree
-maybe_constant_init (tree, tree = NULL_TREE, bool = false);
+extern tree maybe_constant_init (tree, tree = NULL_TREE, bool = false);
-extern void
-explain_invalid_constexpr_fn (tree fun);
+extern void explain_invalid_constexpr_fn (tree fun);
extern bool potential_constant_expression (tree);
-extern bool
-literal_type_p (tree t);
+extern bool literal_type_p (tree t);
-extern bool
-maybe_constexpr_fn (tree t);
+extern bool maybe_constexpr_fn (tree t);
-extern tree
-fold_non_dependent_init (tree, tsubst_flags_t = tf_warning_or_error,
- bool = false, tree = NULL_TREE);
+extern tree fold_non_dependent_init (tree, tsubst_flags_t = tf_warning_or_error,
+ bool = false, tree = NULL_TREE);
} // namespace Compile
} // namespace Rust