author     Pierre-Emmanuel Patry <pierre-emmanuel.patry@embecosm.com>  2023-05-31 12:52:16 +0200
committer  Philip Herron <philip.herron@embecosm.com>  2023-06-01 15:23:35 +0000
commit     430b26a3295329c931ce527de20b4c339eb285f2
tree       5beb68993e6cecfcf125237c574a6048e144caa8
parent     f35d52ce69df89401bde47f648868fb227a7de40
converter: Convert back Locations from spans
Use spans to get locations of expanded tokens instead of using unknown
locations.

gcc/rust/ChangeLog:

	* lex/rust-lex.h: Make build_token method public.
	* lex/rust-token.h: Add a setter for a token location.
	* util/rust-token-converter.cc (convert): Add the function to
	convert from a Span to a Location.
	(from_ident): Convert Ident span to a Location.
	(from_literal): Convert Literal span to a Location.
	(from_punct): Convert Punct span to a Location.
	(from_group): Convert Group span to a Location.

Signed-off-by: Pierre-Emmanuel Patry <pierre-emmanuel.patry@embecosm.com>
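The pattern the diff below introduces is small but repeated in each from_* helper:
instead of pushing lexer.peek_token () with an unknown location, the converter now
builds the token, converts the proc-macro span into a Location, and stamps that
location onto the token before pushing it. The following standalone sketch
illustrates that pattern only; Span, Location, Token, and build_token here are
simplified stand-ins, not the actual gccrs ProcMacro/Lexer types.

// Minimal sketch of "rebuild token, then inject location from span".
#include <cstdint>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Span      // stand-in for ProcMacro::Span: byte offsets only
{
  std::uint32_t start;
  std::uint32_t end;
};

struct Location  // stand-in for the compiler's Location wrapper
{
  std::uint32_t value = 0;
  Location () = default;
  explicit Location (std::uint32_t v) : value (v) {}
};

struct Token
{
  std::string text;
  Location locus;
  // Mirrors the setter added to rust-token.h.
  void set_locus (Location l) { locus = l; }
};
using TokenPtr = std::shared_ptr<Token>;

// Mirrors the new convert (ProcMacro::Span) overload: only the start
// offset of the span is turned into a Location.
static Location
convert (Span span)
{
  return Location (span.start);
}

// Stand-in for Lexer::build_token (): real code lexes `value`; here we
// just wrap it in a token with an unknown (default) location.
static TokenPtr
build_token (const std::string &value)
{
  return std::make_shared<Token> (Token{value, Location ()});
}

int
main ()
{
  // An identifier coming back from a proc macro, with its span attached.
  std::string ident_text = "my_ident";
  Span ident_span{42, 50};

  std::vector<TokenPtr> result;
  auto token = build_token (ident_text);   // location is unknown here
  token->set_locus (convert (ident_span)); // inject the span's location
  result.push_back (token);

  std::cout << result.back ()->text << " @ offset "
	    << result.back ()->locus.value << "\n";
  return 0;
}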
-rw-r--r--  gcc/rust/lex/rust-lex.h                 6
-rw-r--r--  gcc/rust/lex/rust-token.h               3
-rw-r--r--  gcc/rust/util/rust-token-converter.cc  44
3 files changed, 32 insertions, 21 deletions
diff --git a/gcc/rust/lex/rust-lex.h b/gcc/rust/lex/rust-lex.h
index 1400839..6f89be4 100644
--- a/gcc/rust/lex/rust-lex.h
+++ b/gcc/rust/lex/rust-lex.h
@@ -126,9 +126,6 @@ private:
// Classifies keyword (i.e. gets id for keyword).
TokenId classify_keyword (const std::string &str);
- // Builds a token from the input queue.
- TokenPtr build_token ();
-
std::tuple<std::string, int, bool> parse_in_decimal ();
std::pair<std::string, int> parse_in_exponent_part ();
std::pair<PrimitiveCoreType, int> parse_in_type_suffix ();
@@ -187,6 +184,9 @@ public:
// Peeks the current token.
const_TokenPtr peek_token () { return peek_token (0); }
+ // Builds a token from the input queue.
+ TokenPtr build_token ();
+
// Advances current token to n + 1 tokens ahead of current position.
void skip_token (int n);
// Skips the current token.
diff --git a/gcc/rust/lex/rust-token.h b/gcc/rust/lex/rust-token.h
index 8956d7b..7c6bcae 100644
--- a/gcc/rust/lex/rust-token.h
+++ b/gcc/rust/lex/rust-token.h
@@ -386,6 +386,9 @@ public:
// Gets location of the token.
Location get_locus () const { return locus; }
+ // Set location of the token.
+ void set_locus (Location locus) { this->locus = locus; }
+
// Gets string description of the token.
const std::string &
get_str () const; /*{
diff --git a/gcc/rust/util/rust-token-converter.cc b/gcc/rust/util/rust-token-converter.cc
index 6c18ae3..d8b9090 100644
--- a/gcc/rust/util/rust-token-converter.cc
+++ b/gcc/rust/util/rust-token-converter.cc
@@ -57,6 +57,12 @@ convert (Location location)
return ProcMacro::Span::make_span (location.gcc_location (), 0);
}
+static Location
+convert (ProcMacro::Span span)
+{
+ return Location (span.start);
+}
+
static void
handle_suffix (ProcMacro::TokenStream &ts, const const_TokenPtr &token,
ProcMacro::LitKind kind)
@@ -275,10 +281,10 @@ from_ident (const ProcMacro::Ident &ident, std::vector<const_TokenPtr> &result)
if (ident.is_raw)
value = "r#" + value;
- // TODO: Inject span -> for now spans are not stored in Ident, once changed
- // the span should be injected in the built token below.
Lexer lexer (value);
- result.push_back (lexer.peek_token ());
+ auto token = lexer.build_token ();
+ token->set_locus (convert (ident.span));
+ result.push_back (token);
}
/**
@@ -292,6 +298,7 @@ from_literal (const ProcMacro::Literal &literal,
std::vector<const_TokenPtr> &result)
{
auto lookup = suffixes.lookup (literal.suffix.to_string ());
+ auto loc = convert (literal.span);
auto suffix
= suffixes.is_iter_ok (lookup) ? lookup->second : CORETYPE_UNKNOWN;
// FIXME: Add spans instead of empty locations
@@ -299,27 +306,25 @@ from_literal (const ProcMacro::Literal &literal,
{
case ProcMacro::BYTE:
result.push_back (
- Token::make_byte_char (Location (), literal.text.to_string ()[0]));
+ Token::make_byte_char (loc, literal.text.to_string ()[0]));
break;
case ProcMacro::CHAR:
- result.push_back (
- Token::make_char (Location (), literal.text.to_string ()[0]));
+ result.push_back (Token::make_char (loc, literal.text.to_string ()[0]));
break;
case ProcMacro::INTEGER:
result.push_back (
- Token::make_int (Location (), literal.text.to_string (), suffix));
+ Token::make_int (loc, literal.text.to_string (), suffix));
break;
case ProcMacro::FLOAT:
result.push_back (
- Token::make_float (Location (), literal.text.to_string (), suffix));
+ Token::make_float (loc, literal.text.to_string (), suffix));
break;
case ProcMacro::STR:
- result.push_back (
- Token::make_string (Location (), literal.text.to_string ()));
+ result.push_back (Token::make_string (loc, literal.text.to_string ()));
break;
case ProcMacro::BYTE_STR:
result.push_back (
- Token::make_byte_string (Location (), literal.text.to_string ()));
+ Token::make_byte_string (loc, literal.text.to_string ()));
break;
// FIXME: Handle raw string
case ProcMacro::STR_RAW:
@@ -347,7 +352,9 @@ from_punct (const ProcMacro::Punct &punct, std::vector<std::uint32_t> &acc,
// TODO: UTF-8 string
std::string whole (acc.begin (), acc.end ());
auto lexer = Lexer (whole);
- result.push_back (lexer.peek_token ());
+ auto token = lexer.build_token ();
+ token->set_locus (convert (punct.span));
+ result.push_back (token);
acc.clear ();
}
}
@@ -362,22 +369,23 @@ from_punct (const ProcMacro::Punct &punct, std::vector<std::uint32_t> &acc,
static void
from_group (const ProcMacro::Group &g, std::vector<const_TokenPtr> &result)
{
+ auto loc = convert (g.span);
switch (g.delimiter)
{
case ProcMacro::PARENTHESIS:
- result.push_back (Token::make (LEFT_PAREN, Location ()));
+ result.push_back (Token::make (LEFT_PAREN, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_PAREN, Location ()));
+ result.push_back (Token::make (RIGHT_PAREN, loc));
break;
case ProcMacro::BRACE:
- result.push_back (Token::make (LEFT_CURLY, Location ()));
+ result.push_back (Token::make (LEFT_CURLY, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_CURLY, Location ()));
+ result.push_back (Token::make (RIGHT_CURLY, loc));
break;
case ProcMacro::BRACKET:
- result.push_back (Token::make (LEFT_SQUARE, Location ()));
+ result.push_back (Token::make (LEFT_SQUARE, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_SQUARE, Location ()));
+ result.push_back (Token::make (RIGHT_SQUARE, loc));
break;
case ProcMacro::NONE:
from_tokenstream (g.stream, result);