path: root/gcc/cpplex.c
author     Neil Booth <neil@daikokuya.demon.co.uk>   2001-09-13 20:05:17 +0000
committer  Neil Booth <neil@gcc.gnu.org>             2001-09-13 20:05:17 +0000
commit     bdcbe49686fce1d35955579428aa2ade21dd941c (patch)
tree       f67a034f6447649165fb6297431354b49c7620fc /gcc/cpplex.c
parent     83182544dbfc89d2d974431383d61039cac9b773 (diff)
c-parse.in (_yylex): Use _cpp_backup_tokens.
	* c-parse.in (_yylex): Use _cpp_backup_tokens.
	* cpphash.h (struct tokenrun): Add prev.
	(struct lexer_state): Remove bol.
	(struct cpp_reader): Remove old lookahead stuff, add lookaheads.
	(_cpp_free_lookaheads, _cpp_release_lookahead, _cpp_push_token): Remove.
	* cppinit.c (cpp_create_reader): Don't set bol.
	(cpp_destroy): Don't free lookaheads.
	* cpplex.c (lex_directive): Remove.
	(next_tokenrun): Update.
	(_cpp_lex_token): Clean up logic.
	(lex_token): Update to return a pointer to lexed token, since it
	can move to the start of the buffer.  Simplify newline handling.
	* cpplib.c (SEEN_EOL): Update.
	(skip_rest_of_line): Remove lookahead stuff.
	(end_directive): Line numbers are already incremented.  Revert
	to start of lexed token buffer if we can.
	(_cpp_handle_directive, do_pragma, do_pragma_dependency,
	parse_answer): Use _cpp_backup_tokens.
	(run_directive, cpp_pop_buffer): Don't set bol, set saved_flags
	instead.  Don't check for EOL.
	(do_include_common, do_line, do_pragma_system_header): Use
	skip_rest_of_line.
	* cpplib.h (BOL, _cpp_backup_tokens): New.
	* cppmacro.c (save_lookahead_token, take_lookahead_token,
	alloc_lookahead, free_lookahead, _cpp_free_lookaheads,
	cpp_start_lookahead, cpp_stop_lookahead, _cpp_push_token): Remove.
	(builtin_macro): Don't use cpp_get_line.
	(cpp_get_line): Short term kludge.
	(parse_arg): Handle directives in arguments here.  Back up when
	appropriate.  Store EOF at end of argument list.
	(funlike_invocation_p): Use _cpp_backup_tokens.
	(push_arg_context): Account for EOF at end of list.
	(cpp_get_token): Remove lookahead stuff.  Update.
	* gcc.dg/cpp/directiv.c: Update.
	* gcc.dg/cpp/undef1.c: Update.

From-SVN: r45582
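[Editor's note: the following is an illustrative sketch, not GCC code.]  The
backup scheme the ChangeLog describes works because already-lexed tokens stay
put in the run buffer: backing up is just moving cur_token back and recording,
in a lookaheads counter, how many buffered tokens the next reads should replay
instead of lexing afresh.  The minimal C program below shows that idea; the
toy_* names are invented stand-ins for cpp_reader, cpp_token, _cpp_lex_token
and _cpp_backup_tokens, and the fixed-size buffer replaces the real tokenrun
machinery.

    #include <stdio.h>

    typedef struct { int type; } toy_token;

    typedef struct
    {
      toy_token buf[16];        /* stands in for the tokenrun buffer */
      toy_token *cur_token;     /* next slot to lex into / read from */
      unsigned int lookaheads;  /* buffered tokens to replay before lexing */
      int next_value;           /* fake "source" for freshly lexed tokens */
    } toy_reader;

    /* Analogue of _cpp_backup_tokens: rewind over tokens already handed
       out; they will be replayed rather than re-lexed.  */
    static void
    toy_backup_tokens (toy_reader *r, unsigned int count)
    {
      r->cur_token -= count;
      r->lookaheads += count;
    }

    /* Analogue of the new _cpp_lex_token core loop: take the next slot,
       and only "lex" into it if there is nothing buffered to replay.  */
    static toy_token *
    toy_lex_token (toy_reader *r)
    {
      toy_token *result = r->cur_token++;

      if (r->lookaheads)
        r->lookaheads--;                    /* replay a buffered token */
      else
        result->type = r->next_value++;     /* "lex" a fresh one */
      return result;
    }

    int
    main (void)
    {
      toy_reader r;
      toy_token *a, *b;

      r.cur_token = r.buf;
      r.lookaheads = 0;
      r.next_value = 100;

      a = toy_lex_token (&r);               /* freshly "lexed": 100 */
      b = toy_lex_token (&r);               /* freshly "lexed": 101 */
      printf ("%d %d\n", a->type, b->type);

      toy_backup_tokens (&r, 2);            /* push both back */

      a = toy_lex_token (&r);               /* replayed, not re-lexed */
      b = toy_lex_token (&r);
      printf ("%d %d\n", a->type, b->type); /* same 100 101 again */
      return 0;
    }

No token is ever copied on backup; that is why the patch can drop the old
lookahead save/restore code in cppmacro.c in favour of a single counter.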
Diffstat (limited to 'gcc/cpplex.c')
-rw-r--r--   gcc/cpplex.c   167
1 file changed, 63 insertions(+), 104 deletions(-)
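[Editor's note: a second illustrative sketch, placed before the patch text so
the diff below stays intact.]  The first cpplex.c hunk extends the lazily
allocated token-run list with a prev pointer, so the lexer can step back across
run boundaries when tokens are backed up.  Only tokenrun, next/prev and
next_tokenrun come from the commit; the toy_* names and plain malloc stand in
for GCC's xnew and _cpp_init_tokenrun.

    #include <stdlib.h>

    typedef struct toy_run toy_run;
    struct toy_run
    {
      int base[250];         /* token storage (ints stand in for cpp_token) */
      int *limit;            /* one past the last usable slot */
      toy_run *next, *prev;  /* prev is what the patch adds */
    };

    static void
    toy_init_run (toy_run *run)
    {
      run->limit = run->base + 250;
      run->next = NULL;
      run->prev = NULL;
    }

    /* Analogue of next_tokenrun: allocate the following run on first use
       and link it back to its predecessor.  */
    static toy_run *
    toy_next_run (toy_run *run)
    {
      if (run->next == NULL)
        {
          run->next = (toy_run *) malloc (sizeof (toy_run));
          if (run->next == NULL)
            abort ();
          toy_init_run (run->next);
          run->next->prev = run;
        }
      return run->next;
    }

    int
    main (void)
    {
      toy_run first;
      toy_run *second;

      toy_init_run (&first);
      second = toy_next_run (&first);

      /* Walking forward and then back across the boundary lands on the
         same runs, which is what backing up tokens relies on.  */
      return (second->prev == &first && toy_next_run (&first) == second) ? 0 : 1;
    }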
diff --git a/gcc/cpplex.c b/gcc/cpplex.c
index 1aea9e8..6d640e0 100644
--- a/gcc/cpplex.c
+++ b/gcc/cpplex.c
@@ -102,8 +102,7 @@ static void lex_dot PARAMS ((cpp_reader *, cpp_token *));
static int name_p PARAMS ((cpp_reader *, const cpp_string *));
static int maybe_read_ucs PARAMS ((cpp_reader *, const unsigned char **,
const unsigned char *, unsigned int *));
-static int lex_directive PARAMS ((cpp_reader *));
-static void lex_token PARAMS ((cpp_reader *, cpp_token *, int));
+static cpp_token *lex_token PARAMS ((cpp_reader *, cpp_token *));
static tokenrun *next_tokenrun PARAMS ((tokenrun *));
static cpp_chunk *new_chunk PARAMS ((unsigned int));
@@ -925,114 +924,69 @@ next_tokenrun (run)
if (run->next == NULL)
{
run->next = xnew (tokenrun);
+ run->next->prev = run;
_cpp_init_tokenrun (run->next, 250);
}
return run->next;
}
-static int
-lex_directive (pfile)
- cpp_reader *pfile;
-{
- /* 6.10.3 paragraph 11: If there are sequences of preprocessing
- tokens within the list of arguments that would otherwise act as
- preprocessing directives, the behavior is undefined.
-
- This implementation will report a hard error, terminate the macro
- invocation, and proceed to process the directive. */
- if (pfile->state.parsing_args)
- {
- pfile->lexer_pos.output_line = pfile->line;
- if (pfile->state.parsing_args == 2)
- {
- cpp_error (pfile,
- "directives may not be used inside a macro argument");
- pfile->state.bol = 1;
- pfile->buffer->cur = pfile->buffer->line_base;
- pfile->buffer->read_ahead = EOF;
- pfile->cur_token->type = CPP_EOF;
- }
-
- return 0;
- }
-
- /* This is a directive. If the return value is false, it is an
- assembler #. */
- {
- /* FIXME: short-term kludge only - it doesn't handle the case that
- the # is at the end of a run and we moved to the start of the
- next one. Easily fixed once we kill lookaheads. */
- cpp_token *token = pfile->cur_token++;
- if (_cpp_handle_directive (pfile, token->flags & PREV_WHITE))
- return 1;
- pfile->cur_token = token;
- return 0;
- }
-}
-
/* Lex a token into RESULT (external interface). */
void
-_cpp_lex_token (pfile, result)
+_cpp_lex_token (pfile, dest)
cpp_reader *pfile;
- cpp_token *result;
+ cpp_token *dest;
{
- if (pfile->cur_token == pfile->cur_run->limit)
- {
- pfile->cur_run = next_tokenrun (pfile->cur_run);
- pfile->cur_token = pfile->cur_run->base;
- }
+ cpp_token *result;
- next_token:
- if (pfile->state.bol)
+ for (;;)
{
- start_new_line:
- pfile->state.bol = 0;
-
- /* Return lexer back to base. */
- if (!pfile->keep_tokens)
+ if (pfile->cur_token == pfile->cur_run->limit)
{
- pfile->cur_run = &pfile->base_run;
- pfile->cur_token = pfile->base_run.base;
+ pfile->cur_run = next_tokenrun (pfile->cur_run);
+ pfile->cur_token = pfile->cur_run->base;
}
+ result = pfile->cur_token++;
- lex_token (pfile, pfile->cur_token, 1);
- pfile->lexer_pos.output_line = pfile->cur_token->line;
- if (pfile->cur_token->type == CPP_HASH && lex_directive (pfile))
- goto start_new_line;
- }
- else
- {
- lex_token (pfile, pfile->cur_token, 0);
- if (pfile->cur_token->type == CPP_EOF)
+ if (pfile->lookaheads)
+ pfile->lookaheads--;
+ else
+ result = lex_token (pfile, result);
+
+ if (result->flags & BOL)
{
- if (!pfile->state.in_directive)
- goto start_new_line;
- /* Decrementing pfile->line allows directives to recognise
- that the newline has been seen, and also means that
- diagnostics don't point to the next line. */
- pfile->lexer_pos.output_line = pfile->line--;
+ pfile->lexer_pos.output_line = result->line;
+ /* Is this a directive. If _cpp_handle_directive returns
+ false, it is an assembler #. */
+ if (result->type == CPP_HASH
+ && !pfile->state.parsing_args
+ && _cpp_handle_directive (pfile, result->flags & PREV_WHITE))
+ continue;
}
- }
- if (!pfile->state.in_directive)
- {
- if (pfile->state.skipping && pfile->cur_token->type != CPP_EOF)
- goto next_token;
+ /* We don't skip tokens in directives. */
+ if (pfile->state.in_directive)
+ break;
- /* Outside a directive, invalidate controlling macros. */
+ /* Outside a directive, invalidate controlling macros. At file
+ EOF, lex_token takes care of popping the buffer, so we never
+ get here and MI optimisation works. */
pfile->mi_valid = false;
+
+ if (!pfile->state.skipping || result->type == CPP_EOF)
+ break;
}
- *result = *pfile->cur_token++;
+ *dest = *result;
}
-/* Lex a token into RESULT (internal interface). */
-static void
-lex_token (pfile, result, skip_newlines)
+/* Lex a token into RESULT. When meeting a newline, returns CPP_EOF
+ if parsing a directive, otherwise returns to the start of the token
+ buffer if permissible. Returns the location of the lexed token. */
+static cpp_token *
+lex_token (pfile, result)
cpp_reader *pfile;
cpp_token *result;
- int skip_newlines;
{
cppchar_t c;
cpp_buffer *buffer;
@@ -1058,21 +1012,10 @@ lex_token (pfile, result, skip_newlines)
switch (c)
{
case EOF:
+ buffer->saved_flags = BOL;
if (!pfile->state.parsing_args && !pfile->state.in_directive)
{
- if (buffer->cur == buffer->line_base)
- {
- /* Don't pop the last buffer. */
- if (buffer->prev)
- {
- unsigned char stop = buffer->return_at_eof;
-
- _cpp_pop_buffer (pfile);
- if (!stop)
- goto fresh_line;
- }
- }
- else
+ if (buffer->cur != buffer->line_base)
{
/* Non-empty files should end in a newline. Don't warn
for command line and _Pragma buffers. */
@@ -1080,6 +1023,16 @@ lex_token (pfile, result, skip_newlines)
cpp_pedwarn (pfile, "no newline at end of file");
handle_newline (pfile, '\n');
}
+
+ /* Don't pop the last buffer. */
+ if (buffer->prev)
+ {
+ unsigned char stop = buffer->return_at_eof;
+
+ _cpp_pop_buffer (pfile);
+ if (!stop)
+ goto fresh_line;
+ }
}
result->type = CPP_EOF;
break;
@@ -1090,13 +1043,17 @@ lex_token (pfile, result, skip_newlines)
goto skipped_white;
case '\n': case '\r':
- if (pfile->state.in_directive && pfile->state.parsing_args)
- buffer->read_ahead = c;
- else
+ handle_newline (pfile, c);
+ buffer->saved_flags = BOL;
+ if (! pfile->state.in_directive)
{
- handle_newline (pfile, c);
- if (skip_newlines)
- goto fresh_line;
+ if (!pfile->keep_tokens)
+ {
+ pfile->cur_run = &pfile->base_run;
+ result = pfile->base_run.base;
+ pfile->cur_token = result + 1;
+ }
+ goto fresh_line;
}
result->type = CPP_EOF;
break;
@@ -1228,7 +1185,7 @@ lex_token (pfile, result, skip_newlines)
/* Save the comment as a token in its own right. */
save_comment (pfile, result, comment_start);
/* Don't do MI optimisation. */
- return;
+ break;
case '<':
if (pfile->state.angled_headers)
@@ -1397,6 +1354,8 @@ lex_token (pfile, result, skip_newlines)
result->val.c = c;
break;
}
+
+ return result;
}
/* An upper bound on the number of bytes needed to spell a token,