path: root/lib/Lex/PPLexerChange.cpp
author     Argyrios Kyrtzidis <akyrtzi@gmail.com>    2008-08-10 13:15:22 +0000
committer  Argyrios Kyrtzidis <akyrtzi@gmail.com>    2008-08-10 13:15:22 +0000
commit     03db1b31dd926409b7defc1c90b66549464652c0 (patch)
tree       ea9d1600a9098e36c5ec0da6632e44180d1ad9d4  /lib/Lex/PPLexerChange.cpp
parent     9ca8bb0996bbb8b9dbf69c51cb3d1523559e47e3 (diff)
Allow the preprocessor to cache the lexed tokens, so that we can do efficient lookahead and backtracking.
1) New public methods added: EnableBacktrackAtThisPos, DisableBacktrack, Backtrack, isBacktrackEnabled.
2) LookAhead() implementation is replaced with a more efficient one.
3) LookNext() is removed.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@54611 91177308-0d34-0410-b5e6-96231b3b80d8
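As a rough illustration of the API listed above, here is a hedged sketch of how a parser might use it. The Preprocessor, Token, and tok definitions are Clang's, but the ParseMaybeDeclaration function, its keyword check, and the exact call sequence are assumptions for illustration only, not code from this commit.

#include "clang/Lex/Preprocessor.h"

// Hypothetical caller: tentatively parse, then rewind if the tokens turn out
// not to start a declaration. Method names are taken from the commit message.
bool ParseMaybeDeclaration(clang::Preprocessor &PP) {
  PP.EnableBacktrackAtThisPos();   // start caching lexed tokens

  clang::Token Tok;
  PP.Lex(Tok);
  if (Tok.is(clang::tok::kw_int)) {
    PP.DisableBacktrack();         // commit: keep the tokens we consumed
    return true;
  }

  PP.Backtrack();                  // replay the cached tokens on later Lex() calls
  return false;
}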
Diffstat (limited to 'lib/Lex/PPLexerChange.cpp')
-rw-r--r--  lib/Lex/PPLexerChange.cpp  88
1 file changed, 0 insertions, 88 deletions
diff --git a/lib/Lex/PPLexerChange.cpp b/lib/Lex/PPLexerChange.cpp
index 1bedd5eded..a14cbed03a 100644
--- a/lib/Lex/PPLexerChange.cpp
+++ b/lib/Lex/PPLexerChange.cpp
@@ -60,94 +60,6 @@ Lexer *Preprocessor::getCurrentFileLexer() const {
return 0;
}
-/// LookAhead - This peeks ahead N tokens and returns that token without
-/// consuming any tokens. LookAhead(0) returns 'Tok', LookAhead(1) returns
-/// the token after Tok, etc.
-///
-/// NOTE: This is a relatively expensive method, so it should not be used in common
-/// code paths if possible!
-///
-Token Preprocessor::LookAhead(unsigned N) {
- // FIXME: Optimize the case where multiple lookahead calls are used back to
- // back. Consider if the parser contained (dynamically):
- // Lookahead(1); Lookahead(1); Lookahead(1)
- // This would return the same token 3 times, but would end up making lots of
- // token stream lexers to do it. To handle this common case, see if the top
- // of the lexer stack is a TokenStreamLexer with macro expansion disabled. If
- // so, see if it has 'N' tokens available in it. If so, just return the
- // token.
-
- // FIXME: Optimize the case when the parser does multiple nearby lookahead
- // calls. For example, consider:
- // Lookahead(0); Lookahead(1); Lookahead(2);
- // The previous optimization won't apply, and there won't be any space left in
- // the array that was previously new'd. To handle this, always round up the
- // size we new to a multiple of 16 tokens. If the previous buffer has space
- // left, we can just grow it. This means we only have to do the new 1/16th as
- // often.
-
- // Optimized LookAhead(0) case.
- if (N == 0)
- return LookNext();
-
- Token *LookaheadTokens = new Token[N+1];
-
- // Read N+1 tokens into LookaheadTokens. After this loop, Tok is the token
- // to return.
- Token Tok;
- unsigned NumTokens = 0;
- for (; N != ~0U; --N, ++NumTokens) {
- Lex(Tok);
- LookaheadTokens[NumTokens] = Tok;
-
- // If we got to EOF, don't lex past it. This will cause LookAhead to return
- // the EOF token.
- if (Tok.is(tok::eof))
- break;
- }
-
- // Okay, at this point, we have the token we want to return in Tok. However,
- // we read it and a bunch of other stuff (in LookaheadTokens) that we must
- // allow subsequent calls to 'Lex' to return. To do this, we push a new token
- // lexer onto the lexer stack with the tokens we read here. This passes
- // ownership of LookaheadTokens to EnterTokenStream.
- //
- // Note that we disable macro expansion of the tokens from this buffer, since
- // any macros have already been expanded, and the internal preprocessor state
- // may already read past new macros. Consider something like LookAhead(1) on
- // X
- // #define X 14
- // Y
- // The lookahead call should return 'Y', and the next Lex call should return
- // 'X' even though X -> 14 has already been entered as a macro.
- //
- EnterTokenStream(LookaheadTokens, NumTokens, true /*DisableExpansion*/,
- true /*OwnsTokens*/);
- return Tok;
-}
-
-/// PeekToken - Lexes one token into PeekedToken and pushes CurLexer,
-/// CurTokenLexer into the IncludeMacroStack before setting them to null.
-void Preprocessor::PeekToken() {
- Lex(PeekedToken);
- // Cache the current Lexer, TokenLexer and set them both to null.
- // When Lex() is called, PeekedToken will be "consumed".
- IncludeMacroStack.push_back(IncludeStackInfo(CurLexer, CurDirLookup,
- CurTokenLexer));
- CurLexer = 0;
- CurTokenLexer = 0;
-}
-
-/// ConsumedPeekedToken - Called when Lex() is about to return the PeekedToken
-/// and have it "consumed".
-void Preprocessor::ConsumedPeekedToken() {
- assert(PeekedToken.getLocation().isValid() && "Confused Peeking?");
- // Restore CurLexer, TokenLexer.
- RemoveTopOfLexerStack();
- // Make PeekedToken invalid.
- PeekedToken.startToken();
-}
-
//===----------------------------------------------------------------------===//
// Methods for Entering and Callbacks for leaving various contexts
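For comparison with the LookAhead() implementation removed above, the token-caching approach described in the commit message can be sketched roughly as below. This is only an illustration of the general technique, not the code added by this commit; the member names (CachedTokens, CachedLexPos) and the LexOneTokenFromSource helper are assumptions.

#include <vector>
#include "clang/Lex/Token.h"

// Illustrative-only sketch: peek N tokens ahead over a growable cache instead
// of allocating a temporary token array and re-entering it as a token stream.
class CachingLookaheadSketch {
  std::vector<clang::Token> CachedTokens; // tokens lexed ahead of the consumer
  unsigned CachedLexPos = 0;              // index of the next cached token to hand out

public:
  const clang::Token &PeekAhead(unsigned N) {
    // Lex and cache tokens on demand; repeated nearby lookaheads reuse the
    // cache rather than re-lexing or re-allocating.
    while (CachedLexPos + N >= CachedTokens.size())
      CachedTokens.push_back(LexOneTokenFromSource());
    return CachedTokens[CachedLexPos + N];
  }

private:
  clang::Token LexOneTokenFromSource(); // stands in for the real lexer; hypothetical
};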