author    | Ted Kremenek <kremenek@apple.com>        | 2008-12-23 19:24:24 +0000
committer | Ted Kremenek <kremenek@apple.com>        | 2008-12-23 19:24:24 +0000
commit    | 59d08cb672136322375e5400578ee1fbd0947de2 (patch)
tree      | f7e923cc68205635397dc35d09a2fc78e1b00d50 /lib/Lex/PTHLexer.cpp
parent    | 18d9afb815bd8aff885dd64c5078760b3398d7be (diff)
PTH: Remove some methods and simplify some conditions in PTHLexer::Lex(). No big functionality change.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@61381 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/Lex/PTHLexer.cpp')
-rw-r--r-- | lib/Lex/PTHLexer.cpp | 88
1 file changed, 30 insertions, 58 deletions
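
The diff below collapses the post-processing in PTHLexer::Lex() into a chain of early-return checks on the already-decoded token kind k, instead of repeated Tok.is(...) queries, and folds the old LexEndOfFile() helper into the eof branch. As a rough illustration of that control-flow shape (a simplified, self-contained sketch, not the actual Clang code; the kinds and handler names here are made-up stand-ins):

#include <cassert>
#include <iostream>
#include <string>

// Simplified stand-ins for clang's token kinds and preprocessor hooks.
enum class Kind { Identifier, Eof, Hash, Eom, Other };

struct Tok {
  Kind K = Kind::Other;
  bool AtStartOfLine = false;
};

// Each special kind is handled with an early return, mirroring the
// "if (k == tok::xxx) { ...; return; }" chain in the patched Lex().
std::string processToken(const Tok &T, bool ParsingDirective) {
  Kind k = T.K;

  if (k == Kind::Identifier)
    return "HandleIdentifier";           // cf. PP->HandleIdentifier(Tok)

  if (k == Kind::Eof)
    return "HandleEndOfFile";            // cf. the inlined LexEndOfFile() logic

  if (k == Kind::Hash && T.AtStartOfLine)
    return "HandleDirective";            // cf. PP->HandleDirective(Tok)

  if (k == Kind::Eom) {
    assert(ParsingDirective && "eom only appears inside a directive");
    return "end of directive";
  }

  return "ordinary token";               // everything else falls through
}

int main() {
  Tok T;
  T.K = Kind::Hash;
  T.AtStartOfLine = true;
  std::cout << processToken(T, /*ParsingDirective=*/false) << '\n';
}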
diff --git a/lib/Lex/PTHLexer.cpp b/lib/Lex/PTHLexer.cpp
index b60b915d92..3924c9cd2d 100644
--- a/lib/Lex/PTHLexer.cpp
+++ b/lib/Lex/PTHLexer.cpp
@@ -87,7 +87,8 @@ LexNextToken:
 
   Tok.startToken();
   Tok.setKind(k);
-  Tok.setFlag(flags);
+  Tok.setFlag(flags);
+  assert(!LexingRawMode);
   Tok.setIdentifierInfo(perID ? PTHMgr.GetIdentifierInfo(perID-1) : 0);
   Tok.setLocation(SourceLocation::getFileLoc(FileID, FileOffset));
   Tok.setLength(Len);
@@ -96,80 +97,51 @@ LexNextToken:
   // Process the token.
   //===--------------------------------------==//
 
-  if (Tok.is(tok::eof)) {
+  if (k == tok::identifier) {
+    MIOpt.ReadToken();
+    return PP->HandleIdentifier(Tok);
+  }
+
+  if (k == tok::eof) {
     // Save the end-of-file token.
     EofToken = Tok;
 
     Preprocessor *PPCache = PP;
-
-    if (LexEndOfFile(Tok))
+
+    assert(!ParsingPreprocessorDirective);
+    assert(!LexingRawMode);
+
+    // FIXME: Issue diagnostics similar to Lexer.
+    if (PP->HandleEndOfFile(Tok, false))
       return;
-
+
     assert(PPCache && "Raw buffer::LexEndOfFile should return a token");
     return PPCache->Lex(Tok);
   }
-
-  MIOpt.ReadToken();
-
-  if (Tok.is(tok::eom)) {
-    ParsingPreprocessorDirective = false;
-    return;
-  }
 
-#if 0
-  SourceManager& SM = PP->getSourceManager();
-  SourceLocation L = Tok.getLocation();
-
-  static const char* last = 0;
-  const char* next = SM.getContentCacheForLoc(L)->Entry->getName();
-  if (next != last) {
-    last = next;
-    llvm::cerr << next << '\n';
-  }
-
-  llvm::cerr << "line " << SM.getLogicalLineNumber(L) << " col " <<
-    SM.getLogicalColumnNumber(L) << '\n';
-#endif
+  if (k == tok::hash && Tok.isAtStartOfLine()) {
+    LastHashTokPtr = CurPtr - DISK_TOKEN_SIZE;
+    assert(!LexingRawMode);
+    PP->HandleDirective(Tok);
 
-  if (Tok.is(tok::hash)) {
-    if (Tok.isAtStartOfLine()) {
-      LastHashTokPtr = CurPtr - DISK_TOKEN_SIZE;
-      if (!LexingRawMode) {
-        PP->HandleDirective(Tok);
-
-        if (PP->isCurrentLexer(this))
-          goto LexNextToken;
-
-        return PP->Lex(Tok);
-      }
-    }
-  }
-
-  if (Tok.is(tok::identifier)) {
-    if (LexingRawMode) {
-      Tok.setIdentifierInfo(0);
-      return;
-    }
+    if (PP->isCurrentLexer(this))
+      goto LexNextToken;
 
-    return PP->HandleIdentifier(Tok);
+    return PP->Lex(Tok);
   }
-
-  assert(!Tok.is(tok::eom) || ParsingPreprocessorDirective);
-}
 
+  if (k == tok::eom) {
+    assert(ParsingPreprocessorDirective);
+    ParsingPreprocessorDirective = false;
+    return;
+  }
 
-// FIXME: This method can just be inlined into Lex().
-bool PTHLexer::LexEndOfFile(Token &Tok) {
-  assert(!ParsingPreprocessorDirective);
-  assert(!LexingRawMode);
-
-  // FIXME: Issue diagnostics similar to Lexer.
-  return PP->HandleEndOfFile(Tok, false);
+  MIOpt.ReadToken();
 }
 
 // FIXME: We can just grab the last token instead of storing a copy
 // into EofToken.
-void PTHLexer::setEOF(Token& Tok) {
+void PTHLexer::getEOF(Token& Tok) {
   assert(!EofToken.is(tok::eof));
   Tok = EofToken;
 }
@@ -304,7 +276,7 @@ SourceLocation PTHLexer::getSourceLocation() {
   // handling a #included file. Just read the necessary data from the token
   // data buffer to construct the SourceLocation object.
   // NOTE: This is a virtual function; hence it is defined out-of-line.
-  const char* p = CurPtr + (1 + 1 + 4);
+  const char* p = CurPtr + (1 + 1 + 3);
   uint32_t offset = ((uint32_t) ((uint8_t) p[0]))
     | (((uint32_t) ((uint8_t) p[1])) << 8) |
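
For context on the last hunk: getSourceLocation() reads a 32-bit file offset directly out of the fixed-size on-disk token record, one byte at a time in little-endian order, and the patch only changes how many leading bytes are skipped before that field. A minimal sketch of the byte-decoding pattern (standalone demo with a made-up buffer, not the real PTH record layout):

#include <cstdint>
#include <cstdio>

// Reassemble a 32-bit value from four little-endian bytes -- the same
// shift-and-or pattern getSourceLocation() applies to the token record.
static uint32_t ReadLE32(const unsigned char *p) {
  return  ((uint32_t)p[0])
        | (((uint32_t)p[1]) << 8)
        | (((uint32_t)p[2]) << 16)
        | (((uint32_t)p[3]) << 24);
}

int main() {
  // Made-up buffer: the four bytes starting at index 3 encode 0x00003039
  // (12345) in little-endian order, standing in for the offset field that
  // the real code locates with pointer arithmetic into CurPtr.
  const unsigned char buf[] = {0xAA, 0xBB, 0xCC, 0x39, 0x30, 0x00, 0x00};
  std::printf("offset = %u\n", ReadLE32(buf + 3));  // prints 12345
}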