author     Chris Lattner <sabre@nondot.org>  2009-01-19 06:46:35 +0000
committer  Chris Lattner <sabre@nondot.org>  2009-01-19 06:46:35 +0000
commit     bcc2a67e5180612417727cbdd8afd0f79fdf726d (patch)
tree       6ec0a17c76a1d8ac80373d73b1e732d125dffc79 /lib
parent     ec0d7a6f4b0699cc9960e6d9fee0f957c64d1cf9 (diff)
Make SourceLocation::getFileLoc private to reduce the API exposure of
SourceLocation. This requires making some cleanups to token pasting
and _Pragma expansion.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@62490 91177308-0d34-0410-b5e6-96231b3b80d8
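
In practice, the cleanup replaces calls to the static SourceLocation::getFileLoc(ChunkID, Offset) with offset-based derivation from an existing location, as seen in the Lexer.cpp hunks below. A minimal sketch of the new call-site pattern, assuming the 2009-era clang/Basic/SourceLocation.h interface; the helper name locAtOffset is illustrative and not part of this commit:

#include "clang/Basic/SourceLocation.h"

using namespace clang;

// Illustrative helper mirroring Lexer::getSourceLocation after this patch:
// the new location is derived from an existing file location plus a
// character offset, so SourceLocation's raw encoding stays private.
static SourceLocation locAtOffset(SourceLocation FileLoc, unsigned CharNo) {
  // Pre-patch form, no longer available once getFileLoc is private:
  //   return SourceLocation::getFileLoc(FileLoc.getChunkID(), CharNo);
  return FileLoc.getFileLocWithOffset(CharNo);
}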
Diffstat (limited to 'lib')
-rw-r--r--  lib/Lex/Lexer.cpp       16
-rw-r--r--  lib/Lex/Pragma.cpp       4
-rw-r--r--  lib/Lex/TokenLexer.cpp  16
3 files changed, 23 insertions, 13 deletions
diff --git a/lib/Lex/Lexer.cpp b/lib/Lex/Lexer.cpp
index 6e5402e8a3..9280526d22 100644
--- a/lib/Lex/Lexer.cpp
+++ b/lib/Lex/Lexer.cpp
@@ -150,13 +150,14 @@ Lexer::Lexer(FileID FID, const SourceManager &SM, const LangOptions &features)
 /// interface that could handle this stuff.  This would pull GetMappedTokenLoc
 /// out of the critical path of the lexer!
 ///
-Lexer *Lexer::Create_PragmaLexer(SourceLocation TokStartLoc, unsigned TokLen,
-                                 Preprocessor &PP) {
+Lexer *Lexer::Create_PragmaLexer(SourceLocation SpellingLoc,
+                                 SourceLocation InstantiationLoc,
+                                 unsigned TokLen, Preprocessor &PP) {
   SourceManager &SM = PP.getSourceManager();
-  SourceLocation SpellingLoc = SM.getSpellingLoc(TokStartLoc);
 
   // Create the lexer as if we were going to lex the file normally.
-  Lexer *L = new Lexer(SM.getCanonicalFileID(SpellingLoc), PP);
+  FileID SpellingFID = SM.getCanonicalFileID(SpellingLoc);
+  Lexer *L = new Lexer(SpellingFID, PP);
 
   // Now that the lexer is created, change the start/end locations so that we
   // just lex the subsection of the file that we want.  This is lexing from a
@@ -168,7 +169,8 @@ Lexer *Lexer::Create_PragmaLexer(SourceLocation TokStartLoc, unsigned TokLen,
 
   // Set the SourceLocation with the remapping information.  This ensures that
   // GetMappedTokenLoc will remap the tokens as they are lexed.
-  L->FileLoc = TokStartLoc;
+  L->FileLoc = SM.getInstantiationLoc(SM.getLocForStartOfFile(SpellingFID),
+                                      InstantiationLoc);
 
   // Ensure that the lexer thinks it is inside a directive, so that end \n will
   // return an EOM token.
@@ -321,7 +323,7 @@ static SourceLocation GetMappedTokenLoc(Preprocessor &PP,
   // characters come from spelling(FileLoc)+Offset.
   SourceLocation InstLoc = SourceMgr.getInstantiationLoc(FileLoc);
   SourceLocation SpellingLoc = SourceMgr.getSpellingLoc(FileLoc);
-  SpellingLoc = SourceLocation::getFileLoc(SpellingLoc.getChunkID(), CharNo);
+  SpellingLoc = SpellingLoc.getFileLocWithOffset(CharNo);
   return SourceMgr.getInstantiationLoc(SpellingLoc, InstLoc);
 }
 
@@ -335,7 +337,7 @@ SourceLocation Lexer::getSourceLocation(const char *Loc) const {
   // the file id from FileLoc with the offset specified.
   unsigned CharNo = Loc-BufferStart;
   if (FileLoc.isFileID())
-    return SourceLocation::getFileLoc(FileLoc.getChunkID(), CharNo);
+    return FileLoc.getFileLocWithOffset(CharNo);
 
   // Otherwise, this is the _Pragma lexer case, which pretends that all of the
   // tokens are lexed from where the _Pragma was defined.
diff --git a/lib/Lex/Pragma.cpp b/lib/Lex/Pragma.cpp
index 3f5a5a9d19..922af09e50 100644
--- a/lib/Lex/Pragma.cpp
+++ b/lib/Lex/Pragma.cpp
@@ -156,11 +156,11 @@ void Preprocessor::Handle_Pragma(Token &Tok) {
 
   // Plop the string (including the newline and trailing null) into a buffer
   // where we can lex it.
-  SourceLocation TokLoc = CreateString(&StrVal[0], StrVal.size(), StrLoc);
+  SourceLocation TokLoc = CreateString(&StrVal[0], StrVal.size());
 
   // Make and enter a lexer object so that we lex and expand the tokens just
   // like any others.
-  Lexer *TL = Lexer::Create_PragmaLexer(TokLoc,
+  Lexer *TL = Lexer::Create_PragmaLexer(TokLoc, StrLoc,
                                         // do not include the null in the count.
                                         StrVal.size()-1, *this);
 
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index 7e56a43c44..82c4d926ae 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -388,13 +388,21 @@ bool TokenLexer::PasteTokens(Token &Tok) {
     } else {
       PP.IncrementPasteCounter(false);
 
-      // Make a lexer to lex this string from.
+      assert(ResultTokLoc.isFileID() &&
+             "Should be a raw location into scratch buffer");
       SourceManager &SourceMgr = PP.getSourceManager();
-      const char *ResultStrData = SourceMgr.getCharacterData(ResultTokLoc);
+      std::pair<FileID, unsigned> LocInfo =
+        SourceMgr.getDecomposedFileLoc(ResultTokLoc);
+
+      const char *ScratchBufStart =SourceMgr.getBufferData(LocInfo.first).first;
+
+      // Make a lexer to lex this string from.  Lex just this one token.
+      const char *ResultStrData = ScratchBufStart+LocInfo.second;
 
       // Make a lexer object so that we lex and expand the paste result.
-      Lexer TL(ResultTokLoc, PP.getLangOptions(),
-               SourceMgr.getBufferData(ResultTokLoc).first,
+      Lexer TL(SourceMgr.getLocForStartOfFile(LocInfo.first),
+               PP.getLangOptions(),
+               ScratchBufStart,
                ResultStrData,
                ResultStrData+LHSLen+RHSLen /*don't include null*/);
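
For _Pragma expansion, Create_PragmaLexer now receives the spelling and instantiation locations separately rather than deriving the spelling location itself, so Handle_Pragma passes both the scratch-buffer location and the original _Pragma site. A minimal sketch of the updated call, assuming the 2009-era interfaces shown in the diff above; the wrapper makePragmaLexer and its parameter names are illustrative, not part of this commit:

#include "clang/Basic/SourceLocation.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/Preprocessor.h"

using namespace clang;

// Illustrative wrapper around the post-patch Create_PragmaLexer call.
// TokLoc is the location of the pragma string in the scratch buffer
// (spelling); PragmaLoc is where the original _Pragma("...") appeared
// (instantiation), so lexed tokens are remapped back to it.
static Lexer *makePragmaLexer(Preprocessor &PP, SourceLocation TokLoc,
                              SourceLocation PragmaLoc,
                              unsigned StrLenWithNull) {
  return Lexer::Create_PragmaLexer(TokLoc, PragmaLoc,
                                   StrLenWithNull - 1 /*don't include null*/,
                                   PP);
}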