author     Ted Kremenek <kremenek@apple.com>    2008-12-11 23:36:38 +0000
committer  Ted Kremenek <kremenek@apple.com>    2008-12-11 23:36:38 +0000
commit     fb645b6547b75ddc2e3c7ab2126ad8beeefca62d (patch)
tree       6fcd9b627758449bc49d257767aa184202008bb4
parent     f62e14dae03604379434d08955bf1087e2639eff (diff)
PTH:
- Added a side-table for each token-cached file that records the preprocessor conditional stack. It tracks which #if directives match which #endifs and where their respective tokens sit in the PTH file, which will allow the Preprocessor to quickly skip excluded conditional branches (see the sketch below).
- Performance testing shows that adding this information (without yet using it) introduces no performance regression.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@60911 91177308-0d34-0410-b5e6-96231b3b80d8
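For readers unfamiliar with the backpatching scheme described above, here is a minimal standalone sketch of the same (offset, target-index) encoding. It is not the code from this commit; the enum, struct, and function names are hypothetical. '#if'/'#ifdef'/'#ifndef' push an entry whose target is filled in later, '#endif' records itself and backpatches the matching opener, and '#elif' both closes the previous entry and opens a new one.

```cpp
// Standalone sketch (hypothetical names) of the '#if'/'#endif' side-table
// backpatching scheme: each directive gets a (file offset, target index)
// entry; openers are patched when their closer is seen.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

typedef uint32_t Offset;

enum DirectiveKind { PP_IF, PP_ELIF, PP_ENDIF };

struct Directive {
  DirectiveKind Kind;
  Offset HashOff;  // offset of the '#' token in the cached-token stream
};

// Build the conditional side-table for one file.
static std::vector<std::pair<Offset, unsigned> >
BuildPPCondTable(const std::vector<Directive>& Ds) {
  std::vector<std::pair<Offset, unsigned> > PPCond;
  std::vector<unsigned> PPStartCond;  // indices of not-yet-closed openers

  for (size_t i = 0; i < Ds.size(); ++i) {
    unsigned index = PPCond.size();
    switch (Ds[i].Kind) {
    case PP_IF:  // also stands in for #ifdef/#ifndef
      PPStartCond.push_back(index);
      PPCond.push_back(std::make_pair(Ds[i].HashOff, 0U));
      break;
    case PP_ENDIF:
      assert(!PPStartCond.empty() && "unbalanced #endif");
      // '#endif' points at itself; backpatch the matching opener.
      PPCond.push_back(std::make_pair(Ds[i].HashOff, index));
      PPCond[PPStartCond.back()].second = index;
      PPStartCond.pop_back();
      break;
    case PP_ELIF:
      assert(!PPStartCond.empty() && "dangling #elif");
      // Closes the previous block and opens a new one that will be
      // backpatched by the next #elif/#endif.
      PPCond.push_back(std::make_pair(Ds[i].HashOff, 0U));
      PPCond[PPStartCond.back()].second = index;
      PPStartCond.pop_back();
      PPStartCond.push_back(index);
      break;
    }
  }
  assert(PPStartCond.empty() && "unterminated conditional");
  return PPCond;
}

int main() {
  // #if (off 10) ... #elif (off 40) ... #endif (off 90)
  Directive Input[] = { {PP_IF, 10}, {PP_ELIF, 40}, {PP_ENDIF, 90} };
  std::vector<Directive> Ds(Input, Input + 3);
  std::vector<std::pair<Offset, unsigned> > T = BuildPPCondTable(Ds);
  for (size_t i = 0; i < T.size(); ++i)
    std::printf("entry %u: offset %u -> target entry %u\n",
                (unsigned)i, (unsigned)T[i].first, T[i].second);
  // Prints: entry 0: 10 -> 1, entry 1: 40 -> 2, entry 2: 90 -> 2
  return 0;
}
```

In this encoding an '#if' or '#elif' entry points at the entry that ends its region, while an '#endif' points at itself, so a reader can tell openers from closers without re-lexing the directive.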
-rw-r--r--   Driver/CacheTokens.cpp | 61
-rw-r--r--   lib/Lex/PTHLexer.cpp   | 30
2 files changed, 73 insertions, 18 deletions
diff --git a/Driver/CacheTokens.cpp b/Driver/CacheTokens.cpp
index ceb16b0301..21e5ee09ae 100644
--- a/Driver/CacheTokens.cpp
+++ b/Driver/CacheTokens.cpp
@@ -27,7 +27,7 @@ using namespace clang;
 
 typedef uint32_t Offset;
 
-typedef llvm::DenseMap<const FileEntry*,Offset> PCHMap;
+typedef llvm::DenseMap<const FileEntry*,std::pair<Offset,Offset> > PCHMap;
 typedef llvm::DenseMap<const IdentifierInfo*,uint32_t> IDMap;
 
 static void Emit32(llvm::raw_ostream& Out, uint32_t V) {
@@ -140,18 +140,25 @@ Offset EmitFileTable(llvm::raw_fd_ostream& Out, SourceManager& SM, PCHMap& PM) {
     unsigned size = strlen(Name);
     Emit32(Out, size);
     EmitBuf(Out, Name, Name+size);
-    Emit32(Out, I->second);
+    Emit32(Out, I->second.first);
+    Emit32(Out, I->second.second);
   }
 
   return off;
 }
 
-static Offset LexTokens(llvm::raw_fd_ostream& Out, Lexer& L, Preprocessor& PP,
-                        uint32_t& idcount, IDMap& IM) {
+static std::pair<Offset,Offset>
+LexTokens(llvm::raw_fd_ostream& Out, Lexer& L, Preprocessor& PP,
+          uint32_t& idcount, IDMap& IM) {
   // Record the location within the token file.
   Offset off = (Offset) Out.tell();
   SourceManager& SMgr = PP.getSourceManager();
+
+  // Keep track of matching '#if' ... '#endif'.
+  typedef std::vector<std::pair<Offset, unsigned> > PPCondTable;
+  PPCondTable PPCond;
+  std::vector<unsigned> PPStartCond;
 
   Token Tok;
@@ -164,6 +171,7 @@ static Offset LexTokens(llvm::raw_fd_ostream& Out, Lexer& L, Preprocessor& PP,
     else if (Tok.is(tok::hash) && Tok.isAtStartOfLine()) {
       // Special processing for #include.  Store the '#' token and lex
       // the next token.
+      Offset HashOff = (Offset) Out.tell();
       EmitToken(Out, Tok, SMgr, idcount, IM);
       L.LexFromRawLexer(Tok);
@@ -189,11 +197,52 @@ static Offset LexTokens(llvm::raw_fd_ostream& Out, Lexer& L, Preprocessor& PP,
         if (Tok.is(tok::identifier))
           Tok.setIdentifierInfo(PP.LookUpIdentifierInfo(Tok));
       }
+      else if (K == tok::pp_if || K == tok::pp_ifdef || K == tok::pp_ifndef) {
+        // Ad an entry for '#if' and friends.  We initially set the target index
+        // to 0.  This will get backpatched when we hit #endif.
+        PPStartCond.push_back(PPCond.size());
+        PPCond.push_back(std::make_pair((Offset) HashOff, 0U));
+      }
+      else if (K == tok::pp_endif) {
+        assert(!PPStartCond.empty());
+        // Add an entry for '#endif'.  We set the target table index to itself.
+        unsigned index = PPCond.size();
+        PPCond.push_back(std::make_pair((Offset) HashOff, index));
+        // Backpatch the opening '#if' entry.
+        assert(PPCond[PPStartCond.back()].second == 0);
+        PPCond[PPStartCond.back()].second = index;
+        PPStartCond.pop_back();
+      }
+      else if (K == tok::pp_elif) {
+        assert(!PPStartCond.empty());
+        // Add an entry for '#elif'.  This serves as both a closing and
+        // opening of a conditional block.  This means that its entry
+        // will get backpatched later.
+        unsigned index = PPCond.size();
+        PPCond.push_back(std::make_pair((Offset) HashOff, 0U));
+        // Backpatch the previous '#if' entry.
+        assert(PPCond[PPStartCond.back()].second == 0);
+        PPCond[PPStartCond.back()].second = index;
+        PPStartCond.pop_back();
+        // Now add '#elif' as a new block opening.
+        PPStartCond.push_back(index);
+      }
     }
   }
   while (EmitToken(Out, Tok, SMgr, idcount, IM), Tok.isNot(tok::eof));
+
+  // Next write out PPCond.
+  Offset PPCondOff = (Offset) Out.tell();
+
+  // Write out the size of PPCond so that clients can tell if the table is
+  // empty.
+  Emit32(Out, PPCond.size());
 
-  return off;
+  for (PPCondTable::iterator I=PPCond.begin(), E=PPCond.end(); I!=E; ++I) {
+    Emit32(Out, I->first - off);
+    Emit32(Out, I->second);
+  }
+
+  return std::make_pair(off,PPCondOff);
 }
@@ -243,7 +292,7 @@ void clang::CacheTokens(Preprocessor& PP, const std::string& OutFile) {
     Lexer L(SourceLocation::getFileLoc(I.getFileID(), 0), LOpts,
             B->getBufferStart(), B->getBufferEnd(), B);
-    
+
     PM[FE] = LexTokens(Out, L, PP, idcount, IM);
   }
diff --git a/lib/Lex/PTHLexer.cpp b/lib/Lex/PTHLexer.cpp
index 3afbb5b717..0adcf410df 100644
--- a/lib/Lex/PTHLexer.cpp
+++ b/lib/Lex/PTHLexer.cpp
@@ -194,19 +194,24 @@ namespace {
 class VISIBILITY_HIDDEN PTHFileLookup {
 public:
   class Val {
-    uint32_t v;
+    uint32_t TokenOff;
+    uint32_t PPCondOff;
+
   public:
-    Val() : v(~0) {}
-    Val(uint32_t x) : v(x) {}
+    Val() : TokenOff(~0) {}
+    Val(uint32_t toff, uint32_t poff) : TokenOff(toff), PPCondOff(poff) {}
 
-    operator uint32_t() const {
-      assert(v != ~((uint32_t)0) && "PTHFileLookup entry initialized.");
-      return v;
+    uint32_t getTokenOffset() const {
+      assert(TokenOff != ~((uint32_t)0) && "PTHFileLookup entry initialized.");
+      return TokenOff;
     }
 
-    Val& operator=(uint32_t x) { v = x; return *this; }
-    bool isValid() const { return v != ~((uint32_t)0); }
+    uint32_t gettPPCondOffset() const {
+      assert(TokenOff != ~((uint32_t)0) && "PTHFileLookup entry initialized.");
+      return PPCondOff;
+    }
+
+    bool isValid() const { return TokenOff != ~((uint32_t)0); }
   };
 
 private:
@@ -228,7 +233,8 @@ public:
       uint32_t len = Read32(D);
       const char* s = D;
       D += len;
-      FileMap.GetOrCreateValue(s, s+len).getValue() = Read32(D);
+      uint32_t TokenOff = Read32(D);
+      FileMap.GetOrCreateValue(s, s+len).getValue() = Val(TokenOff, Read32(D));
     }
   }
 };
@@ -351,13 +357,13 @@ PTHLexer* PTHManager::CreateLexer(unsigned FileID, const FileEntry* FE) {
   // Lookup the FileEntry object in our file lookup data structure.  It will
   // return a variant that indicates whether or not there is an offset within
   // the PTH file that contains cached tokens.
-  PTHFileLookup::Val Off = ((PTHFileLookup*) FileLookup)->Lookup(FE);
+  PTHFileLookup::Val FileData = ((PTHFileLookup*) FileLookup)->Lookup(FE);
 
-  if (!Off.isValid()) // No tokens available.
+  if (!FileData.isValid()) // No tokens available.
     return 0;
 
   // Compute the offset of the token data within the buffer.
-  const char* data = Buf->getBufferStart() + Off;
+  const char* data = Buf->getBufferStart() + FileData.getTokenOffset();
   assert(data < Buf->getBufferEnd());
   return new PTHLexer(PP, SourceLocation::getFileLoc(FileID, 0), data, *this);
 }
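This commit only emits the table; the actual branch skipping lands later in the Preprocessor. As a rough illustration of how a reader could consume what LexTokens now writes (a uint32 entry count followed by (offset-relative-to-token-data, target-index) pairs), here is a hedged sketch. The byte order assumed by Read32, the struct names, and FindBlockEnd are assumptions for illustration only, not the clang API.

```cpp
// Hedged sketch (not the committed clang code): deserialize the conditional
// side-table and, given the relative offset of a '#if' whose condition is
// false, find where its block ends so the lexer could jump there.
#include <cstdint>
#include <cstdio>
#include <vector>

typedef uint32_t Offset;

// One deserialized entry: offset of the '#' token relative to the file's
// token data, and the index of the entry that closes this block.
struct PPCondEntry {
  Offset HashOff;
  uint32_t TargetIndex;
};

// Read a uint32 assuming the emitter wrote the four bytes low to high
// (assumption about Emit32's byte order).
static uint32_t Read32(const char*& D) {
  const unsigned char* P = reinterpret_cast<const unsigned char*>(D);
  uint32_t V = (uint32_t)P[0] | ((uint32_t)P[1] << 8) |
               ((uint32_t)P[2] << 16) | ((uint32_t)P[3] << 24);
  D += 4;
  return V;
}

// Deserialize the table written at PPCondOff: count, then (offset, index).
static std::vector<PPCondEntry> ReadPPCondTable(const char* D) {
  uint32_t N = Read32(D);
  std::vector<PPCondEntry> Table(N);
  for (uint32_t i = 0; i < N; ++i) {
    Table[i].HashOff = Read32(D);
    Table[i].TargetIndex = Read32(D);
  }
  return Table;
}

// Scan for the entry whose '#' offset matches, then follow its target index.
// Returns 0 if the offset is not in the table (sketch only).
static Offset FindBlockEnd(const std::vector<PPCondEntry>& Table,
                           Offset HashOff) {
  for (size_t i = 0; i < Table.size(); ++i)
    if (Table[i].HashOff == HashOff)
      return Table[Table[i].TargetIndex].HashOff;
  return 0;
}

// Helper for the demo: write a uint32 in the order Read32 expects.
static void Write32(std::vector<char>& Out, uint32_t V) {
  for (int i = 0; i < 4; ++i)
    Out.push_back((char)((V >> (8 * i)) & 0xFF));
}

int main() {
  // Fabricated table for "#if (rel. off 12) ... #endif (rel. off 80)".
  std::vector<char> Buf;
  Write32(Buf, 2);                    // entry count
  Write32(Buf, 12); Write32(Buf, 1);  // entry 0: '#if'    -> entry 1
  Write32(Buf, 80); Write32(Buf, 1);  // entry 1: '#endif' -> itself
  std::vector<PPCondEntry> T = ReadPPCondTable(&Buf[0]);
  std::printf("#if at +12 skips to +%u\n", (unsigned)FindBlockEnd(T, 12));
  return 0;
}
```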