| author | Chris Lattner <sabre@nondot.org> | 2009-11-30 04:18:44 +0000 |
|---|---|---|
| committer | Chris Lattner <sabre@nondot.org> | 2009-11-30 04:18:44 +0000 |
| commit | 6e2901407bff59aeb4cc301cc58b034723d0eb49 (patch) | |
| tree | 4deae2d94f73fdf3c0608ac482e7e5666e8c0375 | |
| parent | 76ed1f76f986a2c052654b81e2ed9dfb86dd79d8 (diff) | |
Fix PR5633 by making the preprocessor handle the case where we can
stat a file but mmapping it fails. In this case, we emit an
error like:
t.c:1:10: fatal error: error opening file '../../foo.h'
instead of "cannot find file".
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@90110 91177308-0d34-0410-b5e6-96231b3b80d8
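For context, the scenario the patch addresses is a file that is visible to stat() but whose contents cannot actually be mapped into memory. A minimal, standalone sketch of that situation using plain POSIX calls (this is an illustration only, not part of the patch; the path and messages are made up to mirror the diagnostics named in this commit):

```cpp
// Standalone illustration (not Clang code): stat() succeeds, but opening or
// mmap()ing the contents fails, so the right diagnostic is
// "error opening file", not "file not found".
#include <cstdio>
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

int main(int argc, char **argv) {
  const char *Path = argc > 1 ? argv[1] : "foo.h";  // illustrative path
  struct stat SB;
  if (stat(Path, &SB) != 0) {
    // File is not there at all: the old (and still correct) diagnostic.
    std::fprintf(stderr, "fatal error: '%s' file not found\n", Path);
    return 1;
  }
  // The file exists; now try to get at its bytes, the step PR5633 hits.
  int FD = open(Path, O_RDONLY);
  void *Buf = (FD >= 0 && SB.st_size > 0)
                  ? mmap(0, SB.st_size, PROT_READ, MAP_PRIVATE, FD, 0)
                  : MAP_FAILED;
  if (FD < 0 || (SB.st_size > 0 && Buf == MAP_FAILED)) {
    // This is the case the patch turns into err_pp_error_opening_file.
    std::fprintf(stderr, "fatal error: error opening file '%s'\n", Path);
    if (FD >= 0) close(FD);
    return 1;
  }
  if (SB.st_size > 0) munmap(Buf, SB.st_size);
  close(FD);
  return 0;
}
```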
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | include/clang/Basic/DiagnosticLexKinds.td | 2 |
| -rw-r--r-- | include/clang/Lex/Lexer.h | 5 |
| -rw-r--r-- | include/clang/Lex/Preprocessor.h | 6 |
| -rw-r--r-- | lib/Frontend/CacheTokens.cpp | 3 |
| -rw-r--r-- | lib/Frontend/DiagChecker.cpp | 3 |
| -rw-r--r-- | lib/Frontend/FrontendActions.cpp | 3 |
| -rw-r--r-- | lib/Frontend/RewriteMacros.cpp | 3 |
| -rw-r--r-- | lib/Frontend/VerifyDiagnosticsClient.cpp | 8 |
| -rw-r--r-- | lib/Lex/Lexer.cpp | 11 |
| -rw-r--r-- | lib/Lex/PPDirectives.cpp | 5 |
| -rw-r--r-- | lib/Lex/PPLexerChange.cpp | 17 |
| -rw-r--r-- | lib/Rewrite/HTMLRewrite.cpp | 6 |
| -rw-r--r-- | lib/Rewrite/TokenRewriter.cpp | 3 |

13 files changed, 49 insertions, 26 deletions
diff --git a/include/clang/Basic/DiagnosticLexKinds.td b/include/clang/Basic/DiagnosticLexKinds.td
index 7f3f4ea1fc..39123d9b37 100644
--- a/include/clang/Basic/DiagnosticLexKinds.td
+++ b/include/clang/Basic/DiagnosticLexKinds.td
@@ -170,6 +170,8 @@ def ext_pp_counter : Extension<
 def err_pp_invalid_directive : Error<"invalid preprocessing directive">;
 def err_pp_hash_error : Error<"#error%0">;
 def err_pp_file_not_found : Error<"'%0' file not found">, DefaultFatal;
+def err_pp_error_opening_file : Error<
+  "error opening file '%0'">, DefaultFatal;
 def err_pp_empty_filename : Error<"empty filename">;
 def err_pp_include_too_deep : Error<"#include nested too deeply">;
 def err_pp_expects_filename : Error<"expected \"FILENAME\" or <FILENAME>">;
diff --git a/include/clang/Lex/Lexer.h b/include/clang/Lex/Lexer.h
index c2db4d357c..52bf194883 100644
--- a/include/clang/Lex/Lexer.h
+++ b/include/clang/Lex/Lexer.h
@@ -78,7 +78,7 @@ public:
   /// with the specified preprocessor managing the lexing process. This lexer
   /// assumes that the associated file buffer and Preprocessor objects will
   /// outlive it, so it doesn't take ownership of either of them.
-  Lexer(FileID FID, Preprocessor &PP);
+  Lexer(FileID FID, const llvm::MemoryBuffer *InputBuffer, Preprocessor &PP);

   /// Lexer constructor - Create a new raw lexer object. This object is only
   /// suitable for calls to 'LexRawToken'. This lexer assumes that the text
@@ -89,7 +89,8 @@ public:
   /// Lexer constructor - Create a new raw lexer object. This object is only
   /// suitable for calls to 'LexRawToken'. This lexer assumes that the text
   /// range will outlive it, so it doesn't take ownership of it.
-  Lexer(FileID FID, const SourceManager &SM, const LangOptions &Features);
+  Lexer(FileID FID, const llvm::MemoryBuffer *InputBuffer,
+        const SourceManager &SM, const LangOptions &Features);

   /// Create_PragmaLexer: Lexer constructor - Create a new lexer object for
   /// _Pragma expansion. This has a variety of magic semantics that this method
diff --git a/include/clang/Lex/Preprocessor.h b/include/clang/Lex/Preprocessor.h
index 1c0036e3ed..edd34b7189 100644
--- a/include/clang/Lex/Preprocessor.h
+++ b/include/clang/Lex/Preprocessor.h
@@ -329,9 +329,9 @@ public:
   void EnterMainSourceFile();

   /// EnterSourceFile - Add a source file to the top of the include stack and
-  /// start lexing tokens from it instead of the current buffer. If isMainFile
-  /// is true, this is the main file for the translation unit.
-  void EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir);
+  /// start lexing tokens from it instead of the current buffer. Return true
+  /// on failure.
+  bool EnterSourceFile(FileID CurFileID, const DirectoryLookup *Dir);

   /// EnterMacro - Add a Macro to the top of the include stack and start lexing
   /// tokens from it instead of the current buffer. Args specifies the
diff --git a/lib/Frontend/CacheTokens.cpp b/lib/Frontend/CacheTokens.cpp
index 98126c89eb..7296246df2 100644
--- a/lib/Frontend/CacheTokens.cpp
+++ b/lib/Frontend/CacheTokens.cpp
@@ -482,7 +482,8 @@ void PTHWriter::GeneratePTH(const std::string *MainFile) {
     if (!B) continue;

     FileID FID = SM.createFileID(FE, SourceLocation(), SrcMgr::C_User);
-    Lexer L(FID, SM, LOpts);
+    const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
+    Lexer L(FID, FromFile, SM, LOpts);
     PM.insert(FE, LexTokens(L));
   }

diff --git a/lib/Frontend/DiagChecker.cpp b/lib/Frontend/DiagChecker.cpp
index 26bb6ccfa7..e7a66b1729 100644
--- a/lib/Frontend/DiagChecker.cpp
+++ b/lib/Frontend/DiagChecker.cpp
@@ -149,7 +149,8 @@ static void FindExpectedDiags(Preprocessor &PP,
   FileID FID = PP.getSourceManager().getMainFileID();

   // Create a lexer to lex all the tokens of the main file in raw mode.
-  Lexer RawLex(FID, PP.getSourceManager(), PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = PP.getSourceManager().getBuffer(FID);
+  Lexer RawLex(FID, FromFile, PP.getSourceManager(), PP.getLangOptions());

   // Return comments as tokens, this is how we find expected diagnostics.
   RawLex.SetCommentRetentionState(true);
diff --git a/lib/Frontend/FrontendActions.cpp b/lib/Frontend/FrontendActions.cpp
index 6cb3928d51..8092b715e3 100644
--- a/lib/Frontend/FrontendActions.cpp
+++ b/lib/Frontend/FrontendActions.cpp
@@ -192,7 +192,8 @@ void DumpRawTokensAction::ExecuteAction() {
   SourceManager &SM = PP.getSourceManager();

   // Start lexing the specified input file.
-  Lexer RawLex(SM.getMainFileID(), SM, PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(SM.getMainFileID());
+  Lexer RawLex(SM.getMainFileID(), FromFile, SM, PP.getLangOptions());
   RawLex.SetKeepWhitespaceMode(true);

   Token RawTok;
diff --git a/lib/Frontend/RewriteMacros.cpp b/lib/Frontend/RewriteMacros.cpp
index b5d59c0aa4..0bcbd4ff0e 100644
--- a/lib/Frontend/RewriteMacros.cpp
+++ b/lib/Frontend/RewriteMacros.cpp
@@ -65,7 +65,8 @@ static void LexRawTokensFromMainFile(Preprocessor &PP,

   // Create a lexer to lex all the tokens of the main file in raw mode. Even
   // though it is in raw mode, it will not return comments.
-  Lexer RawLex(SM.getMainFileID(), SM, PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(SM.getMainFileID());
+  Lexer RawLex(SM.getMainFileID(), FromFile, SM, PP.getLangOptions());

   // Switch on comment lexing because we really do want them.
   RawLex.SetCommentRetentionState(true);
diff --git a/lib/Frontend/VerifyDiagnosticsClient.cpp b/lib/Frontend/VerifyDiagnosticsClient.cpp
index 2891aec504..99ec910be0 100644
--- a/lib/Frontend/VerifyDiagnosticsClient.cpp
+++ b/lib/Frontend/VerifyDiagnosticsClient.cpp
@@ -164,12 +164,14 @@ static void FindExpectedDiags(Preprocessor &PP,
                               DiagList &ExpectedNotes) {
   // Create a raw lexer to pull all the comments out of the main file. We don't
   // want to look in #include'd headers for expected-error strings.
-  FileID FID = PP.getSourceManager().getMainFileID();
-  if (PP.getSourceManager().getMainFileID().isInvalid())
+  SourceManager &SM = PP.getSourceManager();
+  FileID FID = SM.getMainFileID();
+  if (SM.getMainFileID().isInvalid())
     return;

   // Create a lexer to lex all the tokens of the main file in raw mode.
-  Lexer RawLex(FID, PP.getSourceManager(), PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
+  Lexer RawLex(FID, FromFile, SM, PP.getLangOptions());

   // Return comments as tokens, this is how we find expected diagnostics.
   RawLex.SetCommentRetentionState(true);
diff --git a/lib/Lex/Lexer.cpp b/lib/Lex/Lexer.cpp
index f4a4432410..52a7a04567 100644
--- a/lib/Lex/Lexer.cpp
+++ b/lib/Lex/Lexer.cpp
@@ -95,13 +95,11 @@ void Lexer::InitLexer(const char *BufStart, const char *BufPtr,
 /// with the specified preprocessor managing the lexing process. This lexer
 /// assumes that the associated file buffer and Preprocessor objects will
 /// outlive it, so it doesn't take ownership of either of them.
-Lexer::Lexer(FileID FID, Preprocessor &PP)
+Lexer::Lexer(FileID FID, const llvm::MemoryBuffer *InputFile, Preprocessor &PP)
   : PreprocessorLexer(&PP, FID),
     FileLoc(PP.getSourceManager().getLocForStartOfFile(FID)),
     Features(PP.getLangOptions()) {

-  const llvm::MemoryBuffer *InputFile = PP.getSourceManager().getBuffer(FID);
-
   InitLexer(InputFile->getBufferStart(), InputFile->getBufferStart(),
             InputFile->getBufferEnd());

@@ -129,9 +127,9 @@ Lexer::Lexer(SourceLocation fileloc, const LangOptions &features,
 /// Lexer constructor - Create a new raw lexer object. This object is only
 /// suitable for calls to 'LexRawToken'. This lexer assumes that the text
 /// range will outlive it, so it doesn't take ownership of it.
-Lexer::Lexer(FileID FID, const SourceManager &SM, const LangOptions &features)
+Lexer::Lexer(FileID FID, const llvm::MemoryBuffer *FromFile,
+             const SourceManager &SM, const LangOptions &features)
   : FileLoc(SM.getLocForStartOfFile(FID)), Features(features) {
-  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);

   InitLexer(FromFile->getBufferStart(), FromFile->getBufferStart(),
             FromFile->getBufferEnd());
@@ -163,7 +161,8 @@ Lexer *Lexer::Create_PragmaLexer(SourceLocation SpellingLoc,

   // Create the lexer as if we were going to lex the file normally.
   FileID SpellingFID = SM.getFileID(SpellingLoc);
-  Lexer *L = new Lexer(SpellingFID, PP);
+  const llvm::MemoryBuffer *InputFile = SM.getBuffer(SpellingFID);
+  Lexer *L = new Lexer(SpellingFID, InputFile, PP);

   // Now that the lexer is created, change the start/end locations so that we
   // just lex the subsection of the file that we want. This is lexing from a
diff --git a/lib/Lex/PPDirectives.cpp b/lib/Lex/PPDirectives.cpp
index dc7d95e701..9caca339be 100644
--- a/lib/Lex/PPDirectives.cpp
+++ b/lib/Lex/PPDirectives.cpp
@@ -16,6 +16,7 @@
 #include "clang/Lex/HeaderSearch.h"
 #include "clang/Lex/MacroInfo.h"
 #include "clang/Lex/LexDiagnostic.h"
+#include "clang/Basic/FileManager.h"
 #include "clang/Basic/SourceManager.h"
 #include "llvm/ADT/APInt.h"
 using namespace clang;
@@ -1111,7 +1112,9 @@ void Preprocessor::HandleIncludeDirective(Token &IncludeTok,
   }

   // Finally, if all is good, enter the new file!
-  EnterSourceFile(FID, CurDir);
+  if (EnterSourceFile(FID, CurDir))
+    Diag(FilenameTok, diag::err_pp_error_opening_file)
+      << std::string(SourceMgr.getFileEntryForID(FID)->getName());
 }

 /// HandleIncludeNextDirective - Implements #include_next.
diff --git a/lib/Lex/PPLexerChange.cpp b/lib/Lex/PPLexerChange.cpp
index d5d6a6e972..8a61d7b9c2 100644
--- a/lib/Lex/PPLexerChange.cpp
+++ b/lib/Lex/PPLexerChange.cpp
@@ -64,7 +64,7 @@ PreprocessorLexer *Preprocessor::getCurrentFileLexer() const {

 /// EnterSourceFile - Add a source file to the top of the include stack and
 /// start lexing tokens from it instead of the current buffer.
-void Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir) {
+bool Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir) {
   assert(CurTokenLexer == 0 && "Cannot #include a file inside a macro!");
   ++NumEnteredSourceFiles;

@@ -72,10 +72,19 @@ void Preprocessor::EnterSourceFile(FileID FID, const DirectoryLookup *CurDir) {
     MaxIncludeStackDepth = IncludeMacroStack.size();

   if (PTH) {
-    if (PTHLexer *PL = PTH->CreateLexer(FID))
-      return EnterSourceFileWithPTH(PL, CurDir);
+    if (PTHLexer *PL = PTH->CreateLexer(FID)) {
+      EnterSourceFileWithPTH(PL, CurDir);
+      return false;
+    }
   }
-  EnterSourceFileWithLexer(new Lexer(FID, *this), CurDir);
+
+  // Get the MemoryBuffer for this FID, if it fails, we fail.
+  const llvm::MemoryBuffer *InputFile = getSourceManager().getBuffer(FID);
+  if (InputFile == 0)
+    return true;
+
+  EnterSourceFileWithLexer(new Lexer(FID, InputFile, *this), CurDir);
+  return false;
 }

 /// EnterSourceFileWithLexer - Add a source file to the top of the include stack
diff --git a/lib/Rewrite/HTMLRewrite.cpp b/lib/Rewrite/HTMLRewrite.cpp
index b4bf419bc5..342b0e6ef5 100644
--- a/lib/Rewrite/HTMLRewrite.cpp
+++ b/lib/Rewrite/HTMLRewrite.cpp
@@ -353,7 +353,8 @@ void html::SyntaxHighlight(Rewriter &R, FileID FID, const Preprocessor &PP) {
   RewriteBuffer &RB = R.getEditBuffer(FID);

   const SourceManager &SM = PP.getSourceManager();
-  Lexer L(FID, SM, PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
+  Lexer L(FID, FromFile, SM, PP.getLangOptions());
   const char *BufferStart = L.getBufferStart();

   // Inform the preprocessor that we want to retain comments as tokens, so we
@@ -444,7 +445,8 @@ void html::HighlightMacros(Rewriter &R, FileID FID, const Preprocessor& PP) {
   const SourceManager &SM = PP.getSourceManager();
   std::vector<Token> TokenStream;

-  Lexer L(FID, SM, PP.getLangOptions());
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
+  Lexer L(FID, FromFile, SM, PP.getLangOptions());

   // Lex all the tokens in raw mode, to avoid entering #includes or expanding
   // macros.
diff --git a/lib/Rewrite/TokenRewriter.cpp b/lib/Rewrite/TokenRewriter.cpp
index 0effbb18b8..789d53f4af 100644
--- a/lib/Rewrite/TokenRewriter.cpp
+++ b/lib/Rewrite/TokenRewriter.cpp
@@ -23,7 +23,8 @@ TokenRewriter::TokenRewriter(FileID FID, SourceManager &SM,
   ScratchBuf.reset(new ScratchBuffer(SM));

   // Create a lexer to lex all the tokens of the main file in raw mode.
-  Lexer RawLex(FID, SM, LangOpts);
+  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
+  Lexer RawLex(FID, FromFile, SM, LangOpts);

   // Return all comments and whitespace as tokens.
   RawLex.SetKeepWhitespaceMode(true);
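Taken together, the client-side impact is mechanical: raw Lexer construction now takes the MemoryBuffer explicitly, and EnterSourceFile reports failure through its return value instead of returning void. Below is a rough sketch of the updated calling convention; the helper name lexFileAfterPatch is made up for illustration, and PP, FID, and CurDir are assumed to come from existing frontend setup. It only reuses calls that appear in the hunks above.

```cpp
// Sketch of the post-patch calling convention (illustrative, not from the
// patch itself); PP, FID, and CurDir are assumed to be set up elsewhere,
// as at the call sites patched above.
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/Preprocessor.h"

void lexFileAfterPatch(clang::Preprocessor &PP, clang::FileID FID,
                       const clang::DirectoryLookup *CurDir) {
  clang::SourceManager &SM = PP.getSourceManager();

  // Raw lexing: callers now fetch the buffer themselves and hand it to Lexer.
  const llvm::MemoryBuffer *FromFile = SM.getBuffer(FID);
  clang::Lexer RawLex(FID, FromFile, SM, PP.getLangOptions());
  RawLex.SetCommentRetentionState(true);

  // Preprocessed lexing: EnterSourceFile now returns true on failure, which
  // HandleIncludeDirective turns into diag::err_pp_error_opening_file.
  if (PP.EnterSourceFile(FID, CurDir)) {
    // Report the failure here, as the patched #include handling does.
  }
}
```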