author     Peter Collingbourne <peter@pcc.me.uk>    2011-02-28 02:37:51 +0000
committer  Peter Collingbourne <peter@pcc.me.uk>    2011-02-28 02:37:51 +0000
commit     84021556baceb76eedf7d44be8ba71d9b8cfacce (patch)
tree       15b450508e966c91de2ecf88b3f4ee4f8f80090f /lib
parent     15348aeb81285c75b2e92b5bf8d2db3445d147c2 (diff)
Rename tok::eom to tok::eod.
The previous name was inaccurate as this token in fact appears at the end of every preprocessing directive, not just macro definitions. No functionality change, except for a diagnostic tweak.
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@126631 91177308-0d34-0410-b5e6-96231b3b80d8
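The pattern behind the rename: every directive handler, not just #define handling, loops until the lexer hands back this sentinel token, so "end of directive" describes it better than "end of macro". Below is a minimal standalone sketch of that idea; the TokenKind enum, Token struct, and readDirectiveTokens helper are simplified illustrative stand-ins, not Clang's real lexer API.

#include <cassert>
#include <string>
#include <vector>

namespace tok {
enum TokenKind { identifier, numeric_constant, eod, eof };
} // namespace tok

struct Token {
  tok::TokenKind Kind;
  std::string Spelling;
  bool is(tok::TokenKind K) const { return Kind == K; }
  bool isNot(tok::TokenKind K) const { return Kind != K; }
};

// Hypothetical helper: collect the body of any directive line (#define,
// #line, #pragma, #include, ...) up to the end-of-directive sentinel.
std::vector<Token> readDirectiveTokens(const std::vector<Token> &Line) {
  std::vector<Token> Body;
  for (const Token &T : Line) {
    if (T.is(tok::eod))
      break; // terminates every directive, not just macro definitions
    Body.push_back(T);
  }
  return Body;
}

int main() {
  // Models "#define FOO 1" followed by the end-of-directive token.
  std::vector<Token> Line = {{tok::identifier, "FOO"},
                             {tok::numeric_constant, "1"},
                             {tok::eod, ""}};
  assert(readDirectiveTokens(Line).size() == 2);
  return 0;
}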
Diffstat (limited to 'lib')
-rw-r--r--  lib/Frontend/CacheTokens.cpp                4
-rw-r--r--  lib/Frontend/PrintPreprocessedOutput.cpp    2
-rw-r--r--  lib/Lex/Lexer.cpp                          14
-rw-r--r--  lib/Lex/PPDirectives.cpp                   90
-rw-r--r--  lib/Lex/PPExpressions.cpp                  18
-rw-r--r--  lib/Lex/PPLexerChange.cpp                  20
-rw-r--r--  lib/Lex/PPMacroExpansion.cpp               10
-rw-r--r--  lib/Lex/PTHLexer.cpp                        2
-rw-r--r--  lib/Lex/Pragma.cpp                         24
-rw-r--r--  lib/Lex/PreprocessorLexer.cpp               2
-rw-r--r--  lib/Lex/TokenLexer.cpp                      2
-rw-r--r--  lib/Parse/ParsePragma.cpp                  10
12 files changed, 99 insertions, 99 deletions
diff --git a/lib/Frontend/CacheTokens.cpp b/lib/Frontend/CacheTokens.cpp
index ee3fdd8343..50182247fc 100644
--- a/lib/Frontend/CacheTokens.cpp
+++ b/lib/Frontend/CacheTokens.cpp
@@ -288,12 +288,12 @@ PTHEntry PTHWriter::LexTokens(Lexer& L) {
if ((Tok.isAtStartOfLine() || Tok.is(tok::eof)) &&
ParsingPreprocessorDirective) {
- // Insert an eom token into the token cache. It has the same
+ // Insert an eod token into the token cache. It has the same
// position as the next token that is not on the same line as the
// preprocessor directive. Observe that we continue processing
// 'Tok' when we exit this branch.
Token Tmp = Tok;
- Tmp.setKind(tok::eom);
+ Tmp.setKind(tok::eod);
Tmp.clearFlag(Token::StartOfLine);
Tmp.setIdentifierInfo(0);
EmitToken(Tmp);
diff --git a/lib/Frontend/PrintPreprocessedOutput.cpp b/lib/Frontend/PrintPreprocessedOutput.cpp
index 922d743adf..b46e04749b 100644
--- a/lib/Frontend/PrintPreprocessedOutput.cpp
+++ b/lib/Frontend/PrintPreprocessedOutput.cpp
@@ -432,7 +432,7 @@ struct UnknownPragmaHandler : public PragmaHandler {
Callbacks->OS.write(Prefix, strlen(Prefix));
Callbacks->SetEmittedTokensOnThisLine();
// Read and print all of the pragma tokens.
- while (PragmaTok.isNot(tok::eom)) {
+ while (PragmaTok.isNot(tok::eod)) {
if (PragmaTok.hasLeadingSpace())
Callbacks->OS << ' ';
std::string TokSpell = PP.getSpelling(PragmaTok);
diff --git a/lib/Lex/Lexer.cpp b/lib/Lex/Lexer.cpp
index b17198b219..9643d8e978 100644
--- a/lib/Lex/Lexer.cpp
+++ b/lib/Lex/Lexer.cpp
@@ -178,7 +178,7 @@ Lexer *Lexer::Create_PragmaLexer(SourceLocation SpellingLoc,
InstantiationLocEnd, TokLen);
// Ensure that the lexer thinks it is inside a directive, so that end \n will
- // return an EOM token.
+ // return an EOD token.
L->ParsingPreprocessorDirective = true;
// This lexer really is for _Pragma.
@@ -1407,7 +1407,7 @@ bool Lexer::SkipBCPLComment(Token &Result, const char *CurPtr) {
return SaveBCPLComment(Result, CurPtr);
// If we are inside a preprocessor directive and we see the end of line,
- // return immediately, so that the lexer can return this as an EOM token.
+ // return immediately, so that the lexer can return this as an EOD token.
if (ParsingPreprocessorDirective || CurPtr == BufferEnd) {
BufferPtr = CurPtr;
return false;
@@ -1715,14 +1715,14 @@ std::string Lexer::ReadToEndOfLine() {
assert(CurPtr[-1] == Char && "Trigraphs for newline?");
BufferPtr = CurPtr-1;
- // Next, lex the character, which should handle the EOM transition.
+ // Next, lex the character, which should handle the EOD transition.
Lex(Tmp);
if (Tmp.is(tok::code_completion)) {
if (PP && PP->getCodeCompletionHandler())
PP->getCodeCompletionHandler()->CodeCompleteNaturalLanguage();
Lex(Tmp);
}
- assert(Tmp.is(tok::eom) && "Unexpected token!");
+ assert(Tmp.is(tok::eod) && "Unexpected token!");
// Finally, we're done, return the string we found.
return Result;
@@ -1758,7 +1758,7 @@ bool Lexer::LexEndOfFile(Token &Result, const char *CurPtr) {
// Done parsing the "line".
ParsingPreprocessorDirective = false;
// Update the location of token as well as BufferPtr.
- FormTokenWithChars(Result, CurPtr, tok::eom);
+ FormTokenWithChars(Result, CurPtr, tok::eod);
// Restore comment saving mode, in case it was disabled for directive.
SetCommentRetentionState(PP->getCommentRetentionState());
@@ -2006,7 +2006,7 @@ LexNextToken:
case '\n':
case '\r':
// If we are inside a preprocessor directive and we see the end of line,
- // we know we are done with the directive, so return an EOM token.
+ // we know we are done with the directive, so return an EOD token.
if (ParsingPreprocessorDirective) {
// Done parsing the "line".
ParsingPreprocessorDirective = false;
@@ -2017,7 +2017,7 @@ LexNextToken:
// Since we consumed a newline, we are back at the start of a line.
IsAtStartOfLine = true;
- Kind = tok::eom;
+ Kind = tok::eod;
break;
}
// The returned token is at the start of the line.
diff --git a/lib/Lex/PPDirectives.cpp b/lib/Lex/PPDirectives.cpp
index 3e871ae7ab..cea0798f6e 100644
--- a/lib/Lex/PPDirectives.cpp
+++ b/lib/Lex/PPDirectives.cpp
@@ -81,17 +81,17 @@ void Preprocessor::ReleaseMacroInfo(MacroInfo *MI) {
}
/// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the
-/// current line until the tok::eom token is found.
+/// current line until the tok::eod token is found.
void Preprocessor::DiscardUntilEndOfDirective() {
Token Tmp;
do {
LexUnexpandedToken(Tmp);
assert(Tmp.isNot(tok::eof) && "EOF seen while discarding directive tokens");
- } while (Tmp.isNot(tok::eom));
+ } while (Tmp.isNot(tok::eod));
}
/// ReadMacroName - Lex and validate a macro name, which occurs after a
-/// #define or #undef. This sets the token kind to eom and discards the rest
+/// #define or #undef. This sets the token kind to eod and discards the rest
/// of the macro line if the macro name is invalid. isDefineUndef is 1 if
/// this is due to a a #define, 2 if #undef directive, 0 if it is something
/// else (e.g. #ifdef).
@@ -107,7 +107,7 @@ void Preprocessor::ReadMacroName(Token &MacroNameTok, char isDefineUndef) {
}
// Missing macro name?
- if (MacroNameTok.is(tok::eom)) {
+ if (MacroNameTok.is(tok::eod)) {
Diag(MacroNameTok, diag::err_pp_missing_macro_name);
return;
}
@@ -143,13 +143,13 @@ void Preprocessor::ReadMacroName(Token &MacroNameTok, char isDefineUndef) {
}
// Invalid macro name, read and discard the rest of the line. Then set the
- // token kind to tok::eom.
- MacroNameTok.setKind(tok::eom);
+ // token kind to tok::eod.
+ MacroNameTok.setKind(tok::eod);
return DiscardUntilEndOfDirective();
}
-/// CheckEndOfDirective - Ensure that the next token is a tok::eom token. If
-/// not, emit a diagnostic and consume up until the eom. If EnableMacros is
+/// CheckEndOfDirective - Ensure that the next token is a tok::eod token. If
+/// not, emit a diagnostic and consume up until the eod. If EnableMacros is
/// true, then we consider macros that expand to zero tokens as being ok.
void Preprocessor::CheckEndOfDirective(const char *DirType, bool EnableMacros) {
Token Tmp;
@@ -166,7 +166,7 @@ void Preprocessor::CheckEndOfDirective(const char *DirType, bool EnableMacros) {
while (Tmp.is(tok::comment)) // Skip comments in -C mode.
LexUnexpandedToken(Tmp);
- if (Tmp.isNot(tok::eom)) {
+ if (Tmp.isNot(tok::eod)) {
// Add a fixit in GNU/C99/C++ mode. Don't offer a fixit for strict-C89,
// or if this is a macro-style preprocessing directive, because it is more
// trouble than it is worth to insert /**/ and check that there is no /**/
@@ -238,7 +238,7 @@ void Preprocessor::SkipExcludedConditionalBlock(SourceLocation IfTokenLoc,
// We just parsed a # character at the start of a line, so we're in
// directive mode. Tell the lexer this so any newlines we see will be
- // converted into an EOM token (this terminates the macro).
+ // converted into an EOD token (this terminates the macro).
CurPPLexer->ParsingPreprocessorDirective = true;
if (CurLexer) CurLexer->SetCommentRetentionState(false);
@@ -425,7 +425,7 @@ void Preprocessor::PTHSkipExcludedConditionalBlock() {
if (!CondInfo.FoundNonSkip) {
CondInfo.FoundNonSkip = true;
- // Scan until the eom token.
+ // Scan until the eod token.
CurPTHLexer->ParsingPreprocessorDirective = true;
DiscardUntilEndOfDirective();
CurPTHLexer->ParsingPreprocessorDirective = false;
@@ -535,7 +535,7 @@ void Preprocessor::HandleDirective(Token &Result) {
// We just parsed a # character at the start of a line, so we're in directive
// mode. Tell the lexer this so any newlines we see will be converted into an
- // EOM token (which terminates the directive).
+ // EOD token (which terminates the directive).
CurPPLexer->ParsingPreprocessorDirective = true;
++NumDirectives;
@@ -563,7 +563,7 @@ void Preprocessor::HandleDirective(Token &Result) {
TryAgain:
switch (Result.getKind()) {
- case tok::eom:
+ case tok::eod:
return; // null directive.
case tok::comment:
// Handle stuff like "# /*foo*/ define X" in -E -C mode.
@@ -686,7 +686,7 @@ static bool GetLineValue(Token &DigitTok, unsigned &Val,
if (DigitTok.isNot(tok::numeric_constant)) {
PP.Diag(DigitTok, DiagID);
- if (DigitTok.isNot(tok::eom))
+ if (DigitTok.isNot(tok::eod))
PP.DiscardUntilEndOfDirective();
return true;
}
@@ -758,9 +758,9 @@ void Preprocessor::HandleLineDirective(Token &Tok) {
Token StrTok;
Lex(StrTok);
- // If the StrTok is "eom", then it wasn't present. Otherwise, it must be a
- // string followed by eom.
- if (StrTok.is(tok::eom))
+ // If the StrTok is "eod", then it wasn't present. Otherwise, it must be a
+ // string followed by eod.
+ if (StrTok.is(tok::eod))
; // ok
else if (StrTok.isNot(tok::string_literal)) {
Diag(StrTok, diag::err_pp_line_invalid_filename);
@@ -779,7 +779,7 @@ void Preprocessor::HandleLineDirective(Token &Tok) {
FilenameID = SourceMgr.getLineTableFilenameID(Literal.GetString(),
Literal.GetStringLength());
- // Verify that there is nothing after the string, other than EOM. Because
+ // Verify that there is nothing after the string, other than EOD. Because
// of C99 6.10.4p5, macros that expand to empty tokens are ok.
CheckEndOfDirective("line", true);
}
@@ -800,7 +800,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit,
unsigned FlagVal;
Token FlagTok;
PP.Lex(FlagTok);
- if (FlagTok.is(tok::eom)) return false;
+ if (FlagTok.is(tok::eod)) return false;
if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag, PP))
return true;
@@ -808,7 +808,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit,
IsFileEntry = true;
PP.Lex(FlagTok);
- if (FlagTok.is(tok::eom)) return false;
+ if (FlagTok.is(tok::eod)) return false;
if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag,PP))
return true;
} else if (FlagVal == 2) {
@@ -834,7 +834,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit,
}
PP.Lex(FlagTok);
- if (FlagTok.is(tok::eom)) return false;
+ if (FlagTok.is(tok::eod)) return false;
if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag,PP))
return true;
}
@@ -849,7 +849,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit,
IsSystemHeader = true;
PP.Lex(FlagTok);
- if (FlagTok.is(tok::eom)) return false;
+ if (FlagTok.is(tok::eod)) return false;
if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag, PP))
return true;
@@ -863,7 +863,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit,
IsExternCHeader = true;
PP.Lex(FlagTok);
- if (FlagTok.is(tok::eom)) return false;
+ if (FlagTok.is(tok::eod)) return false;
// There are no more valid flags here.
PP.Diag(FlagTok, diag::err_pp_linemarker_invalid_flag);
@@ -893,9 +893,9 @@ void Preprocessor::HandleDigitDirective(Token &DigitTok) {
bool IsSystemHeader = false, IsExternCHeader = false;
int FilenameID = -1;
- // If the StrTok is "eom", then it wasn't present. Otherwise, it must be a
- // string followed by eom.
- if (StrTok.is(tok::eom))
+ // If the StrTok is "eod", then it wasn't present. Otherwise, it must be a
+ // string followed by eod.
+ if (StrTok.is(tok::eod))
; // ok
else if (StrTok.isNot(tok::string_literal)) {
Diag(StrTok, diag::err_pp_linemarker_invalid_filename);
@@ -978,12 +978,12 @@ void Preprocessor::HandleIdentSCCSDirective(Token &Tok) {
if (StrTok.isNot(tok::string_literal) &&
StrTok.isNot(tok::wide_string_literal)) {
Diag(StrTok, diag::err_pp_malformed_ident);
- if (StrTok.isNot(tok::eom))
+ if (StrTok.isNot(tok::eod))
DiscardUntilEndOfDirective();
return;
}
- // Verify that there is nothing after the string, other than EOM.
+ // Verify that there is nothing after the string, other than EOD.
CheckEndOfDirective("ident");
if (Callbacks) {
@@ -1052,14 +1052,14 @@ bool Preprocessor::GetIncludeFilenameSpelling(SourceLocation Loc,
///
/// This code concatenates and consumes tokens up to the '>' token. It returns
/// false if the > was found, otherwise it returns true if it finds and consumes
-/// the EOM marker.
+/// the EOD marker.
bool Preprocessor::ConcatenateIncludeName(
llvm::SmallString<128> &FilenameBuffer,
SourceLocation &End) {
Token CurTok;
Lex(CurTok);
- while (CurTok.isNot(tok::eom)) {
+ while (CurTok.isNot(tok::eod)) {
End = CurTok.getLocation();
// FIXME: Provide code completion for #includes.
@@ -1095,8 +1095,8 @@ bool Preprocessor::ConcatenateIncludeName(
Lex(CurTok);
}
- // If we hit the eom marker, emit an error and return true so that the caller
- // knows the EOM has been read.
+ // If we hit the eod marker, emit an error and return true so that the caller
+ // knows the EOD has been read.
Diag(CurTok.getLocation(), diag::err_pp_expects_filename);
return true;
}
@@ -1120,8 +1120,8 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
SourceLocation End;
switch (FilenameTok.getKind()) {
- case tok::eom:
- // If the token kind is EOM, the error has already been diagnosed.
+ case tok::eod:
+ // If the token kind is EOD, the error has already been diagnosed.
return;
case tok::angle_string_literal:
@@ -1135,7 +1135,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
// case, glue the tokens together into FilenameBuffer and interpret those.
FilenameBuffer.push_back('<');
if (ConcatenateIncludeName(FilenameBuffer, End))
- return; // Found <eom> but no ">"? Diagnostic already emitted.
+ return; // Found <eod> but no ">"? Diagnostic already emitted.
Filename = FilenameBuffer.str();
break;
default:
@@ -1153,7 +1153,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
return;
}
- // Verify that there is nothing after the filename, other than EOM. Note that
+ // Verify that there is nothing after the filename, other than EOD. Note that
// we allow macros that expand to nothing after the filename, because this
// falls into the category of "#include pp-tokens new-line" specified in
// C99 6.10.2p4.
@@ -1302,7 +1302,7 @@ bool Preprocessor::ReadMacroDefinitionArgList(MacroInfo *MI) {
MI->setIsC99Varargs();
MI->setArgumentList(&Arguments[0], Arguments.size(), BP);
return false;
- case tok::eom: // #define X(
+ case tok::eod: // #define X(
Diag(Tok, diag::err_pp_missing_rparen_in_macro_def);
return true;
default:
@@ -1366,7 +1366,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) {
ReadMacroName(MacroNameTok, 1);
// Error reading macro name? If so, diagnostic already issued.
- if (MacroNameTok.is(tok::eom))
+ if (MacroNameTok.is(tok::eod))
return;
Token LastTok = MacroNameTok;
@@ -1384,7 +1384,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) {
// If this is a function-like macro definition, parse the argument list,
// marking each of the identifiers as being used as macro arguments. Also,
// check other constraints on the first token of the macro body.
- if (Tok.is(tok::eom)) {
+ if (Tok.is(tok::eod)) {
// If there is no body to this macro, we have no special handling here.
} else if (Tok.hasLeadingSpace()) {
// This is a normal token with leading space. Clear the leading space
@@ -1439,13 +1439,13 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) {
Diag(Tok, diag::warn_missing_whitespace_after_macro_name);
}
- if (!Tok.is(tok::eom))
+ if (!Tok.is(tok::eod))
LastTok = Tok;
// Read the rest of the macro body.
if (MI->isObjectLike()) {
// Object-like macros are very simple, just read their body.
- while (Tok.isNot(tok::eom)) {
+ while (Tok.isNot(tok::eod)) {
LastTok = Tok;
MI->AddTokenToBody(Tok);
// Get the next token of the macro.
@@ -1456,7 +1456,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) {
// Otherwise, read the body of a function-like macro. While we are at it,
// check C99 6.10.3.2p1: ensure that # operators are followed by macro
// parameters in function-like macro expansions.
- while (Tok.isNot(tok::eom)) {
+ while (Tok.isNot(tok::eod)) {
LastTok = Tok;
if (Tok.isNot(tok::hash)) {
@@ -1478,7 +1478,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) {
// the '#' because '#' is often a comment character. However, change
// the kind of the token to tok::unknown so that the preprocessor isn't
// confused.
- if (getLangOptions().AsmPreprocessor && Tok.isNot(tok::eom)) {
+ if (getLangOptions().AsmPreprocessor && Tok.isNot(tok::eod)) {
LastTok.setKind(tok::unknown);
} else {
Diag(Tok, diag::err_pp_stringize_not_parameter);
@@ -1573,7 +1573,7 @@ void Preprocessor::HandleUndefDirective(Token &UndefTok) {
ReadMacroName(MacroNameTok, 2);
// Error reading macro name? If so, diagnostic already issued.
- if (MacroNameTok.is(tok::eom))
+ if (MacroNameTok.is(tok::eod))
return;
// Check to see if this is the last token on the #undef line.
@@ -1619,7 +1619,7 @@ void Preprocessor::HandleIfdefDirective(Token &Result, bool isIfndef,
ReadMacroName(MacroNameTok);
// Error reading macro name? If so, diagnostic already issued.
- if (MacroNameTok.is(tok::eom)) {
+ if (MacroNameTok.is(tok::eod)) {
// Skip code until we get to #endif. This helps with recovery by not
// emitting an error when the #endif is reached.
SkipExcludedConditionalBlock(DirectiveTok.getLocation(),
diff --git a/lib/Lex/PPExpressions.cpp b/lib/Lex/PPExpressions.cpp
index 1451c5a1ef..8fcfc70a7c 100644
--- a/lib/Lex/PPExpressions.cpp
+++ b/lib/Lex/PPExpressions.cpp
@@ -180,7 +180,7 @@ static bool EvaluateValue(PPValue &Result, Token &PeekTok, DefinedTracker &DT,
default: // Non-value token.
PP.Diag(PeekTok, diag::err_pp_expr_bad_token_start_expr);
return true;
- case tok::eom:
+ case tok::eod:
case tok::r_paren:
// If there is no expression, report and exit.
PP.Diag(PeekTok, diag::err_pp_expected_value_in_expr);
@@ -372,7 +372,7 @@ static bool EvaluateValue(PPValue &Result, Token &PeekTok, DefinedTracker &DT,
/// token. This returns:
/// ~0 - Invalid token.
/// 14 -> 3 - various operators.
-/// 0 - 'eom' or ')'
+/// 0 - 'eod' or ')'
static unsigned getPrecedence(tok::TokenKind Kind) {
switch (Kind) {
default: return ~0U;
@@ -397,8 +397,8 @@ static unsigned getPrecedence(tok::TokenKind Kind) {
case tok::question: return 4;
case tok::comma: return 3;
case tok::colon: return 2;
- case tok::r_paren: return 0; // Lowest priority, end of expr.
- case tok::eom: return 0; // Lowest priority, end of macro.
+ case tok::r_paren: return 0;// Lowest priority, end of expr.
+ case tok::eod: return 0;// Lowest priority, end of directive.
}
}
@@ -713,7 +713,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) {
DefinedTracker DT;
if (EvaluateValue(ResVal, Tok, DT, true, *this)) {
// Parse error, skip the rest of the macro line.
- if (Tok.isNot(tok::eom))
+ if (Tok.isNot(tok::eod))
DiscardUntilEndOfDirective();
// Restore 'DisableMacroExpansion'.
@@ -724,7 +724,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) {
// If we are at the end of the expression after just parsing a value, there
// must be no (unparenthesized) binary operators involved, so we can exit
// directly.
- if (Tok.is(tok::eom)) {
+ if (Tok.is(tok::eod)) {
// If the expression we parsed was of the form !defined(macro), return the
// macro in IfNDefMacro.
if (DT.State == DefinedTracker::NotDefinedMacro)
@@ -740,7 +740,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) {
if (EvaluateDirectiveSubExpr(ResVal, getPrecedence(tok::question),
Tok, true, *this)) {
// Parse error, skip the rest of the macro line.
- if (Tok.isNot(tok::eom))
+ if (Tok.isNot(tok::eod))
DiscardUntilEndOfDirective();
// Restore 'DisableMacroExpansion'.
@@ -748,9 +748,9 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) {
return false;
}
- // If we aren't at the tok::eom token, something bad happened, like an extra
+ // If we aren't at the tok::eod token, something bad happened, like an extra
// ')' token.
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
Diag(Tok, diag::err_pp_expected_eol);
DiscardUntilEndOfDirective();
}
diff --git a/lib/Lex/PPLexerChange.cpp b/lib/Lex/PPLexerChange.cpp
index eef42b69d8..bf0a7fbfef 100644
--- a/lib/Lex/PPLexerChange.cpp
+++ b/lib/Lex/PPLexerChange.cpp
@@ -301,7 +301,7 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
// We handle this by scanning for the closest real lexer, switching it to
// raw mode and preprocessor mode. This will cause it to return \n as an
- // explicit EOM token.
+ // explicit EOD token.
PreprocessorLexer *FoundLexer = 0;
bool LexerWasInPPMode = false;
for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) {
@@ -309,11 +309,11 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
if (ISI.ThePPLexer == 0) continue; // Scan for a real lexer.
// Once we find a real lexer, mark it as raw mode (disabling macro
- // expansions) and preprocessor mode (return EOM). We know that the lexer
+ // expansions) and preprocessor mode (return EOD). We know that the lexer
// was *not* in raw mode before, because the macro that the comment came
// from was expanded. However, it could have already been in preprocessor
// mode (#if COMMENT) in which case we have to return it to that mode and
- // return EOM.
+ // return EOD.
FoundLexer = ISI.ThePPLexer;
FoundLexer->LexingRawMode = true;
LexerWasInPPMode = FoundLexer->ParsingPreprocessorDirective;
@@ -326,22 +326,22 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
// the next token.
if (!HandleEndOfTokenLexer(Tok)) Lex(Tok);
- // Discarding comments as long as we don't have EOF or EOM. This 'comments
+ // Discarding comments as long as we don't have EOF or EOD. This 'comments
// out' the rest of the line, including any tokens that came from other macros
// that were active, as in:
// #define submacro a COMMENT b
// submacro c
// which should lex to 'a' only: 'b' and 'c' should be removed.
- while (Tok.isNot(tok::eom) && Tok.isNot(tok::eof))
+ while (Tok.isNot(tok::eod) && Tok.isNot(tok::eof))
Lex(Tok);
- // If we got an eom token, then we successfully found the end of the line.
- if (Tok.is(tok::eom)) {
+ // If we got an eod token, then we successfully found the end of the line.
+ if (Tok.is(tok::eod)) {
assert(FoundLexer && "Can't get end of line without an active lexer");
// Restore the lexer back to normal mode instead of raw mode.
FoundLexer->LexingRawMode = false;
- // If the lexer was already in preprocessor mode, just return the EOM token
+ // If the lexer was already in preprocessor mode, just return the EOD token
// to finish the preprocessor line.
if (LexerWasInPPMode) return;
@@ -352,7 +352,7 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) {
// If we got an EOF token, then we reached the end of the token stream but
// didn't find an explicit \n. This can only happen if there was no lexer
- // active (an active lexer would return EOM at EOF if there was no \n in
+ // active (an active lexer would return EOD at EOF if there was no \n in
// preprocessor directive mode), so just return EOF as our token.
- assert(!FoundLexer && "Lexer should return EOM before EOF in PP mode");
+ assert(!FoundLexer && "Lexer should return EOD before EOF in PP mode");
}
diff --git a/lib/Lex/PPMacroExpansion.cpp b/lib/Lex/PPMacroExpansion.cpp
index ba92614910..374f85d8b8 100644
--- a/lib/Lex/PPMacroExpansion.cpp
+++ b/lib/Lex/PPMacroExpansion.cpp
@@ -355,9 +355,9 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
LexUnexpandedToken(Tok);
}
- if (Tok.is(tok::eof) || Tok.is(tok::eom)) { // "#if f(<eof>" & "#if f(\n"
+ if (Tok.is(tok::eof) || Tok.is(tok::eod)) { // "#if f(<eof>" & "#if f(\n"
Diag(MacroName, diag::err_unterm_macro_invoc);
- // Do not lose the EOF/EOM. Return it to the client.
+ // Do not lose the EOF/EOD. Return it to the client.
MacroName = Tok;
return 0;
} else if (Tok.is(tok::r_paren)) {
@@ -626,8 +626,8 @@ static bool EvaluateHasIncludeCommon(Token &Tok,
SourceLocation EndLoc;
switch (Tok.getKind()) {
- case tok::eom:
- // If the token kind is EOM, the error has already been diagnosed.
+ case tok::eod:
+ // If the token kind is EOD, the error has already been diagnosed.
return false;
case tok::angle_string_literal:
@@ -644,7 +644,7 @@ static bool EvaluateHasIncludeCommon(Token &Tok,
// case, glue the tokens together into FilenameBuffer and interpret those.
FilenameBuffer.push_back('<');
if (PP.ConcatenateIncludeName(FilenameBuffer, EndLoc))
- return false; // Found <eom> but no ">"? Diagnostic already emitted.
+ return false; // Found <eod> but no ">"? Diagnostic already emitted.
Filename = FilenameBuffer.str();
break;
default:
diff --git a/lib/Lex/PTHLexer.cpp b/lib/Lex/PTHLexer.cpp
index 975753bc23..3a53881ad5 100644
--- a/lib/Lex/PTHLexer.cpp
+++ b/lib/Lex/PTHLexer.cpp
@@ -125,7 +125,7 @@ LexNextToken:
return PP->Lex(Tok);
}
- if (TKind == tok::eom) {
+ if (TKind == tok::eod) {
assert(ParsingPreprocessorDirective);
ParsingPreprocessorDirective = false;
return;
diff --git a/lib/Lex/Pragma.cpp b/lib/Lex/Pragma.cpp
index 80d3bb1d27..8fd5ec253e 100644
--- a/lib/Lex/Pragma.cpp
+++ b/lib/Lex/Pragma.cpp
@@ -229,8 +229,8 @@ void Preprocessor::HandleMicrosoft__pragma(Token &Tok) {
PragmaToks.front().setFlag(Token::LeadingSpace);
- // Replace the ')' with an EOM to mark the end of the pragma.
- PragmaToks.back().setKind(tok::eom);
+ // Replace the ')' with an EOD to mark the end of the pragma.
+ PragmaToks.back().setKind(tok::eod);
Token *TokArray = new Token[PragmaToks.size()];
std::copy(PragmaToks.begin(), PragmaToks.end(), TokArray);
@@ -283,7 +283,7 @@ void Preprocessor::HandlePragmaPoison(Token &PoisonTok) {
if (CurPPLexer) CurPPLexer->LexingRawMode = false;
// If we reached the end of line, we're done.
- if (Tok.is(tok::eom)) return;
+ if (Tok.is(tok::eod)) return;
// Can only poison identifiers.
if (Tok.isNot(tok::raw_identifier)) {
@@ -348,8 +348,8 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) {
Token FilenameTok;
CurPPLexer->LexIncludeFilename(FilenameTok);
- // If the token kind is EOM, the error has already been diagnosed.
- if (FilenameTok.is(tok::eom))
+ // If the token kind is EOD, the error has already been diagnosed.
+ if (FilenameTok.is(tok::eod))
return;
// Reserve a buffer to get the spelling.
@@ -381,7 +381,7 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) {
// Lex tokens at the end of the message and include them in the message.
std::string Message;
Lex(DependencyTok);
- while (DependencyTok.isNot(tok::eom)) {
+ while (DependencyTok.isNot(tok::eod)) {
Message += getSpelling(DependencyTok) + " ";
Lex(DependencyTok);
}
@@ -470,7 +470,7 @@ void Preprocessor::HandlePragmaComment(Token &Tok) {
}
Lex(Tok); // eat the r_paren.
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
Diag(Tok.getLocation(), diag::err_pragma_comment_malformed);
return;
}
@@ -541,7 +541,7 @@ void Preprocessor::HandlePragmaMessage(Token &Tok) {
Lex(Tok); // eat the r_paren.
}
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
Diag(Tok.getLocation(), diag::err_pragma_message_malformed);
return;
}
@@ -737,10 +737,10 @@ bool Preprocessor::LexOnOffSwitch(tok::OnOffSwitch &Result) {
return true;
}
- // Verify that this is followed by EOM.
+ // Verify that this is followed by EOD.
LexUnexpandedToken(Tok);
- if (Tok.isNot(tok::eom))
- Diag(Tok, diag::ext_pragma_syntax_eom);
+ if (Tok.isNot(tok::eod))
+ Diag(Tok, diag::ext_pragma_syntax_eod);
return false;
}
@@ -883,7 +883,7 @@ public:
PP.LexUnexpandedToken(Tok);
}
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_diagnostic_invalid_token);
return;
}
diff --git a/lib/Lex/PreprocessorLexer.cpp b/lib/Lex/PreprocessorLexer.cpp
index e005c49476..808a81bd5e 100644
--- a/lib/Lex/PreprocessorLexer.cpp
+++ b/lib/Lex/PreprocessorLexer.cpp
@@ -34,7 +34,7 @@ void PreprocessorLexer::LexIncludeFilename(Token &FilenameTok) {
ParsingFilename = false;
// No filename?
- if (FilenameTok.is(tok::eom))
+ if (FilenameTok.is(tok::eod))
PP->Diag(FilenameTok.getLocation(), diag::err_pp_expects_filename);
}
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index caa44bf4a1..f1e1596789 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -546,7 +546,7 @@ unsigned TokenLexer::isNextTokenLParen() const {
/// isParsingPreprocessorDirective - Return true if we are in the middle of a
/// preprocessor directive.
bool TokenLexer::isParsingPreprocessorDirective() const {
- return Tokens[NumTokens-1].is(tok::eom) && !isAtEnd();
+ return Tokens[NumTokens-1].is(tok::eod) && !isAtEnd();
}
/// HandleMicrosoftCommentPaste - In microsoft compatibility mode, /##/ pastes
diff --git a/lib/Parse/ParsePragma.cpp b/lib/Parse/ParsePragma.cpp
index dfd0da079d..5585209e88 100644
--- a/lib/Parse/ParsePragma.cpp
+++ b/lib/Parse/ParsePragma.cpp
@@ -74,7 +74,7 @@ void PragmaGCCVisibilityHandler::HandlePragma(Preprocessor &PP,
return;
}
PP.Lex(Tok);
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol)
<< "visibility";
return;
@@ -168,7 +168,7 @@ void PragmaPackHandler::HandlePragma(Preprocessor &PP,
SourceLocation RParenLoc = Tok.getLocation();
PP.Lex(Tok);
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol) << "pack";
return;
}
@@ -228,7 +228,7 @@ static void ParseAlignPragma(Sema &Actions, Preprocessor &PP, Token &FirstTok,
SourceLocation KindLoc = Tok.getLocation();
PP.Lex(Tok);
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol)
<< (IsOptions ? "options" : "align");
return;
@@ -302,7 +302,7 @@ void PragmaUnusedHandler::HandlePragma(Preprocessor &PP,
}
PP.Lex(Tok);
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol) <<
"unused";
return;
@@ -359,7 +359,7 @@ void PragmaWeakHandler::HandlePragma(Preprocessor &PP,
PP.Lex(Tok);
}
- if (Tok.isNot(tok::eom)) {
+ if (Tok.isNot(tok::eod)) {
PP.Diag(Tok.getLocation(), diag::warn_pragma_extra_tokens_at_eol) << "weak";
return;
}
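The ParsePragma.cpp hunks above all end with the same end-of-directive check: after consuming a pragma's arguments, the handler requires the next token to be tok::eod and otherwise warns about extra tokens at the end of the line. The following is a minimal standalone sketch of that validation step; the types and the warnExtraTokens helper are hypothetical simplifications, not Clang's real Preprocessor or diagnostics interface.

#include <cstdio>
#include <string>
#include <vector>

enum class TokenKind { identifier, r_paren, eod };

struct Token {
  TokenKind Kind;
  std::string Spelling;
};

// Hypothetical diagnostic helper standing in for
// PP.Diag(..., diag::warn_pragma_extra_tokens_at_eol) << PragmaName.
void warnExtraTokens(const std::string &PragmaName, const Token &Tok) {
  std::fprintf(stderr,
               "warning: extra tokens at end of '#pragma %s' - ignored (near '%s')\n",
               PragmaName.c_str(), Tok.Spelling.c_str());
}

// Validate that nothing follows the pragma's arguments before the
// end-of-directive token. Returns true if the pragma line is well formed.
bool checkEndOfPragma(const std::vector<Token> &Rest,
                      const std::string &PragmaName) {
  if (Rest.empty() || Rest.front().Kind == TokenKind::eod)
    return true;
  warnExtraTokens(PragmaName, Rest.front());
  return false;
}

int main() {
  // Models "#pragma pack(8) garbage": the token after ')' is not eod, so warn.
  std::vector<Token> Rest = {{TokenKind::identifier, "garbage"},
                             {TokenKind::eod, ""}};
  checkEndOfPragma(Rest, "pack");
  return 0;
}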