Diffstat (limited to 'contrib/llvm/tools/clang/lib/Lex')
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/HeaderMap.cpp | 4
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/HeaderSearch.cpp | 123
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/Lexer.cpp | 104
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/LiteralSupport.cpp | 2
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/MacroArgs.cpp | 2
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PPDirectives.cpp | 128
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PPExpressions.cpp | 18
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PPLexerChange.cpp | 20
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PPMacroExpansion.cpp | 39
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PTHLexer.cpp | 4
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/Pragma.cpp | 26
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PreprocessingRecord.cpp | 15
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/Preprocessor.cpp | 52
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/PreprocessorLexer.cpp | 2
-rw-r--r-- | contrib/llvm/tools/clang/lib/Lex/TokenLexer.cpp | 8
15 files changed, 386 insertions, 161 deletions
diff --git a/contrib/llvm/tools/clang/lib/Lex/HeaderMap.cpp b/contrib/llvm/tools/clang/lib/Lex/HeaderMap.cpp index e424f91..e102a6d 100644 --- a/contrib/llvm/tools/clang/lib/Lex/HeaderMap.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/HeaderMap.cpp @@ -199,8 +199,8 @@ void HeaderMap::dump() const { /// LookupFile - Check to see if the specified relative filename is located in /// this HeaderMap. If so, open it and return its FileEntry. -const FileEntry *HeaderMap::LookupFile(llvm::StringRef Filename, - FileManager &FM) const { +const FileEntry *HeaderMap::LookupFile( + llvm::StringRef Filename, FileManager &FM) const { const HMapHeader &Hdr = getHeader(); unsigned NumBuckets = getEndianAdjustedWord(Hdr.NumBuckets); diff --git a/contrib/llvm/tools/clang/lib/Lex/HeaderSearch.cpp b/contrib/llvm/tools/clang/lib/Lex/HeaderSearch.cpp index b028e33..372078c 100644 --- a/contrib/llvm/tools/clang/lib/Lex/HeaderSearch.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/HeaderSearch.cpp @@ -114,8 +114,11 @@ const char *DirectoryLookup::getName() const { /// LookupFile - Lookup the specified file in this search path, returning it /// if it exists or returning null if not. -const FileEntry *DirectoryLookup::LookupFile(llvm::StringRef Filename, - HeaderSearch &HS) const { +const FileEntry *DirectoryLookup::LookupFile( + llvm::StringRef Filename, + HeaderSearch &HS, + llvm::SmallVectorImpl<char> *SearchPath, + llvm::SmallVectorImpl<char> *RelativePath) const { llvm::SmallString<1024> TmpDir; if (isNormalDir()) { // Concatenate the requested file onto the directory. @@ -123,21 +126,46 @@ const FileEntry *DirectoryLookup::LookupFile(llvm::StringRef Filename, TmpDir += getDir()->getName(); TmpDir.push_back('/'); TmpDir.append(Filename.begin(), Filename.end()); - return HS.getFileMgr().getFile(TmpDir.str()); + if (SearchPath != NULL) { + llvm::StringRef SearchPathRef(getDir()->getName()); + SearchPath->clear(); + SearchPath->append(SearchPathRef.begin(), SearchPathRef.end()); + } + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin(), Filename.end()); + } + return HS.getFileMgr().getFile(TmpDir.str(), /*openFile=*/true); } if (isFramework()) - return DoFrameworkLookup(Filename, HS); + return DoFrameworkLookup(Filename, HS, SearchPath, RelativePath); assert(isHeaderMap() && "Unknown directory lookup"); - return getHeaderMap()->LookupFile(Filename, HS.getFileMgr()); + const FileEntry * const Result = getHeaderMap()->LookupFile( + Filename, HS.getFileMgr()); + if (Result) { + if (SearchPath != NULL) { + llvm::StringRef SearchPathRef(getName()); + SearchPath->clear(); + SearchPath->append(SearchPathRef.begin(), SearchPathRef.end()); + } + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin(), Filename.end()); + } + } + return Result; } /// DoFrameworkLookup - Do a lookup of the specified file in the current /// DirectoryLookup, which is a framework directory. -const FileEntry *DirectoryLookup::DoFrameworkLookup(llvm::StringRef Filename, - HeaderSearch &HS) const { +const FileEntry *DirectoryLookup::DoFrameworkLookup( + llvm::StringRef Filename, + HeaderSearch &HS, + llvm::SmallVectorImpl<char> *SearchPath, + llvm::SmallVectorImpl<char> *RelativePath) const { FileManager &FileMgr = HS.getFileMgr(); // Framework names must have a '/' in the filename. 
@@ -183,19 +211,37 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(llvm::StringRef Filename, FrameworkDirCache = getFrameworkDir(); } + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin()+SlashPos+1, Filename.end()); + } + // Check "/System/Library/Frameworks/Cocoa.framework/Headers/file.h" unsigned OrigSize = FrameworkName.size(); FrameworkName += "Headers/"; + + if (SearchPath != NULL) { + SearchPath->clear(); + // Without trailing '/'. + SearchPath->append(FrameworkName.begin(), FrameworkName.end()-1); + } + FrameworkName.append(Filename.begin()+SlashPos+1, Filename.end()); - if (const FileEntry *FE = FileMgr.getFile(FrameworkName.str())) + if (const FileEntry *FE = FileMgr.getFile(FrameworkName.str(), + /*openFile=*/true)) { return FE; + } // Check "/System/Library/Frameworks/Cocoa.framework/PrivateHeaders/file.h" const char *Private = "Private"; FrameworkName.insert(FrameworkName.begin()+OrigSize, Private, Private+strlen(Private)); - return FileMgr.getFile(FrameworkName.str()); + if (SearchPath != NULL) + SearchPath->insert(SearchPath->begin()+OrigSize, Private, + Private+strlen(Private)); + + return FileMgr.getFile(FrameworkName.str(), /*openFile=*/true); } @@ -209,11 +255,14 @@ const FileEntry *DirectoryLookup::DoFrameworkLookup(llvm::StringRef Filename, /// for system #include's or not (i.e. using <> instead of ""). CurFileEnt, if /// non-null, indicates where the #including file is, in case a relative search /// is needed. -const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, - bool isAngled, - const DirectoryLookup *FromDir, - const DirectoryLookup *&CurDir, - const FileEntry *CurFileEnt) { +const FileEntry *HeaderSearch::LookupFile( + llvm::StringRef Filename, + bool isAngled, + const DirectoryLookup *FromDir, + const DirectoryLookup *&CurDir, + const FileEntry *CurFileEnt, + llvm::SmallVectorImpl<char> *SearchPath, + llvm::SmallVectorImpl<char> *RelativePath) { // If 'Filename' is absolute, check to see if it exists and no searching. if (llvm::sys::path::is_absolute(Filename)) { CurDir = 0; @@ -221,8 +270,14 @@ const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, // If this was an #include_next "/absolute/file", fail. if (FromDir) return 0; + if (SearchPath != NULL) + SearchPath->clear(); + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin(), Filename.end()); + } // Otherwise, just return the file. - return FileMgr.getFile(Filename); + return FileMgr.getFile(Filename, /*openFile=*/true); } // Step #0, unless disabled, check to see if the file is in the #includer's @@ -237,7 +292,7 @@ const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, TmpDir += CurFileEnt->getDir()->getName(); TmpDir.push_back('/'); TmpDir.append(Filename.begin(), Filename.end()); - if (const FileEntry *FE = FileMgr.getFile(TmpDir.str())) { + if (const FileEntry *FE = FileMgr.getFile(TmpDir.str(),/*openFile=*/true)) { // Leave CurDir unset. // This file is a system header or C++ unfriendly if the old file is. // @@ -246,6 +301,15 @@ const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, // of evaluation. 
unsigned DirInfo = getFileInfo(CurFileEnt).DirInfo; getFileInfo(FE).DirInfo = DirInfo; + if (SearchPath != NULL) { + llvm::StringRef SearchPathRef(CurFileEnt->getDir()->getName()); + SearchPath->clear(); + SearchPath->append(SearchPathRef.begin(), SearchPathRef.end()); + } + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin(), Filename.end()); + } return FE; } } @@ -283,7 +347,7 @@ const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, // Check each directory in sequence to see if it contains this file. for (; i != SearchDirs.size(); ++i) { const FileEntry *FE = - SearchDirs[i].LookupFile(Filename, *this); + SearchDirs[i].LookupFile(Filename, *this, SearchPath, RelativePath); if (!FE) continue; CurDir = &SearchDirs[i]; @@ -308,7 +372,9 @@ const FileEntry *HeaderSearch::LookupFile(llvm::StringRef Filename, /// for the designated file, otherwise return null. const FileEntry *HeaderSearch:: LookupSubframeworkHeader(llvm::StringRef Filename, - const FileEntry *ContextFileEnt) { + const FileEntry *ContextFileEnt, + llvm::SmallVectorImpl<char> *SearchPath, + llvm::SmallVectorImpl<char> *RelativePath) { assert(ContextFileEnt && "No context file?"); // Framework names must have a '/' in the filename. Find it. @@ -356,17 +422,34 @@ LookupSubframeworkHeader(llvm::StringRef Filename, const FileEntry *FE = 0; + if (RelativePath != NULL) { + RelativePath->clear(); + RelativePath->append(Filename.begin()+SlashPos+1, Filename.end()); + } + // Check ".../Frameworks/HIToolbox.framework/Headers/HIToolbox.h" llvm::SmallString<1024> HeadersFilename(FrameworkName); HeadersFilename += "Headers/"; + if (SearchPath != NULL) { + SearchPath->clear(); + // Without trailing '/'. + SearchPath->append(HeadersFilename.begin(), HeadersFilename.end()-1); + } + HeadersFilename.append(Filename.begin()+SlashPos+1, Filename.end()); - if (!(FE = FileMgr.getFile(HeadersFilename.str()))) { + if (!(FE = FileMgr.getFile(HeadersFilename.str(), /*openFile=*/true))) { // Check ".../Frameworks/HIToolbox.framework/PrivateHeaders/HIToolbox.h" HeadersFilename = FrameworkName; HeadersFilename += "PrivateHeaders/"; + if (SearchPath != NULL) { + SearchPath->clear(); + // Without trailing '/'. + SearchPath->append(HeadersFilename.begin(), HeadersFilename.end()-1); + } + HeadersFilename.append(Filename.begin()+SlashPos+1, Filename.end()); - if (!(FE = FileMgr.getFile(HeadersFilename.str()))) + if (!(FE = FileMgr.getFile(HeadersFilename.str(), /*openFile=*/true))) return 0; } diff --git a/contrib/llvm/tools/clang/lib/Lex/Lexer.cpp b/contrib/llvm/tools/clang/lib/Lex/Lexer.cpp index b17198b..16cc4f8 100644 --- a/contrib/llvm/tools/clang/lib/Lex/Lexer.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/Lexer.cpp @@ -71,9 +71,22 @@ void Lexer::InitLexer(const char *BufStart, const char *BufPtr, "We assume that the input buffer has a null character at the end" " to simplify lexing!"); + // Check whether we have a BOM in the beginning of the buffer. If yes - act + // accordingly. Right now we support only UTF-8 with and without BOM, so, just + // skip the UTF-8 BOM if it's present. + if (BufferStart == BufferPtr) { + // Determine the size of the BOM. + size_t BOMLength = llvm::StringSwitch<size_t>(BufferStart) + .StartsWith("\xEF\xBB\xBF", 3) // UTF-8 BOM + .Default(0); + + // Skip the BOM. + BufferPtr += BOMLength; + } + Is_PragmaLexer = false; IsInConflictMarker = false; - + // Start of the file is a start of line. 
IsAtStartOfLine = true; @@ -178,7 +191,7 @@ Lexer *Lexer::Create_PragmaLexer(SourceLocation SpellingLoc, InstantiationLocEnd, TokLen); // Ensure that the lexer thinks it is inside a directive, so that end \n will - // return an EOM token. + // return an EOD token. L->ParsingPreprocessorDirective = true; // This lexer really is for _Pragma. @@ -221,6 +234,54 @@ void Lexer::Stringify(llvm::SmallVectorImpl<char> &Str) { /// after trigraph expansion and escaped-newline folding. In particular, this /// wants to get the true, uncanonicalized, spelling of things like digraphs /// UCNs, etc. +llvm::StringRef Lexer::getSpelling(SourceLocation loc, + llvm::SmallVectorImpl<char> &buffer, + const SourceManager &SM, + const LangOptions &options, + bool *invalid) { + // Break down the source location. + std::pair<FileID, unsigned> locInfo = SM.getDecomposedLoc(loc); + + // Try to the load the file buffer. + bool invalidTemp = false; + llvm::StringRef file = SM.getBufferData(locInfo.first, &invalidTemp); + if (invalidTemp) { + if (invalid) *invalid = true; + return llvm::StringRef(); + } + + const char *tokenBegin = file.data() + locInfo.second; + + // Lex from the start of the given location. + Lexer lexer(SM.getLocForStartOfFile(locInfo.first), options, + file.begin(), tokenBegin, file.end()); + Token token; + lexer.LexFromRawLexer(token); + + unsigned length = token.getLength(); + + // Common case: no need for cleaning. + if (!token.needsCleaning()) + return llvm::StringRef(tokenBegin, length); + + // Hard case, we need to relex the characters into the string. + buffer.clear(); + buffer.reserve(length); + + for (const char *ti = tokenBegin, *te = ti + length; ti != te; ) { + unsigned charSize; + buffer.push_back(Lexer::getCharAndSizeNoWarn(ti, charSize, options)); + ti += charSize; + } + + return llvm::StringRef(buffer.data(), buffer.size()); +} + +/// getSpelling() - Return the 'spelling' of this token. The spelling of a +/// token are the characters used to represent the token in the source file +/// after trigraph expansion and escaped-newline folding. In particular, this +/// wants to get the true, uncanonicalized, spelling of things like digraphs +/// UCNs, etc. std::string Lexer::getSpelling(const Token &Tok, const SourceManager &SourceMgr, const LangOptions &Features, bool *Invalid) { assert((int)Tok.getLength() >= 0 && "Token character range is bogus!"); @@ -626,7 +687,7 @@ SourceLocation Lexer::getLocForEndOfToken(SourceLocation Loc, unsigned Offset, else return Loc; - return AdvanceToTokenCharacter(Loc, Len, SM, Features); + return Loc.getFileLocWithOffset(Len); } //===----------------------------------------------------------------------===// @@ -1407,7 +1468,7 @@ bool Lexer::SkipBCPLComment(Token &Result, const char *CurPtr) { return SaveBCPLComment(Result, CurPtr); // If we are inside a preprocessor directive and we see the end of line, - // return immediately, so that the lexer can return this as an EOM token. + // return immediately, so that the lexer can return this as an EOD token. if (ParsingPreprocessorDirective || CurPtr == BufferEnd) { BufferPtr = CurPtr; return false; @@ -1534,7 +1595,7 @@ static bool isEndOfBlockCommentWithEscapedNewLine(const char *CurPtr, /// some tokens, this will store the first token and return true. bool Lexer::SkipBlockComment(Token &Result, const char *CurPtr) { // Scan one character past where we should, looking for a '/' character. Once - // we find it, check to see if it was preceeded by a *. 
This common + // we find it, check to see if it was preceded by a *. This common // optimization helps people who like to put a lot of * characters in their // comments. @@ -1715,14 +1776,14 @@ std::string Lexer::ReadToEndOfLine() { assert(CurPtr[-1] == Char && "Trigraphs for newline?"); BufferPtr = CurPtr-1; - // Next, lex the character, which should handle the EOM transition. + // Next, lex the character, which should handle the EOD transition. Lex(Tmp); if (Tmp.is(tok::code_completion)) { if (PP && PP->getCodeCompletionHandler()) PP->getCodeCompletionHandler()->CodeCompleteNaturalLanguage(); Lex(Tmp); } - assert(Tmp.is(tok::eom) && "Unexpected token!"); + assert(Tmp.is(tok::eod) && "Unexpected token!"); // Finally, we're done, return the string we found. return Result; @@ -1758,7 +1819,7 @@ bool Lexer::LexEndOfFile(Token &Result, const char *CurPtr) { // Done parsing the "line". ParsingPreprocessorDirective = false; // Update the location of token as well as BufferPtr. - FormTokenWithChars(Result, CurPtr, tok::eom); + FormTokenWithChars(Result, CurPtr, tok::eod); // Restore comment saving mode, in case it was disabled for directive. SetCommentRetentionState(PP->getCommentRetentionState()); @@ -2006,7 +2067,7 @@ LexNextToken: case '\n': case '\r': // If we are inside a preprocessor directive and we see the end of line, - // we know we are done with the directive, so return an EOM token. + // we know we are done with the directive, so return an EOD token. if (ParsingPreprocessorDirective) { // Done parsing the "line". ParsingPreprocessorDirective = false; @@ -2017,7 +2078,7 @@ LexNextToken: // Since we consumed a newline, we are back at the start of a line. IsAtStartOfLine = true; - Kind = tok::eom; + Kind = tok::eod; break; } // The returned token is at the start of the line. @@ -2043,7 +2104,7 @@ LexNextToken: // If the next token is obviously a // or /* */ comment, skip it efficiently // too (without going through the big switch stmt). if (CurPtr[0] == '/' && CurPtr[1] == '/' && !inKeepCommentMode() && - Features.BCPLComment) { + Features.BCPLComment && !Features.TraditionalCPP) { if (SkipBCPLComment(Result, CurPtr+2)) return; // There is a token to return. goto SkipIgnoredUnits; @@ -2232,8 +2293,10 @@ LexNextToken: // this as "foo / bar" and langauges with BCPL comments would lex it as // "foo". Check to see if the character after the second slash is a '*'. // If so, we will lex that as a "/" instead of the start of a comment. - if (Features.BCPLComment || - getCharAndSize(CurPtr+SizeTmp, SizeTmp2) != '*') { + // However, we never do this in -traditional-cpp mode. + if ((Features.BCPLComment || + getCharAndSize(CurPtr+SizeTmp, SizeTmp2) != '*') && + !Features.TraditionalCPP) { if (SkipBCPLComment(Result, ConsumeChar(CurPtr, SizeTmp, Result))) return; // There is a token to return. @@ -2335,6 +2398,21 @@ LexNextToken: CurPtr = ConsumeChar(CurPtr, SizeTmp, Result); Kind = tok::lessequal; } else if (Features.Digraphs && Char == ':') { // '<:' -> '[' + if (Features.CPlusPlus0x && + getCharAndSize(CurPtr + SizeTmp, SizeTmp2) == ':') { + // C++0x [lex.pptoken]p3: + // Otherwise, if the next three characters are <:: and the subsequent + // character is neither : nor >, the < is treated as a preprocessor + // token by itself and not as the first character of the alternative + // token <:. 
+ unsigned SizeTmp3; + char After = getCharAndSize(CurPtr + SizeTmp + SizeTmp2, SizeTmp3); + if (After != ':' && After != '>') { + Kind = tok::less; + break; + } + } + CurPtr = ConsumeChar(CurPtr, SizeTmp, Result); Kind = tok::l_square; } else if (Features.Digraphs && Char == '%') { // '<%' -> '{' diff --git a/contrib/llvm/tools/clang/lib/Lex/LiteralSupport.cpp b/contrib/llvm/tools/clang/lib/Lex/LiteralSupport.cpp index 16d7b36..37e7bf4 100644 --- a/contrib/llvm/tools/clang/lib/Lex/LiteralSupport.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/LiteralSupport.cpp @@ -710,7 +710,7 @@ CharLiteralParser::CharLiteralParser(const char *begin, const char *end, ++begin; // FIXME: The "Value" is an uint64_t so we can handle char literals of - // upto 64-bits. + // up to 64-bits. // FIXME: This extensively assumes that 'char' is 8-bits. assert(PP.getTargetInfo().getCharWidth() == 8 && "Assumes char is 8 bits"); diff --git a/contrib/llvm/tools/clang/lib/Lex/MacroArgs.cpp b/contrib/llvm/tools/clang/lib/Lex/MacroArgs.cpp index 89f6368..dee7da3 100644 --- a/contrib/llvm/tools/clang/lib/Lex/MacroArgs.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/MacroArgs.cpp @@ -284,7 +284,7 @@ const Token &MacroArgs::getStringifiedArgument(unsigned ArgNo, assert(ArgNo < NumUnexpArgTokens && "Invalid argument number!"); if (StringifiedArgs.empty()) { StringifiedArgs.resize(getNumArguments()); - memset(&StringifiedArgs[0], 0, + memset((void*)&StringifiedArgs[0], 0, sizeof(StringifiedArgs[0])*getNumArguments()); } if (StringifiedArgs[ArgNo].isNot(tok::string_literal)) diff --git a/contrib/llvm/tools/clang/lib/Lex/PPDirectives.cpp b/contrib/llvm/tools/clang/lib/Lex/PPDirectives.cpp index 3e871ae..af3fa6e 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PPDirectives.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PPDirectives.cpp @@ -81,17 +81,17 @@ void Preprocessor::ReleaseMacroInfo(MacroInfo *MI) { } /// DiscardUntilEndOfDirective - Read and discard all tokens remaining on the -/// current line until the tok::eom token is found. +/// current line until the tok::eod token is found. void Preprocessor::DiscardUntilEndOfDirective() { Token Tmp; do { LexUnexpandedToken(Tmp); assert(Tmp.isNot(tok::eof) && "EOF seen while discarding directive tokens"); - } while (Tmp.isNot(tok::eom)); + } while (Tmp.isNot(tok::eod)); } /// ReadMacroName - Lex and validate a macro name, which occurs after a -/// #define or #undef. This sets the token kind to eom and discards the rest +/// #define or #undef. This sets the token kind to eod and discards the rest /// of the macro line if the macro name is invalid. isDefineUndef is 1 if /// this is due to a a #define, 2 if #undef directive, 0 if it is something /// else (e.g. #ifdef). @@ -107,7 +107,7 @@ void Preprocessor::ReadMacroName(Token &MacroNameTok, char isDefineUndef) { } // Missing macro name? - if (MacroNameTok.is(tok::eom)) { + if (MacroNameTok.is(tok::eod)) { Diag(MacroNameTok, diag::err_pp_missing_macro_name); return; } @@ -143,13 +143,13 @@ void Preprocessor::ReadMacroName(Token &MacroNameTok, char isDefineUndef) { } // Invalid macro name, read and discard the rest of the line. Then set the - // token kind to tok::eom. - MacroNameTok.setKind(tok::eom); + // token kind to tok::eod. + MacroNameTok.setKind(tok::eod); return DiscardUntilEndOfDirective(); } -/// CheckEndOfDirective - Ensure that the next token is a tok::eom token. If -/// not, emit a diagnostic and consume up until the eom. If EnableMacros is +/// CheckEndOfDirective - Ensure that the next token is a tok::eod token. 
If +/// not, emit a diagnostic and consume up until the eod. If EnableMacros is /// true, then we consider macros that expand to zero tokens as being ok. void Preprocessor::CheckEndOfDirective(const char *DirType, bool EnableMacros) { Token Tmp; @@ -166,7 +166,7 @@ void Preprocessor::CheckEndOfDirective(const char *DirType, bool EnableMacros) { while (Tmp.is(tok::comment)) // Skip comments in -C mode. LexUnexpandedToken(Tmp); - if (Tmp.isNot(tok::eom)) { + if (Tmp.isNot(tok::eod)) { // Add a fixit in GNU/C99/C++ mode. Don't offer a fixit for strict-C89, // or if this is a macro-style preprocessing directive, because it is more // trouble than it is worth to insert /**/ and check that there is no /**/ @@ -238,7 +238,7 @@ void Preprocessor::SkipExcludedConditionalBlock(SourceLocation IfTokenLoc, // We just parsed a # character at the start of a line, so we're in // directive mode. Tell the lexer this so any newlines we see will be - // converted into an EOM token (this terminates the macro). + // converted into an EOD token (this terminates the macro). CurPPLexer->ParsingPreprocessorDirective = true; if (CurLexer) CurLexer->SetCommentRetentionState(false); @@ -425,7 +425,7 @@ void Preprocessor::PTHSkipExcludedConditionalBlock() { if (!CondInfo.FoundNonSkip) { CondInfo.FoundNonSkip = true; - // Scan until the eom token. + // Scan until the eod token. CurPTHLexer->ParsingPreprocessorDirective = true; DiscardUntilEndOfDirective(); CurPTHLexer->ParsingPreprocessorDirective = false; @@ -469,10 +469,13 @@ void Preprocessor::PTHSkipExcludedConditionalBlock() { /// LookupFile - Given a "foo" or <foo> reference, look up the indicated file, /// return null on failure. isAngled indicates whether the file reference is /// for system #include's or not (i.e. using <> instead of ""). -const FileEntry *Preprocessor::LookupFile(llvm::StringRef Filename, - bool isAngled, - const DirectoryLookup *FromDir, - const DirectoryLookup *&CurDir) { +const FileEntry *Preprocessor::LookupFile( + llvm::StringRef Filename, + bool isAngled, + const DirectoryLookup *FromDir, + const DirectoryLookup *&CurDir, + llvm::SmallVectorImpl<char> *SearchPath, + llvm::SmallVectorImpl<char> *RelativePath) { // If the header lookup mechanism may be relative to the current file, pass in // info about where the current file is. const FileEntry *CurFileEnt = 0; @@ -494,8 +497,9 @@ const FileEntry *Preprocessor::LookupFile(llvm::StringRef Filename, // Do a standard file entry lookup. CurDir = CurDirLookup; - const FileEntry *FE = - HeaderInfo.LookupFile(Filename, isAngled, FromDir, CurDir, CurFileEnt); + const FileEntry *FE = HeaderInfo.LookupFile( + Filename, isAngled, FromDir, CurDir, CurFileEnt, + SearchPath, RelativePath); if (FE) return FE; // Otherwise, see if this is a subframework header. If so, this is relative @@ -503,7 +507,8 @@ const FileEntry *Preprocessor::LookupFile(llvm::StringRef Filename, // headers on the #include stack and pass them to HeaderInfo. 
if (IsFileLexer()) { if ((CurFileEnt = SourceMgr.getFileEntryForID(CurPPLexer->getFileID()))) - if ((FE = HeaderInfo.LookupSubframeworkHeader(Filename, CurFileEnt))) + if ((FE = HeaderInfo.LookupSubframeworkHeader(Filename, CurFileEnt, + SearchPath, RelativePath))) return FE; } @@ -512,7 +517,8 @@ const FileEntry *Preprocessor::LookupFile(llvm::StringRef Filename, if (IsFileLexer(ISEntry)) { if ((CurFileEnt = SourceMgr.getFileEntryForID(ISEntry.ThePPLexer->getFileID()))) - if ((FE = HeaderInfo.LookupSubframeworkHeader(Filename, CurFileEnt))) + if ((FE = HeaderInfo.LookupSubframeworkHeader( + Filename, CurFileEnt, SearchPath, RelativePath))) return FE; } } @@ -535,7 +541,7 @@ void Preprocessor::HandleDirective(Token &Result) { // We just parsed a # character at the start of a line, so we're in directive // mode. Tell the lexer this so any newlines we see will be converted into an - // EOM token (which terminates the directive). + // EOD token (which terminates the directive). CurPPLexer->ParsingPreprocessorDirective = true; ++NumDirectives; @@ -563,7 +569,7 @@ void Preprocessor::HandleDirective(Token &Result) { TryAgain: switch (Result.getKind()) { - case tok::eom: + case tok::eod: return; // null directive. case tok::comment: // Handle stuff like "# /*foo*/ define X" in -E -C mode. @@ -686,7 +692,7 @@ static bool GetLineValue(Token &DigitTok, unsigned &Val, if (DigitTok.isNot(tok::numeric_constant)) { PP.Diag(DigitTok, DiagID); - if (DigitTok.isNot(tok::eom)) + if (DigitTok.isNot(tok::eod)) PP.DiscardUntilEndOfDirective(); return true; } @@ -758,9 +764,9 @@ void Preprocessor::HandleLineDirective(Token &Tok) { Token StrTok; Lex(StrTok); - // If the StrTok is "eom", then it wasn't present. Otherwise, it must be a - // string followed by eom. - if (StrTok.is(tok::eom)) + // If the StrTok is "eod", then it wasn't present. Otherwise, it must be a + // string followed by eod. + if (StrTok.is(tok::eod)) ; // ok else if (StrTok.isNot(tok::string_literal)) { Diag(StrTok, diag::err_pp_line_invalid_filename); @@ -779,7 +785,7 @@ void Preprocessor::HandleLineDirective(Token &Tok) { FilenameID = SourceMgr.getLineTableFilenameID(Literal.GetString(), Literal.GetStringLength()); - // Verify that there is nothing after the string, other than EOM. Because + // Verify that there is nothing after the string, other than EOD. Because // of C99 6.10.4p5, macros that expand to empty tokens are ok. 
CheckEndOfDirective("line", true); } @@ -800,7 +806,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit, unsigned FlagVal; Token FlagTok; PP.Lex(FlagTok); - if (FlagTok.is(tok::eom)) return false; + if (FlagTok.is(tok::eod)) return false; if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag, PP)) return true; @@ -808,7 +814,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit, IsFileEntry = true; PP.Lex(FlagTok); - if (FlagTok.is(tok::eom)) return false; + if (FlagTok.is(tok::eod)) return false; if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag,PP)) return true; } else if (FlagVal == 2) { @@ -834,7 +840,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit, } PP.Lex(FlagTok); - if (FlagTok.is(tok::eom)) return false; + if (FlagTok.is(tok::eod)) return false; if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag,PP)) return true; } @@ -849,7 +855,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit, IsSystemHeader = true; PP.Lex(FlagTok); - if (FlagTok.is(tok::eom)) return false; + if (FlagTok.is(tok::eod)) return false; if (GetLineValue(FlagTok, FlagVal, diag::err_pp_linemarker_invalid_flag, PP)) return true; @@ -863,7 +869,7 @@ static bool ReadLineMarkerFlags(bool &IsFileEntry, bool &IsFileExit, IsExternCHeader = true; PP.Lex(FlagTok); - if (FlagTok.is(tok::eom)) return false; + if (FlagTok.is(tok::eod)) return false; // There are no more valid flags here. PP.Diag(FlagTok, diag::err_pp_linemarker_invalid_flag); @@ -893,9 +899,9 @@ void Preprocessor::HandleDigitDirective(Token &DigitTok) { bool IsSystemHeader = false, IsExternCHeader = false; int FilenameID = -1; - // If the StrTok is "eom", then it wasn't present. Otherwise, it must be a - // string followed by eom. - if (StrTok.is(tok::eom)) + // If the StrTok is "eod", then it wasn't present. Otherwise, it must be a + // string followed by eod. + if (StrTok.is(tok::eod)) ; // ok else if (StrTok.isNot(tok::string_literal)) { Diag(StrTok, diag::err_pp_linemarker_invalid_filename); @@ -978,12 +984,12 @@ void Preprocessor::HandleIdentSCCSDirective(Token &Tok) { if (StrTok.isNot(tok::string_literal) && StrTok.isNot(tok::wide_string_literal)) { Diag(StrTok, diag::err_pp_malformed_ident); - if (StrTok.isNot(tok::eom)) + if (StrTok.isNot(tok::eod)) DiscardUntilEndOfDirective(); return; } - // Verify that there is nothing after the string, other than EOM. + // Verify that there is nothing after the string, other than EOD. CheckEndOfDirective("ident"); if (Callbacks) { @@ -1052,14 +1058,14 @@ bool Preprocessor::GetIncludeFilenameSpelling(SourceLocation Loc, /// /// This code concatenates and consumes tokens up to the '>' token. It returns /// false if the > was found, otherwise it returns true if it finds and consumes -/// the EOM marker. +/// the EOD marker. bool Preprocessor::ConcatenateIncludeName( llvm::SmallString<128> &FilenameBuffer, SourceLocation &End) { Token CurTok; Lex(CurTok); - while (CurTok.isNot(tok::eom)) { + while (CurTok.isNot(tok::eod)) { End = CurTok.getLocation(); // FIXME: Provide code completion for #includes. @@ -1095,8 +1101,8 @@ bool Preprocessor::ConcatenateIncludeName( Lex(CurTok); } - // If we hit the eom marker, emit an error and return true so that the caller - // knows the EOM has been read. + // If we hit the eod marker, emit an error and return true so that the caller + // knows the EOD has been read. 
Diag(CurTok.getLocation(), diag::err_pp_expects_filename); return true; } @@ -1120,8 +1126,8 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc, SourceLocation End; switch (FilenameTok.getKind()) { - case tok::eom: - // If the token kind is EOM, the error has already been diagnosed. + case tok::eod: + // If the token kind is EOD, the error has already been diagnosed. return; case tok::angle_string_literal: @@ -1135,7 +1141,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc, // case, glue the tokens together into FilenameBuffer and interpret those. FilenameBuffer.push_back('<'); if (ConcatenateIncludeName(FilenameBuffer, End)) - return; // Found <eom> but no ">"? Diagnostic already emitted. + return; // Found <eod> but no ">"? Diagnostic already emitted. Filename = FilenameBuffer.str(); break; default: @@ -1153,7 +1159,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc, return; } - // Verify that there is nothing after the filename, other than EOM. Note that + // Verify that there is nothing after the filename, other than EOD. Note that // we allow macros that expand to nothing after the filename, because this // falls into the category of "#include pp-tokens new-line" specified in // C99 6.10.2p4. @@ -1167,7 +1173,13 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc, // Search include directories. const DirectoryLookup *CurDir; - const FileEntry *File = LookupFile(Filename, isAngled, LookupFrom, CurDir); + llvm::SmallString<1024> SearchPath; + llvm::SmallString<1024> RelativePath; + // We get the raw path only if we have 'Callbacks' to which we later pass + // the path. + const FileEntry *File = LookupFile( + Filename, isAngled, LookupFrom, CurDir, + Callbacks ? &SearchPath : NULL, Callbacks ? &RelativePath : NULL); if (File == 0) { Diag(FilenameTok, diag::err_pp_file_not_found) << Filename; return; @@ -1175,9 +1187,9 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc, // Notify the callback object that we've seen an inclusion directive. if (Callbacks) - Callbacks->InclusionDirective(HashLoc, IncludeTok, Filename, isAngled, File, - End); - + Callbacks->InclusionDirective(HashLoc, IncludeTok, Filename, isAngled, File, + End, SearchPath, RelativePath); + // The #included file will be considered to be a system header if either it is // in a system include directory, or if the #includer is a system include // header. @@ -1302,7 +1314,7 @@ bool Preprocessor::ReadMacroDefinitionArgList(MacroInfo *MI) { MI->setIsC99Varargs(); MI->setArgumentList(&Arguments[0], Arguments.size(), BP); return false; - case tok::eom: // #define X( + case tok::eod: // #define X( Diag(Tok, diag::err_pp_missing_rparen_in_macro_def); return true; default: @@ -1366,7 +1378,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { ReadMacroName(MacroNameTok, 1); // Error reading macro name? If so, diagnostic already issued. - if (MacroNameTok.is(tok::eom)) + if (MacroNameTok.is(tok::eod)) return; Token LastTok = MacroNameTok; @@ -1384,7 +1396,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { // If this is a function-like macro definition, parse the argument list, // marking each of the identifiers as being used as macro arguments. Also, // check other constraints on the first token of the macro body. - if (Tok.is(tok::eom)) { + if (Tok.is(tok::eod)) { // If there is no body to this macro, we have no special handling here. } else if (Tok.hasLeadingSpace()) { // This is a normal token with leading space. 
Clear the leading space @@ -1439,13 +1451,13 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { Diag(Tok, diag::warn_missing_whitespace_after_macro_name); } - if (!Tok.is(tok::eom)) + if (!Tok.is(tok::eod)) LastTok = Tok; // Read the rest of the macro body. if (MI->isObjectLike()) { // Object-like macros are very simple, just read their body. - while (Tok.isNot(tok::eom)) { + while (Tok.isNot(tok::eod)) { LastTok = Tok; MI->AddTokenToBody(Tok); // Get the next token of the macro. @@ -1456,7 +1468,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { // Otherwise, read the body of a function-like macro. While we are at it, // check C99 6.10.3.2p1: ensure that # operators are followed by macro // parameters in function-like macro expansions. - while (Tok.isNot(tok::eom)) { + while (Tok.isNot(tok::eod)) { LastTok = Tok; if (Tok.isNot(tok::hash)) { @@ -1478,7 +1490,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { // the '#' because '#' is often a comment character. However, change // the kind of the token to tok::unknown so that the preprocessor isn't // confused. - if (getLangOptions().AsmPreprocessor && Tok.isNot(tok::eom)) { + if (getLangOptions().AsmPreprocessor && Tok.isNot(tok::eod)) { LastTok.setKind(tok::unknown); } else { Diag(Tok, diag::err_pp_stringize_not_parameter); @@ -1504,7 +1516,7 @@ void Preprocessor::HandleDefineDirective(Token &DefineTok) { // Disable __VA_ARGS__ again. Ident__VA_ARGS__->setIsPoisoned(true); - // Check that there is no paste (##) operator at the begining or end of the + // Check that there is no paste (##) operator at the beginning or end of the // replacement list. unsigned NumTokens = MI->getNumTokens(); if (NumTokens != 0) { @@ -1573,7 +1585,7 @@ void Preprocessor::HandleUndefDirective(Token &UndefTok) { ReadMacroName(MacroNameTok, 2); // Error reading macro name? If so, diagnostic already issued. - if (MacroNameTok.is(tok::eom)) + if (MacroNameTok.is(tok::eod)) return; // Check to see if this is the last token on the #undef line. @@ -1619,7 +1631,7 @@ void Preprocessor::HandleIfdefDirective(Token &Result, bool isIfndef, ReadMacroName(MacroNameTok); // Error reading macro name? If so, diagnostic already issued. - if (MacroNameTok.is(tok::eom)) { + if (MacroNameTok.is(tok::eod)) { // Skip code until we get to #endif. This helps with recovery by not // emitting an error when the #endif is reached. SkipExcludedConditionalBlock(DirectiveTok.getLocation(), diff --git a/contrib/llvm/tools/clang/lib/Lex/PPExpressions.cpp b/contrib/llvm/tools/clang/lib/Lex/PPExpressions.cpp index 1451c5a..8fcfc70 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PPExpressions.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PPExpressions.cpp @@ -180,7 +180,7 @@ static bool EvaluateValue(PPValue &Result, Token &PeekTok, DefinedTracker &DT, default: // Non-value token. PP.Diag(PeekTok, diag::err_pp_expr_bad_token_start_expr); return true; - case tok::eom: + case tok::eod: case tok::r_paren: // If there is no expression, report and exit. PP.Diag(PeekTok, diag::err_pp_expected_value_in_expr); @@ -372,7 +372,7 @@ static bool EvaluateValue(PPValue &Result, Token &PeekTok, DefinedTracker &DT, /// token. This returns: /// ~0 - Invalid token. /// 14 -> 3 - various operators. 
-/// 0 - 'eom' or ')' +/// 0 - 'eod' or ')' static unsigned getPrecedence(tok::TokenKind Kind) { switch (Kind) { default: return ~0U; @@ -397,8 +397,8 @@ static unsigned getPrecedence(tok::TokenKind Kind) { case tok::question: return 4; case tok::comma: return 3; case tok::colon: return 2; - case tok::r_paren: return 0; // Lowest priority, end of expr. - case tok::eom: return 0; // Lowest priority, end of macro. + case tok::r_paren: return 0;// Lowest priority, end of expr. + case tok::eod: return 0;// Lowest priority, end of directive. } } @@ -713,7 +713,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) { DefinedTracker DT; if (EvaluateValue(ResVal, Tok, DT, true, *this)) { // Parse error, skip the rest of the macro line. - if (Tok.isNot(tok::eom)) + if (Tok.isNot(tok::eod)) DiscardUntilEndOfDirective(); // Restore 'DisableMacroExpansion'. @@ -724,7 +724,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) { // If we are at the end of the expression after just parsing a value, there // must be no (unparenthesized) binary operators involved, so we can exit // directly. - if (Tok.is(tok::eom)) { + if (Tok.is(tok::eod)) { // If the expression we parsed was of the form !defined(macro), return the // macro in IfNDefMacro. if (DT.State == DefinedTracker::NotDefinedMacro) @@ -740,7 +740,7 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) { if (EvaluateDirectiveSubExpr(ResVal, getPrecedence(tok::question), Tok, true, *this)) { // Parse error, skip the rest of the macro line. - if (Tok.isNot(tok::eom)) + if (Tok.isNot(tok::eod)) DiscardUntilEndOfDirective(); // Restore 'DisableMacroExpansion'. @@ -748,9 +748,9 @@ EvaluateDirectiveExpression(IdentifierInfo *&IfNDefMacro) { return false; } - // If we aren't at the tok::eom token, something bad happened, like an extra + // If we aren't at the tok::eod token, something bad happened, like an extra // ')' token. - if (Tok.isNot(tok::eom)) { + if (Tok.isNot(tok::eod)) { Diag(Tok, diag::err_pp_expected_eol); DiscardUntilEndOfDirective(); } diff --git a/contrib/llvm/tools/clang/lib/Lex/PPLexerChange.cpp b/contrib/llvm/tools/clang/lib/Lex/PPLexerChange.cpp index eef42b6..bf0a7fb 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PPLexerChange.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PPLexerChange.cpp @@ -301,7 +301,7 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) { // We handle this by scanning for the closest real lexer, switching it to // raw mode and preprocessor mode. This will cause it to return \n as an - // explicit EOM token. + // explicit EOD token. PreprocessorLexer *FoundLexer = 0; bool LexerWasInPPMode = false; for (unsigned i = 0, e = IncludeMacroStack.size(); i != e; ++i) { @@ -309,11 +309,11 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) { if (ISI.ThePPLexer == 0) continue; // Scan for a real lexer. // Once we find a real lexer, mark it as raw mode (disabling macro - // expansions) and preprocessor mode (return EOM). We know that the lexer + // expansions) and preprocessor mode (return EOD). We know that the lexer // was *not* in raw mode before, because the macro that the comment came // from was expanded. However, it could have already been in preprocessor // mode (#if COMMENT) in which case we have to return it to that mode and - // return EOM. + // return EOD. 
FoundLexer = ISI.ThePPLexer; FoundLexer->LexingRawMode = true; LexerWasInPPMode = FoundLexer->ParsingPreprocessorDirective; @@ -326,22 +326,22 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) { // the next token. if (!HandleEndOfTokenLexer(Tok)) Lex(Tok); - // Discarding comments as long as we don't have EOF or EOM. This 'comments + // Discarding comments as long as we don't have EOF or EOD. This 'comments // out' the rest of the line, including any tokens that came from other macros // that were active, as in: // #define submacro a COMMENT b // submacro c // which should lex to 'a' only: 'b' and 'c' should be removed. - while (Tok.isNot(tok::eom) && Tok.isNot(tok::eof)) + while (Tok.isNot(tok::eod) && Tok.isNot(tok::eof)) Lex(Tok); - // If we got an eom token, then we successfully found the end of the line. - if (Tok.is(tok::eom)) { + // If we got an eod token, then we successfully found the end of the line. + if (Tok.is(tok::eod)) { assert(FoundLexer && "Can't get end of line without an active lexer"); // Restore the lexer back to normal mode instead of raw mode. FoundLexer->LexingRawMode = false; - // If the lexer was already in preprocessor mode, just return the EOM token + // If the lexer was already in preprocessor mode, just return the EOD token // to finish the preprocessor line. if (LexerWasInPPMode) return; @@ -352,7 +352,7 @@ void Preprocessor::HandleMicrosoftCommentPaste(Token &Tok) { // If we got an EOF token, then we reached the end of the token stream but // didn't find an explicit \n. This can only happen if there was no lexer - // active (an active lexer would return EOM at EOF if there was no \n in + // active (an active lexer would return EOD at EOF if there was no \n in // preprocessor directive mode), so just return EOF as our token. - assert(!FoundLexer && "Lexer should return EOM before EOF in PP mode"); + assert(!FoundLexer && "Lexer should return EOD before EOF in PP mode"); } diff --git a/contrib/llvm/tools/clang/lib/Lex/PPMacroExpansion.cpp b/contrib/llvm/tools/clang/lib/Lex/PPMacroExpansion.cpp index ba92614..d6e0d3a 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PPMacroExpansion.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PPMacroExpansion.cpp @@ -176,8 +176,6 @@ bool Preprocessor::isNextPPTokenLParen() { /// expanded as a macro, handle it and return the next token as 'Identifier'. bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier, MacroInfo *MI) { - if (Callbacks) Callbacks->MacroExpands(Identifier, MI); - // If this is a macro expansion in the "#if !defined(x)" line for the file, // then the macro could expand to different things in other contexts, we need // to disable the optimization in this case. @@ -185,6 +183,7 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier, // If this is a builtin macro, like __LINE__ or _Pragma, handle it specially. if (MI->isBuiltinMacro()) { + if (Callbacks) Callbacks->MacroExpands(Identifier, MI); ExpandBuiltinMacro(Identifier); return false; } @@ -225,8 +224,13 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier, // Notice that this macro has been used. markMacroAsUsed(MI); + if (Callbacks) Callbacks->MacroExpands(Identifier, MI); + // If we started lexing a macro, enter the macro expansion body. + // Remember where the token is instantiated. + SourceLocation InstantiateLoc = Identifier.getLocation(); + // If this macro expands to no tokens, don't bother to push it onto the // expansion stack, only to take it right back off. 
if (MI->getNumTokens() == 0) { @@ -249,6 +253,7 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier, if (HadLeadingSpace) Identifier.setFlag(Token::LeadingSpace); } Identifier.setFlag(Token::LeadingEmptyMacro); + LastEmptyMacroInstantiationLoc = InstantiateLoc; ++NumFastMacroExpanded; return false; @@ -267,9 +272,6 @@ bool Preprocessor::HandleMacroExpandedIdentifier(Token &Identifier, bool isAtStartOfLine = Identifier.isAtStartOfLine(); bool hasLeadingSpace = Identifier.hasLeadingSpace(); - // Remember where the token is instantiated. - SourceLocation InstantiateLoc = Identifier.getLocation(); - // Replace the result token. Identifier = MI->getReplacementToken(0); @@ -355,9 +357,9 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName, LexUnexpandedToken(Tok); } - if (Tok.is(tok::eof) || Tok.is(tok::eom)) { // "#if f(<eof>" & "#if f(\n" + if (Tok.is(tok::eof) || Tok.is(tok::eod)) { // "#if f(<eof>" & "#if f(\n" Diag(MacroName, diag::err_unterm_macro_invoc); - // Do not lose the EOF/EOM. Return it to the client. + // Do not lose the EOF/EOD. Return it to the client. MacroName = Tok; return 0; } else if (Tok.is(tok::r_paren)) { @@ -410,9 +412,9 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName, return 0; } - // Empty arguments are standard in C99 and supported as an extension in + // Empty arguments are standard in C99 and C++0x, and are supported as an extension in // other modes. - if (ArgTokens.size() == ArgTokenStart && !Features.C99) + if (ArgTokens.size() == ArgTokenStart && !Features.C99 && !Features.CPlusPlus0x) Diag(Tok, diag::ext_empty_fnmacro_arg); // Add a marker EOF token to the end of the token list for this argument. @@ -530,6 +532,7 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { return llvm::StringSwitch<bool>(II->getName()) .Case("attribute_analyzer_noreturn", true) + .Case("attribute_availability", true) .Case("attribute_cf_returns_not_retained", true) .Case("attribute_cf_returns_retained", true) .Case("attribute_deprecated_with_message", true) @@ -540,12 +543,14 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { .Case("attribute_ns_consumed", true) .Case("attribute_cf_consumed", true) .Case("attribute_objc_ivar_unused", true) + .Case("attribute_objc_method_family", true) .Case("attribute_overloadable", true) .Case("attribute_unavailable_with_message", true) .Case("blocks", LangOpts.Blocks) .Case("cxx_exceptions", LangOpts.Exceptions) .Case("cxx_rtti", LangOpts.RTTI) .Case("enumerator_attributes", true) + .Case("generic_selections", true) .Case("objc_nonfragile_abi", LangOpts.ObjCNonFragileABI) .Case("objc_weak_class", LangOpts.ObjCNonFragileABI) .Case("ownership_holds", true) @@ -556,10 +561,14 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { .Case("cxx_auto_type", LangOpts.CPlusPlus0x) .Case("cxx_decltype", LangOpts.CPlusPlus0x) .Case("cxx_default_function_template_args", LangOpts.CPlusPlus0x) + .Case("cxx_delegating_constructors", LangOpts.CPlusPlus0x) .Case("cxx_deleted_functions", LangOpts.CPlusPlus0x) .Case("cxx_inline_namespaces", LangOpts.CPlusPlus0x) //.Case("cxx_lambdas", false) + .Case("cxx_noexcept", LangOpts.CPlusPlus0x) //.Case("cxx_nullptr", false) + .Case("cxx_override_control", LangOpts.CPlusPlus0x) + .Case("cxx_range_for", LangOpts.CPlusPlus0x) .Case("cxx_reference_qualified_functions", LangOpts.CPlusPlus0x) .Case("cxx_rvalue_references", LangOpts.CPlusPlus0x) .Case("cxx_strong_enums", LangOpts.CPlusPlus0x) @@ 
-581,10 +590,11 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) { .Case("is_convertible_to", LangOpts.CPlusPlus) .Case("is_empty", LangOpts.CPlusPlus) .Case("is_enum", LangOpts.CPlusPlus) + .Case("is_literal", LangOpts.CPlusPlus) .Case("is_pod", LangOpts.CPlusPlus) .Case("is_polymorphic", LangOpts.CPlusPlus) + .Case("is_trivial", LangOpts.CPlusPlus) .Case("is_union", LangOpts.CPlusPlus) - .Case("is_literal", LangOpts.CPlusPlus) .Case("tls", PP.getTargetInfo().isTLSSupported()) .Default(false); } @@ -626,8 +636,8 @@ static bool EvaluateHasIncludeCommon(Token &Tok, SourceLocation EndLoc; switch (Tok.getKind()) { - case tok::eom: - // If the token kind is EOM, the error has already been diagnosed. + case tok::eod: + // If the token kind is EOD, the error has already been diagnosed. return false; case tok::angle_string_literal: @@ -644,7 +654,7 @@ static bool EvaluateHasIncludeCommon(Token &Tok, // case, glue the tokens together into FilenameBuffer and interpret those. FilenameBuffer.push_back('<'); if (PP.ConcatenateIncludeName(FilenameBuffer, EndLoc)) - return false; // Found <eom> but no ">"? Diagnostic already emitted. + return false; // Found <eod> but no ">"? Diagnostic already emitted. Filename = FilenameBuffer.str(); break; default: @@ -660,7 +670,8 @@ static bool EvaluateHasIncludeCommon(Token &Tok, // Search include directories. const DirectoryLookup *CurDir; - const FileEntry *File = PP.LookupFile(Filename, isAngled, LookupFrom, CurDir); + const FileEntry *File = + PP.LookupFile(Filename, isAngled, LookupFrom, CurDir, NULL, NULL); // Get the result value. Result = true means the file exists. bool Result = File != 0; diff --git a/contrib/llvm/tools/clang/lib/Lex/PTHLexer.cpp b/contrib/llvm/tools/clang/lib/Lex/PTHLexer.cpp index 975753b..e5ef0fd 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PTHLexer.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PTHLexer.cpp @@ -125,7 +125,7 @@ LexNextToken: return PP->Lex(Tok); } - if (TKind == tok::eom) { + if (TKind == tok::eod) { assert(ParsingPreprocessorDirective); ParsingPreprocessorDirective = false; return; @@ -527,7 +527,7 @@ PTHManager *PTHManager::Create(const std::string &file, Diagnostic &Diags) { // Get the number of IdentifierInfos and pre-allocate the identifier cache. uint32_t NumIds = ReadLE32(IData); - // Pre-allocate the peristent ID -> IdentifierInfo* cache. We use calloc() + // Pre-allocate the persistent ID -> IdentifierInfo* cache. We use calloc() // so that we in the best case only zero out memory once when the OS returns // us new pages. IdentifierInfo** PerIDCache = 0; diff --git a/contrib/llvm/tools/clang/lib/Lex/Pragma.cpp b/contrib/llvm/tools/clang/lib/Lex/Pragma.cpp index 80d3bb1..0c18091 100644 --- a/contrib/llvm/tools/clang/lib/Lex/Pragma.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/Pragma.cpp @@ -229,8 +229,8 @@ void Preprocessor::HandleMicrosoft__pragma(Token &Tok) { PragmaToks.front().setFlag(Token::LeadingSpace); - // Replace the ')' with an EOM to mark the end of the pragma. - PragmaToks.back().setKind(tok::eom); + // Replace the ')' with an EOD to mark the end of the pragma. + PragmaToks.back().setKind(tok::eod); Token *TokArray = new Token[PragmaToks.size()]; std::copy(PragmaToks.begin(), PragmaToks.end(), TokArray); @@ -283,7 +283,7 @@ void Preprocessor::HandlePragmaPoison(Token &PoisonTok) { if (CurPPLexer) CurPPLexer->LexingRawMode = false; // If we reached the end of line, we're done. - if (Tok.is(tok::eom)) return; + if (Tok.is(tok::eod)) return; // Can only poison identifiers. 
if (Tok.isNot(tok::raw_identifier)) { @@ -348,8 +348,8 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) { Token FilenameTok; CurPPLexer->LexIncludeFilename(FilenameTok); - // If the token kind is EOM, the error has already been diagnosed. - if (FilenameTok.is(tok::eom)) + // If the token kind is EOD, the error has already been diagnosed. + if (FilenameTok.is(tok::eod)) return; // Reserve a buffer to get the spelling. @@ -368,7 +368,7 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) { // Search include directories for this file. const DirectoryLookup *CurDir; - const FileEntry *File = LookupFile(Filename, isAngled, 0, CurDir); + const FileEntry *File = LookupFile(Filename, isAngled, 0, CurDir, NULL, NULL); if (File == 0) { Diag(FilenameTok, diag::err_pp_file_not_found) << Filename; return; @@ -381,7 +381,7 @@ void Preprocessor::HandlePragmaDependency(Token &DependencyTok) { // Lex tokens at the end of the message and include them in the message. std::string Message; Lex(DependencyTok); - while (DependencyTok.isNot(tok::eom)) { + while (DependencyTok.isNot(tok::eod)) { Message += getSpelling(DependencyTok) + " "; Lex(DependencyTok); } @@ -470,7 +470,7 @@ void Preprocessor::HandlePragmaComment(Token &Tok) { } Lex(Tok); // eat the r_paren. - if (Tok.isNot(tok::eom)) { + if (Tok.isNot(tok::eod)) { Diag(Tok.getLocation(), diag::err_pragma_comment_malformed); return; } @@ -541,7 +541,7 @@ void Preprocessor::HandlePragmaMessage(Token &Tok) { Lex(Tok); // eat the r_paren. } - if (Tok.isNot(tok::eom)) { + if (Tok.isNot(tok::eod)) { Diag(Tok.getLocation(), diag::err_pragma_message_malformed); return; } @@ -737,10 +737,10 @@ bool Preprocessor::LexOnOffSwitch(tok::OnOffSwitch &Result) { return true; } - // Verify that this is followed by EOM. + // Verify that this is followed by EOD. 
LexUnexpandedToken(Tok); - if (Tok.isNot(tok::eom)) - Diag(Tok, diag::ext_pragma_syntax_eom); + if (Tok.isNot(tok::eod)) + Diag(Tok, diag::ext_pragma_syntax_eod); return false; } @@ -883,7 +883,7 @@ public: PP.LexUnexpandedToken(Tok); } - if (Tok.isNot(tok::eom)) { + if (Tok.isNot(tok::eod)) { PP.Diag(Tok.getLocation(), diag::warn_pragma_diagnostic_invalid_token); return; } diff --git a/contrib/llvm/tools/clang/lib/Lex/PreprocessingRecord.cpp b/contrib/llvm/tools/clang/lib/Lex/PreprocessingRecord.cpp index 3a43ac1..9555611 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PreprocessingRecord.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PreprocessingRecord.cpp @@ -146,12 +146,15 @@ void PreprocessingRecord::MacroUndefined(const Token &Id, MacroDefinitions.erase(Pos); } -void PreprocessingRecord::InclusionDirective(SourceLocation HashLoc, - const clang::Token &IncludeTok, - llvm::StringRef FileName, - bool IsAngled, - const FileEntry *File, - clang::SourceLocation EndLoc) { +void PreprocessingRecord::InclusionDirective( + SourceLocation HashLoc, + const clang::Token &IncludeTok, + llvm::StringRef FileName, + bool IsAngled, + const FileEntry *File, + clang::SourceLocation EndLoc, + llvm::StringRef SearchPath, + llvm::StringRef RelativePath) { InclusionDirective::InclusionKind Kind = InclusionDirective::Include; switch (IncludeTok.getIdentifierInfo()->getPPKeywordID()) { diff --git a/contrib/llvm/tools/clang/lib/Lex/Preprocessor.cpp b/contrib/llvm/tools/clang/lib/Lex/Preprocessor.cpp index 6fe414b..31fd667 100644 --- a/contrib/llvm/tools/clang/lib/Lex/Preprocessor.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/Preprocessor.cpp @@ -89,6 +89,7 @@ Preprocessor::Preprocessor(Diagnostic &diags, const LangOptions &opts, // "Poison" __VA_ARGS__, which can only appear in the expansion of a macro. // This gets unpoisoned where it is allowed. (Ident__VA_ARGS__ = getIdentifierInfo("__VA_ARGS__"))->setIsPoisoned(); + SetPoisonReason(Ident__VA_ARGS__,diag::ext_pp_bad_vaargs_use); // Initialize the pragma handlers. PragmaHandlers = new PragmaNamespace(llvm::StringRef()); @@ -96,6 +97,23 @@ Preprocessor::Preprocessor(Diagnostic &diags, const LangOptions &opts, // Initialize builtin macros like __LINE__ and friends. RegisterBuiltinMacros(); + + if(Features.Borland) { + Ident__exception_info = getIdentifierInfo("_exception_info"); + Ident___exception_info = getIdentifierInfo("__exception_info"); + Ident_GetExceptionInfo = getIdentifierInfo("GetExceptionInformation"); + Ident__exception_code = getIdentifierInfo("_exception_code"); + Ident___exception_code = getIdentifierInfo("__exception_code"); + Ident_GetExceptionCode = getIdentifierInfo("GetExceptionCode"); + Ident__abnormal_termination = getIdentifierInfo("_abnormal_termination"); + Ident___abnormal_termination = getIdentifierInfo("__abnormal_termination"); + Ident_AbnormalTermination = getIdentifierInfo("AbnormalTermination"); + } else { + Ident__exception_info = Ident__exception_code = Ident__abnormal_termination = 0; + Ident___exception_info = Ident___exception_code = Ident___abnormal_termination = 0; + Ident_GetExceptionInfo = Ident_GetExceptionCode = Ident_AbnormalTermination = 0; + } + } Preprocessor::~Preprocessor() { @@ -278,7 +296,6 @@ void Preprocessor::CodeCompleteNaturalLanguage() { CodeComplete->CodeCompleteNaturalLanguage(); } - /// getSpelling - This method is used to get the spelling of a token into a /// SmallVector. Note that the returned StringRef may not point to the /// supplied buffer if a copy can be avoided. 
@@ -400,6 +417,34 @@ IdentifierInfo *Preprocessor::LookUpIdentifierInfo(Token &Identifier) const { return II; } +void Preprocessor::SetPoisonReason(IdentifierInfo *II, unsigned DiagID) { + PoisonReasons[II] = DiagID; +} + +void Preprocessor::PoisonSEHIdentifiers(bool Poison) { + assert(Ident__exception_code && Ident__exception_info); + assert(Ident___exception_code && Ident___exception_info); + Ident__exception_code->setIsPoisoned(Poison); + Ident___exception_code->setIsPoisoned(Poison); + Ident_GetExceptionCode->setIsPoisoned(Poison); + Ident__exception_info->setIsPoisoned(Poison); + Ident___exception_info->setIsPoisoned(Poison); + Ident_GetExceptionInfo->setIsPoisoned(Poison); + Ident__abnormal_termination->setIsPoisoned(Poison); + Ident___abnormal_termination->setIsPoisoned(Poison); + Ident_AbnormalTermination->setIsPoisoned(Poison); +} + +void Preprocessor::HandlePoisonedIdentifier(Token & Identifier) { + assert(Identifier.getIdentifierInfo() && + "Can't handle identifiers without identifier info!"); + llvm::DenseMap<IdentifierInfo*,unsigned>::const_iterator it = + PoisonReasons.find(Identifier.getIdentifierInfo()); + if(it == PoisonReasons.end()) + Diag(Identifier, diag::err_pp_used_poisoned_id); + else + Diag(Identifier,it->second) << Identifier.getIdentifierInfo(); +} /// HandleIdentifier - This callback is invoked when the lexer reads an /// identifier. This callback looks up the identifier in the map and/or @@ -418,10 +463,7 @@ void Preprocessor::HandleIdentifier(Token &Identifier) { // If this identifier was poisoned, and if it was not produced from a macro // expansion, emit an error. if (II.isPoisoned() && CurPPLexer) { - if (&II != Ident__VA_ARGS__) // We warn about __VA_ARGS__ with poisoning. - Diag(Identifier, diag::err_pp_used_poisoned_id); - else - Diag(Identifier, diag::ext_pp_bad_vaargs_use); + HandlePoisonedIdentifier(Identifier); } // If this is a macro to be expanded, do it. diff --git a/contrib/llvm/tools/clang/lib/Lex/PreprocessorLexer.cpp b/contrib/llvm/tools/clang/lib/Lex/PreprocessorLexer.cpp index e005c49..808a81b 100644 --- a/contrib/llvm/tools/clang/lib/Lex/PreprocessorLexer.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/PreprocessorLexer.cpp @@ -34,7 +34,7 @@ void PreprocessorLexer::LexIncludeFilename(Token &FilenameTok) { ParsingFilename = false; // No filename? - if (FilenameTok.is(tok::eom)) + if (FilenameTok.is(tok::eod)) PP->Diag(FilenameTok.getLocation(), diag::err_pp_expects_filename); } diff --git a/contrib/llvm/tools/clang/lib/Lex/TokenLexer.cpp b/contrib/llvm/tools/clang/lib/Lex/TokenLexer.cpp index caa44bf..65aff0d 100644 --- a/contrib/llvm/tools/clang/lib/Lex/TokenLexer.cpp +++ b/contrib/llvm/tools/clang/lib/Lex/TokenLexer.cpp @@ -367,11 +367,7 @@ void TokenLexer::Lex(Token &Tok) { // won't be handled by Preprocessor::HandleIdentifier because this is coming // from a macro expansion. if (II->isPoisoned() && TokenIsFromPaste) { - // We warn about __VA_ARGS__ with poisoning. - if (II->isStr("__VA_ARGS__")) - PP.Diag(Tok, diag::ext_pp_bad_vaargs_use); - else - PP.Diag(Tok, diag::err_pp_used_poisoned_id); + PP.HandlePoisonedIdentifier(Tok); } if (!DisableMacroExpansion && II->isHandleIdentifierCase()) @@ -546,7 +542,7 @@ unsigned TokenLexer::isNextTokenLParen() const { /// isParsingPreprocessorDirective - Return true if we are in the middle of a /// preprocessor directive. 
bool TokenLexer::isParsingPreprocessorDirective() const { - return Tokens[NumTokens-1].is(tok::eom) && !isAtEnd(); + return Tokens[NumTokens-1].is(tok::eod) && !isAtEnd(); } /// HandleMicrosoftCommentPaste - In microsoft compatibility mode, /##/ pastes |
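The recurring change in the hunks above threads two optional out-parameters, SearchPath and RelativePath, from HeaderSearch::LookupFile up through Preprocessor::HandleIncludeDirective and into PPCallbacks::InclusionDirective, so a client can learn which search directory satisfied an #include and the included name relative to that directory. Below is a minimal client-side sketch, assuming the PPCallbacks interface and Preprocessor::addPPCallbacks registration of the Clang tree this diff targets (circa Clang 2.9); the class name IncludePathLogger and the llvm::errs() output are illustrative only, not part of the change.

#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/Preprocessor.h"
#include "llvm/Support/raw_ostream.h"

namespace {
// Logs where each #include was resolved, using the SearchPath/RelativePath
// arguments added by this change.
class IncludePathLogger : public clang::PPCallbacks {
public:
  virtual void InclusionDirective(clang::SourceLocation HashLoc,
                                  const clang::Token &IncludeTok,
                                  llvm::StringRef FileName,
                                  bool IsAngled,
                                  const clang::FileEntry *File,
                                  clang::SourceLocation EndLoc,
                                  llvm::StringRef SearchPath,
                                  llvm::StringRef RelativePath) {
    // SearchPath is the directory that satisfied the lookup (no trailing '/');
    // RelativePath is the included name relative to it. Both are empty when
    // the include was absolute or the lookup ran without callbacks attached.
    llvm::errs() << (IsAngled ? '<' : '"') << FileName
                 << (IsAngled ? '>' : '"') << " resolved in '" << SearchPath
                 << "' as '" << RelativePath << "'\n";
  }
};
} // end anonymous namespace

// Registration sketch; the Preprocessor takes ownership of the callback:
//   void attachLogger(clang::Preprocessor &PP) {
//     PP.addPPCallbacks(new IncludePathLogger());
//   }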