Diffstat (limited to 'lib/Lex')
-rw-r--r--   lib/Lex/PPDirectives.cpp |  9
-rw-r--r--   lib/Lex/Pragma.cpp       | 68
-rw-r--r--   lib/Lex/TokenLexer.cpp   |  5
3 files changed, 42 insertions, 40 deletions
diff --git a/lib/Lex/PPDirectives.cpp b/lib/Lex/PPDirectives.cpp
index 0f0d25b..3e871ae 100644
--- a/lib/Lex/PPDirectives.cpp
+++ b/lib/Lex/PPDirectives.cpp
@@ -86,6 +86,7 @@ void Preprocessor::DiscardUntilEndOfDirective() {
   Token Tmp;
   do {
     LexUnexpandedToken(Tmp);
+    assert(Tmp.isNot(tok::eof) && "EOF seen while discarding directive tokens");
   } while (Tmp.isNot(tok::eom));
 }
 
@@ -167,10 +168,12 @@ void Preprocessor::CheckEndOfDirective(const char *DirType, bool EnableMacros) {
 
   if (Tmp.isNot(tok::eom)) {
     // Add a fixit in GNU/C99/C++ mode.  Don't offer a fixit for strict-C89,
-    // because it is more trouble than it is worth to insert /**/ and check that
-    // there is no /**/ in the range also.
+    // or if this is a macro-style preprocessing directive, because it is more
+    // trouble than it is worth to insert /**/ and check that there is no /**/
+    // in the range also.
     FixItHint Hint;
-    if (Features.GNUMode || Features.C99 || Features.CPlusPlus)
+    if ((Features.GNUMode || Features.C99 || Features.CPlusPlus) &&
+        !CurTokenLexer)
       Hint = FixItHint::CreateInsertion(Tmp.getLocation(),"//");
     Diag(Tmp, diag::ext_pp_extra_tokens_at_eol) << DirType << Hint;
     DiscardUntilEndOfDirective();
diff --git a/lib/Lex/Pragma.cpp b/lib/Lex/Pragma.cpp
index f0475bc..80d3bb1 100644
--- a/lib/Lex/Pragma.cpp
+++ b/lib/Lex/Pragma.cpp
@@ -110,7 +110,8 @@ void Preprocessor::HandlePragmaDirective(unsigned Introducer) {
   PragmaHandlers->HandlePragma(*this, PragmaIntroducerKind(Introducer), Tok);
 
   // If the pragma handler didn't read the rest of the line, consume it now.
-  if (CurPPLexer && CurPPLexer->ParsingPreprocessorDirective)
+  if ((CurTokenLexer && CurTokenLexer->isParsingPreprocessorDirective())
+      || (CurPPLexer && CurPPLexer->ParsingPreprocessorDirective))
     DiscardUntilEndOfDirective();
 }
 
@@ -174,7 +175,22 @@ void Preprocessor::Handle_Pragma(Token &Tok) {
     }
   }
 
-  Handle_Pragma(PIK__Pragma, StrVal, PragmaLoc, RParenLoc);
+  // Plop the string (including the newline and trailing null) into a buffer
+  // where we can lex it.
+  Token TmpTok;
+  TmpTok.startToken();
+  CreateString(&StrVal[0], StrVal.size(), TmpTok);
+  SourceLocation TokLoc = TmpTok.getLocation();
+
+  // Make and enter a lexer object so that we lex and expand the tokens just
+  // like any others.
+  Lexer *TL = Lexer::Create_PragmaLexer(TokLoc, PragmaLoc, RParenLoc,
+                                        StrVal.size(), *this);
+
+  EnterSourceFileWithLexer(TL, 0);
+
+  // With everything set up, lex this as a #pragma directive.
+  HandlePragmaDirective(PIK__Pragma);
 
   // Finally, return whatever came after the pragma directive.
   return Lex(Tok);
@@ -193,16 +209,16 @@ void Preprocessor::HandleMicrosoft__pragma(Token &Tok) {
     return;
   }
 
-  // Get the tokens enclosed within the __pragma().
+  // Get the tokens enclosed within the __pragma(), as well as the final ')'.
   llvm::SmallVector<Token, 32> PragmaToks;
   int NumParens = 0;
   Lex(Tok);
   while (Tok.isNot(tok::eof)) {
+    PragmaToks.push_back(Tok);
     if (Tok.is(tok::l_paren))
       NumParens++;
     else if (Tok.is(tok::r_paren) && NumParens-- == 0)
       break;
-    PragmaToks.push_back(Tok);
     Lex(Tok);
   }
 
@@ -211,45 +227,23 @@
   if (Tok.is(tok::eof)) {
     Diag(PragmaLoc, diag::err_unterminated___pragma);
     return;
   }
 
-  // Build the pragma string.
-  std::string StrVal = " ";
-  for (llvm::SmallVector<Token, 32>::iterator I =
-       PragmaToks.begin(), E = PragmaToks.end(); I != E; ++I) {
-    StrVal += getSpelling(*I);
-  }
-
-  SourceLocation RParenLoc = Tok.getLocation();
-
-  Handle_Pragma(PIK___pragma, StrVal, PragmaLoc, RParenLoc);
+  PragmaToks.front().setFlag(Token::LeadingSpace);
 
-  // Finally, return whatever came after the pragma directive.
-  return Lex(Tok);
-}
+  // Replace the ')' with an EOM to mark the end of the pragma.
+  PragmaToks.back().setKind(tok::eom);
 
-void Preprocessor::Handle_Pragma(unsigned Introducer,
-                                 const std::string &StrVal,
-                                 SourceLocation PragmaLoc,
-                                 SourceLocation RParenLoc) {
+  Token *TokArray = new Token[PragmaToks.size()];
+  std::copy(PragmaToks.begin(), PragmaToks.end(), TokArray);
 
-  // Plop the string (including the newline and trailing null) into a buffer
-  // where we can lex it.
-  Token TmpTok;
-  TmpTok.startToken();
-  CreateString(&StrVal[0], StrVal.size(), TmpTok);
-  SourceLocation TokLoc = TmpTok.getLocation();
-
-  // Make and enter a lexer object so that we lex and expand the tokens just
-  // like any others.
-  Lexer *TL = Lexer::Create_PragmaLexer(TokLoc, PragmaLoc, RParenLoc,
-                                        StrVal.size(), *this);
-
-  EnterSourceFileWithLexer(TL, 0);
+  // Push the tokens onto the stack.
+  EnterTokenStream(TokArray, PragmaToks.size(), true, true);
 
   // With everything set up, lex this as a #pragma directive.
-  HandlePragmaDirective(Introducer);
-}
-
+  HandlePragmaDirective(PIK___pragma);
 
+  // Finally, return whatever came after the pragma directive.
+  return Lex(Tok);
+}
 
 /// HandlePragmaOnce - Handle #pragma once.  OnceTok is the 'once'.
 ///
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index ea39b47..caa44bf 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -543,6 +543,11 @@ unsigned TokenLexer::isNextTokenLParen() const {
   return Tokens[CurToken].is(tok::l_paren);
 }
 
+/// isParsingPreprocessorDirective - Return true if we are in the middle of a
+/// preprocessor directive.
+bool TokenLexer::isParsingPreprocessorDirective() const {
+  return Tokens[NumTokens-1].is(tok::eom) && !isAtEnd();
+}
 
 /// HandleMicrosoftCommentPaste - In microsoft compatibility mode, /##/ pastes
 /// together to form a comment that comments out everything in the current
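Note on the __pragma rework above: instead of rebuilding a pragma string and re-lexing it, the new HandleMicrosoft__pragma keeps every token up to and including the balancing ')', retags that ')' as an end-of-directive (eom) token, and pushes the array back onto the preprocessor with EnterTokenStream. The standalone C++ sketch below mimics only the paren-balancing collection step under simplified assumptions; TokKind, Token, and collectPragmaTokens here are illustrative stand-ins, not Clang's actual classes.

// Minimal standalone sketch (not Clang code): gather the tokens of a
// __pragma(...) up to and including the balancing ')', then retag that ')'
// as an end-of-directive marker, mirroring the loop added in the diff above.
#include <cassert>
#include <iostream>
#include <string>
#include <vector>

enum class TokKind { Identifier, LParen, RParen, Eof, Eom };

struct Token {
  TokKind Kind;
  std::string Spelling;
};

// Returns the collected tokens with the final ')' rewritten to Eom, or an
// empty vector if Eof is reached first (the unterminated-__pragma case).
std::vector<Token> collectPragmaTokens(const std::vector<Token> &Input) {
  std::vector<Token> PragmaToks;
  int NumParens = 0;
  for (const Token &Tok : Input) {
    if (Tok.Kind == TokKind::Eof)
      return {};                      // ran off the end: unterminated __pragma
    PragmaToks.push_back(Tok);        // keep every token, including the ')'
    if (Tok.Kind == TokKind::LParen)
      ++NumParens;
    else if (Tok.Kind == TokKind::RParen && NumParens-- == 0)
      break;                          // this ')' closes the __pragma(
  }
  assert(!PragmaToks.empty() && PragmaToks.back().Kind == TokKind::RParen);
  PragmaToks.back().Kind = TokKind::Eom;  // ')' now marks the end of the pragma
  return PragmaToks;
}

int main() {
  // Token stream following the '(' of: __pragma(warning(push))
  std::vector<Token> Input = {
      {TokKind::Identifier, "warning"}, {TokKind::LParen, "("},
      {TokKind::Identifier, "push"},    {TokKind::RParen, ")"},
      {TokKind::RParen, ")"},           {TokKind::Eof, ""}};

  for (const Token &T : collectPragmaTokens(Input))
    std::cout << T.Spelling << (T.Kind == TokKind::Eom ? "<eom>" : "") << '\n';
  return 0;
}

Reusing the ')' token as the eom marker (rather than synthesizing a fresh token) presumably lets the end-of-directive marker carry a real source location, which fits the diff's other change of letting TokenLexer report isParsingPreprocessorDirective() while such a stream is active; that reading is an inference from the patch, not something stated in it.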