Diffstat (limited to 'lib/Lex/TokenLexer.cpp')
-rw-r--r--  lib/Lex/TokenLexer.cpp  182
1 files changed, 153 insertions, 29 deletions
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index 65aff0d..8ff82f1 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -23,7 +23,7 @@ using namespace clang;
/// Create a TokenLexer for the specified macro with the specified actual
/// arguments. Note that this ctor takes ownership of the ActualArgs pointer.
-void TokenLexer::Init(Token &Tok, SourceLocation ILEnd, MacroArgs *Actuals) {
+void TokenLexer::Init(Token &Tok, SourceLocation ELEnd, MacroArgs *Actuals) {
// If the client is reusing a TokenLexer, make sure to free any memory
// associated with it.
destroy();
@@ -32,14 +32,36 @@ void TokenLexer::Init(Token &Tok, SourceLocation ILEnd, MacroArgs *Actuals) {
ActualArgs = Actuals;
CurToken = 0;
- InstantiateLocStart = Tok.getLocation();
- InstantiateLocEnd = ILEnd;
+ ExpandLocStart = Tok.getLocation();
+ ExpandLocEnd = ELEnd;
AtStartOfLine = Tok.isAtStartOfLine();
HasLeadingSpace = Tok.hasLeadingSpace();
Tokens = &*Macro->tokens_begin();
OwnsTokens = false;
DisableMacroExpansion = false;
NumTokens = Macro->tokens_end()-Macro->tokens_begin();
+ MacroExpansionStart = SourceLocation();
+
+ SourceManager &SM = PP.getSourceManager();
+ MacroStartSLocOffset = SM.getNextOffset();
+
+ if (NumTokens > 0) {
+ assert(Tokens[0].getLocation().isValid());
+ assert((Tokens[0].getLocation().isFileID() || Tokens[0].is(tok::comment)) &&
+ "Macro defined in macro?");
+ assert(ExpandLocStart.isValid());
+
+ // Reserve a source location entry chunk for the length of the macro
+ // definition. Tokens that get lexed directly from the definition will
+ // have their locations pointing inside this chunk. This is to avoid
+ // creating separate source location entries for each token.
+ SourceLocation macroStart = SM.getInstantiationLoc(Tokens[0].getLocation());
+ MacroDefStartInfo = SM.getDecomposedLoc(macroStart);
+ MacroExpansionStart = SM.createInstantiationLoc(macroStart,
+ ExpandLocStart,
+ ExpandLocEnd,
+ Macro->getDefinitionLength(SM));
+ }
// If this is a function-like macro, expand the arguments and change
// Tokens to point to the expanded tokens.
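A minimal preprocessor example of what the reserved chunk buys; TWICE and n are illustrative names, not part of this change:

#define TWICE(x) ((x) * 2)
int n = TWICE(21);

// Expanding TWICE now reserves a single expansion SLocEntry whose length is
// Macro->getDefinitionLength(SM), i.e. it spans the spelling "((x) * 2)".
// Tokens lexed straight out of the definition get locations of the form
// MacroExpansionStart + <byte offset of the token within that spelling>,
// instead of each expanded token allocating its own SLocEntry.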
@@ -69,9 +91,10 @@ void TokenLexer::Init(const Token *TokArray, unsigned NumToks,
DisableMacroExpansion = disableMacroExpansion;
NumTokens = NumToks;
CurToken = 0;
- InstantiateLocStart = InstantiateLocEnd = SourceLocation();
+ ExpandLocStart = ExpandLocEnd = SourceLocation();
AtStartOfLine = false;
HasLeadingSpace = false;
+ MacroExpansionStart = SourceLocation();
// Set HasLeadingSpace/AtStartOfLine so that the first token will be
// returned unmodified.
@@ -98,6 +121,8 @@ void TokenLexer::destroy() {
/// Expand the arguments of a function-like macro so that we can quickly
/// return preexpanded tokens from Tokens.
void TokenLexer::ExpandFunctionArguments() {
+ SourceManager &SM = PP.getSourceManager();
+
llvm::SmallVector<Token, 128> ResultToks;
// Loop through 'Tokens', expanding them into ResultToks. Keep
@@ -119,13 +144,19 @@ void TokenLexer::ExpandFunctionArguments() {
int ArgNo = Macro->getArgumentNum(Tokens[i+1].getIdentifierInfo());
assert(ArgNo != -1 && "Token following # is not an argument?");
+ SourceLocation hashInstLoc;
+ if (ExpandLocStart.isValid()) {
+ hashInstLoc = getMacroExpansionLocation(CurTok.getLocation());
+ assert(hashInstLoc.isValid() && "Expected '#' to come from definition");
+ }
+
Token Res;
if (CurTok.is(tok::hash)) // Stringify
- Res = ActualArgs->getStringifiedArgument(ArgNo, PP);
+ Res = ActualArgs->getStringifiedArgument(ArgNo, PP, hashInstLoc);
else {
// 'charify': don't bother caching these.
Res = MacroArgs::StringifyArgument(ActualArgs->getUnexpArgument(ArgNo),
- PP, true);
+ PP, true, hashInstLoc);
}
// The stringified/charified string leading space flag gets set to match
@@ -185,6 +216,27 @@ void TokenLexer::ExpandFunctionArguments() {
unsigned NumToks = MacroArgs::getArgLength(ResultArgToks);
ResultToks.append(ResultArgToks, ResultArgToks+NumToks);
+ // If the '##' came from expanding an argument, turn it into 'unknown'
+ // to avoid pasting.
+ for (unsigned i = FirstResult, e = ResultToks.size(); i != e; ++i) {
+ Token &Tok = ResultToks[i];
+ if (Tok.is(tok::hashhash))
+ Tok.setKind(tok::unknown);
+ }
+
+ if (ExpandLocStart.isValid()) {
+ SourceLocation curInst =
+ getMacroExpansionLocation(CurTok.getLocation());
+ assert(curInst.isValid() &&
+ "Expected arg identifier to come from definition");
+ for (unsigned i = FirstResult, e = ResultToks.size(); i != e; ++i) {
+ Token &Tok = ResultToks[i];
+ Tok.setLocation(SM.createMacroArgInstantiationLoc(Tok.getLocation(),
+ curInst,
+ Tok.getLength()));
+ }
+ }
+
// If any tokens were substituted from the argument, the whitespace
// before the first token should match the whitespace of the arg
// identifier.
@@ -220,6 +272,29 @@ void TokenLexer::ExpandFunctionArguments() {
ResultToks.append(ArgToks, ArgToks+NumToks);
+ // If the '##' came from expanding an argument, turn it into 'unknown'
+ // to avoid pasting.
+ for (unsigned i = ResultToks.size() - NumToks, e = ResultToks.size();
+ i != e; ++i) {
+ Token &Tok = ResultToks[i];
+ if (Tok.is(tok::hashhash))
+ Tok.setKind(tok::unknown);
+ }
+
+ if (ExpandLocStart.isValid()) {
+ SourceLocation curInst =
+ getMacroExpansionLocation(CurTok.getLocation());
+ assert(curInst.isValid() &&
+ "Expected arg identifier to come from definition");
+ for (unsigned i = ResultToks.size() - NumToks, e = ResultToks.size();
+ i != e; ++i) {
+ Token &Tok = ResultToks[i];
+ Tok.setLocation(SM.createMacroArgInstantiationLoc(Tok.getLocation(),
+ curInst,
+ Tok.getLength()));
+ }
+ }
+
// If this token (the macro argument) was supposed to get leading
// whitespace, transfer this information onto the first token of the
// expansion.
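A small example of what the argument-token remapping above is for; INC and the literal 2 are illustrative only:

#define INC(x) ((x) + 1)
int three = INC(2);

// The token '2' substituted into the expansion keeps its original location as
// the spelling location, but createMacroArgInstantiationLoc wraps it in a
// macro-argument expansion location anchored (roughly) at the position of 'x'
// inside the INC definition, so diagnostics can point both at the definition
// and at the argument the caller actually wrote.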
@@ -284,15 +359,11 @@ void TokenLexer::ExpandFunctionArguments() {
assert(!OwnsTokens && "This would leak if we already own the token list");
// This is deleted in the dtor.
NumTokens = ResultToks.size();
- llvm::BumpPtrAllocator &Alloc = PP.getPreprocessorAllocator();
- Token *Res =
- static_cast<Token *>(Alloc.Allocate(sizeof(Token)*ResultToks.size(),
- llvm::alignOf<Token>()));
- if (NumTokens)
- memcpy(Res, &ResultToks[0], NumTokens*sizeof(Token));
- Tokens = Res;
-
- // The preprocessor bump pointer owns these tokens, not us.
+ // The tokens will be added to the Preprocessor's cache and will be removed
+ // when this TokenLexer finishes lexing them.
+ Tokens = PP.cacheMacroExpandedTokens(this, ResultToks);
+
+ // The preprocessor cache of macro-expanded tokens owns these tokens, not us.
OwnsTokens = false;
}
}
@@ -317,6 +388,8 @@ void TokenLexer::Lex(Token &Tok) {
return PPCache.Lex(Tok);
}
+ SourceManager &SM = PP.getSourceManager();
+
// If this is the first token of the expanded result, we inherit spacing
// properties later.
bool isFirstToken = CurToken == 0;
@@ -327,7 +400,8 @@ void TokenLexer::Lex(Token &Tok) {
bool TokenIsFromPaste = false;
// If this token is followed by a token paste (##) operator, paste the tokens!
- if (!isAtEnd() && Tokens[CurToken].is(tok::hashhash)) {
+ // Note that ## is a normal token when not expanding a macro.
+ if (!isAtEnd() && Tokens[CurToken].is(tok::hashhash) && Macro) {
// When handling the microsoft /##/ extension, the final token is
// returned by PasteTokens, not the pasted token.
if (PasteTokens(Tok))
@@ -339,14 +413,25 @@ void TokenLexer::Lex(Token &Tok) {
// The token's current location indicates where the token was lexed from. We
// need this information to compute the spelling of the token, but any
// diagnostics for the expanded token should appear as if they came from
- // InstantiationLoc. Pull this information together into a new SourceLocation
+ // ExpansionLoc. Pull this information together into a new SourceLocation
// that captures all of this.
- if (InstantiateLocStart.isValid()) { // Don't do this for token streams.
- SourceManager &SM = PP.getSourceManager();
- Tok.setLocation(SM.createInstantiationLoc(Tok.getLocation(),
- InstantiateLocStart,
- InstantiateLocEnd,
- Tok.getLength()));
+ if (ExpandLocStart.isValid() && // Don't do this for token streams.
+ // Check that the token's location was not already set properly.
+ SM.isBeforeInSourceLocationOffset(Tok.getLocation(),
+ MacroStartSLocOffset)) {
+ SourceLocation instLoc;
+ if (Tok.is(tok::comment)) {
+ instLoc = SM.createInstantiationLoc(Tok.getLocation(),
+ ExpandLocStart,
+ ExpandLocEnd,
+ Tok.getLength());
+ } else {
+ instLoc = getMacroExpansionLocation(Tok.getLocation());
+ assert(instLoc.isValid() &&
+ "Location for token not coming from definition was not set!");
+ }
+
+ Tok.setLocation(instLoc);
}
// If this is the first token, set the lexical properties of the token to
@@ -384,9 +469,10 @@ void TokenLexer::Lex(Token &Tok) {
bool TokenLexer::PasteTokens(Token &Tok) {
llvm::SmallString<128> Buffer;
const char *ResultTokStrPtr = 0;
+ SourceLocation PasteOpLoc;
do {
// Consume the ## operator.
- SourceLocation PasteOpLoc = Tokens[CurToken].getLocation();
+ PasteOpLoc = Tokens[CurToken].getLocation();
++CurToken;
assert(!isAtEnd() && "No token on the RHS of a paste operator!");
@@ -484,12 +570,12 @@ bool TokenLexer::PasteTokens(Token &Tok) {
// Do not emit the error when preprocessing assembler code.
if (!PP.getLangOptions().AsmPreprocessor) {
- // Explicitly convert the token location to have proper instantiation
+ // Explicitly convert the token location to have proper expansion
// information so that the user knows where it came from.
SourceManager &SM = PP.getSourceManager();
SourceLocation Loc =
- SM.createInstantiationLoc(PasteOpLoc, InstantiateLocStart,
- InstantiateLocEnd, 2);
+ SM.createInstantiationLoc(PasteOpLoc, ExpandLocStart,
+ ExpandLocEnd, 2);
// If we're in microsoft extensions mode, downgrade this from a hard
// error to a warning that defaults to an error. This allows
// disabling it.
@@ -512,12 +598,30 @@ bool TokenLexer::PasteTokens(Token &Tok) {
// Transfer properties of the LHS over to the Result.
Result.setFlagValue(Token::StartOfLine , Tok.isAtStartOfLine());
Result.setFlagValue(Token::LeadingSpace, Tok.hasLeadingSpace());
-
+
// Finally, replace LHS with the result, consume the RHS, and iterate.
++CurToken;
Tok = Result;
} while (!isAtEnd() && Tokens[CurToken].is(tok::hashhash));
+ // The token's current location indicates where the token was lexed from. We
+ // need this information to compute the spelling of the token, but any
+ // diagnostics for the expanded token should appear as if the token was
+ // expanded from the (##) operator. Pull this information together into
+ // a new SourceLocation that captures all of this.
+ if (ExpandLocStart.isValid()) {
+ SourceManager &SM = PP.getSourceManager();
+ SourceLocation pasteLocInst =
+ getMacroExpansionLocation(PasteOpLoc);
+ assert(pasteLocInst.isValid() &&
+ "Expected '##' to come from definition");
+
+ Tok.setLocation(SM.createInstantiationLoc(Tok.getLocation(),
+ pasteLocInst,
+ pasteLocInst,
+ Tok.getLength()));
+ }
+
// Now that we got the result token, it will be subject to expansion. Since
// token pasting re-lexes the result token in raw mode, identifier information
// isn't looked up. As such, if the result is an identifier, look up id info.
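A concrete pasting case that exercises the new location handling; CAT, foo and bar are illustrative only:

#define CAT(a, b) a##b
int CAT(foo, bar) = 0;   // expands to: int foobar = 0;

// The pasted identifier 'foobar' is spelled in neither the definition nor the
// arguments; it is re-lexed from a scratch buffer. Its spelling location stays
// in that buffer, while its expansion location is anchored at the '##' of the
// CAT definition (PasteOpLoc mapped through getMacroExpansionLocation above).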
@@ -548,7 +652,7 @@ bool TokenLexer::isParsingPreprocessorDirective() const {
/// HandleMicrosoftCommentPaste - In microsoft compatibility mode, /##/ pastes
/// together to form a comment that comments out everything in the current
/// macro, other active macros, and anything left on the current physical
-/// source line of the instantiated buffer. Handle this by returning the
+/// source line of the expanded buffer. Handle this by returning the
/// first token on the next line.
void TokenLexer::HandleMicrosoftCommentPaste(Token &Tok) {
// We 'comment out' the rest of this macro by just ignoring the rest of the
@@ -561,3 +665,23 @@ void TokenLexer::HandleMicrosoftCommentPaste(Token &Tok) {
PP.HandleMicrosoftCommentPaste(Tok);
}
+
+/// \brief If \arg loc is a FileID and points inside the current macro
+/// definition, returns the appropriate source location pointing at the
+/// macro expansion source location entry.
+SourceLocation TokenLexer::getMacroExpansionLocation(SourceLocation loc) const {
+ assert(ExpandLocStart.isValid() && MacroExpansionStart.isValid() &&
+ "Not appropriate for token streams");
+ assert(loc.isValid());
+
+ SourceManager &SM = PP.getSourceManager();
+ unsigned relativeOffset;
+ if (loc.isFileID() &&
+ SM.isInFileID(loc,
+ MacroDefStartInfo.first, MacroDefStartInfo.second,
+ Macro->getDefinitionLength(SM), &relativeOffset)) {
+ return MacroExpansionStart.getFileLocWithOffset(relativeOffset);
+ }
+
+ return SourceLocation();
+}
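The mapping above is a containment check plus an offset translation. A standalone sketch of that arithmetic, using plain unsigned offsets instead of the real SourceLocation/SourceManager API (MockLoc, mapIntoExpansionChunk and the numbers are hypothetical):

#include <cstdio>

// Hypothetical stand-in: a "location" here is just a byte offset.
struct MockLoc { unsigned Offset; };

// If Loc falls inside [DefStart, DefStart + DefLength), map it to the same
// relative offset inside the reserved expansion chunk; otherwise report
// failure (the real code returns an invalid SourceLocation instead).
bool mapIntoExpansionChunk(MockLoc Loc, unsigned DefStart, unsigned DefLength,
                           MockLoc ExpansionStart, MockLoc &Result) {
  if (Loc.Offset < DefStart || Loc.Offset >= DefStart + DefLength)
    return false;
  Result.Offset = ExpansionStart.Offset + (Loc.Offset - DefStart);
  return true;
}

int main() {
  MockLoc ExpansionStart{1000}; // start of the reserved expansion chunk
  MockLoc TokInDef{57};         // a token 7 bytes into a definition at offset 50
  MockLoc Mapped;
  if (mapIntoExpansionChunk(TokInDef, 50, 20, ExpansionStart, Mapped))
    std::printf("mapped offset: %u\n", Mapped.Offset); // prints 1007
  return 0;
}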