Diffstat (limited to 'unittests/Lex/LexerTest.cpp')
-rw-r--r-- | unittests/Lex/LexerTest.cpp | 280
1 file changed, 225 insertions, 55 deletions
diff --git a/unittests/Lex/LexerTest.cpp b/unittests/Lex/LexerTest.cpp
index c9b1840..40ce928 100644
--- a/unittests/Lex/LexerTest.cpp
+++ b/unittests/Lex/LexerTest.cpp
@@ -28,6 +28,20 @@ using namespace clang;
 
 namespace {
 
+class VoidModuleLoader : public ModuleLoader {
+  virtual ModuleLoadResult loadModule(SourceLocation ImportLoc,
+                                      ModuleIdPath Path,
+                                      Module::NameVisibilityKind Visibility,
+                                      bool IsInclusionDirective) {
+    return ModuleLoadResult();
+  }
+
+  virtual void makeModuleVisible(Module *Mod,
+                                 Module::NameVisibilityKind Visibility,
+                                 SourceLocation ImportLoc,
+                                 bool Complain) { }
+};
+
 // The test fixture.
 class LexerTest : public ::testing::Test {
 protected:
@@ -42,6 +56,48 @@ protected:
     Target = TargetInfo::CreateTargetInfo(Diags, &*TargetOpts);
   }
 
+  std::vector<Token> CheckLex(StringRef Source,
+                              ArrayRef<tok::TokenKind> ExpectedTokens) {
+    MemoryBuffer *buf = MemoryBuffer::getMemBuffer(Source);
+    (void) SourceMgr.createMainFileIDForMemBuffer(buf);
+
+    VoidModuleLoader ModLoader;
+    HeaderSearch HeaderInfo(new HeaderSearchOptions, SourceMgr, Diags, LangOpts,
+                            Target.getPtr());
+    Preprocessor PP(new PreprocessorOptions(), Diags, LangOpts, Target.getPtr(),
+                    SourceMgr, HeaderInfo, ModLoader, /*IILookup =*/ 0,
+                    /*OwnsHeaderSearch =*/ false,
+                    /*DelayInitialization =*/ false);
+    PP.EnterMainSourceFile();
+
+    std::vector<Token> toks;
+    while (1) {
+      Token tok;
+      PP.Lex(tok);
+      if (tok.is(tok::eof))
+        break;
+      toks.push_back(tok);
+    }
+
+    EXPECT_EQ(ExpectedTokens.size(), toks.size());
+    for (unsigned i = 0, e = ExpectedTokens.size(); i != e; ++i) {
+      EXPECT_EQ(ExpectedTokens[i], toks[i].getKind());
+    }
+
+    return toks;
+  }
+
+  std::string getSourceText(Token Begin, Token End) {
+    bool Invalid;
+    StringRef Str =
+        Lexer::getSourceText(CharSourceRange::getTokenRange(SourceRange(
+                                 Begin.getLocation(), End.getLocation())),
+                             SourceMgr, LangOpts, &Invalid);
+    if (Invalid)
+      return "<INVALID>";
+    return Str;
+  }
+
   FileSystemOptions FileMgrOpts;
   FileManager FileMgr;
   IntrusiveRefCntPtr<DiagnosticIDs> DiagID;
@@ -52,65 +108,179 @@ protected:
   IntrusiveRefCntPtr<TargetInfo> Target;
 };
 
-class VoidModuleLoader : public ModuleLoader {
-  virtual ModuleLoadResult loadModule(SourceLocation ImportLoc,
-                                      ModuleIdPath Path,
-                                      Module::NameVisibilityKind Visibility,
-                                      bool IsInclusionDirective) {
-    return ModuleLoadResult();
-  }
+TEST_F(LexerTest, GetSourceTextExpandsToMaximumInMacroArgument) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
 
-  virtual void makeModuleVisible(Module *Mod,
-                                 Module::NameVisibilityKind Visibility,
-                                 SourceLocation ImportLoc,
-                                 bool Complain) { }
-};
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(f(M(i)))",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("M(i)", getSourceText(toks[2], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsToMaximumInMacroArgumentForEndOfMacro) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(M(i) c)",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("M(i)", getSourceText(toks[0], toks[0]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsInMacroArgumentForBeginOfMacro) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(c c M(i))",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("c M(i)", getSourceText(toks[1], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsInMacroArgumentForEndOfMacro) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(M(i) c c)",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("M(i) c", getSourceText(toks[0], toks[1]));
+}
+
+TEST_F(LexerTest, GetSourceTextInSeparateFnMacros) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(c M(i)) M(M(i) c)",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("<INVALID>", getSourceText(toks[1], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextWorksAcrossTokenPastes) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "#define C(x) M(x##c)\n"
+                                     "M(f(C(i)))",
+                                     ExpectedTokens);
+
+  EXPECT_EQ("C(i)", getSourceText(toks[2], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsAcrossMultipleMacroCalls) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "f(M(M(i)))",
+                                     ExpectedTokens);
+  EXPECT_EQ("M(M(i))", getSourceText(toks[2], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextInMiddleOfMacroArgument) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "M(f(i))",
+                                     ExpectedTokens);
+  EXPECT_EQ("i", getSourceText(toks[2], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsAroundDifferentMacroCalls) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "#define C(x) x\n"
+                                     "f(C(M(i)))",
+                                     ExpectedTokens);
+  EXPECT_EQ("C(M(i))", getSourceText(toks[2], toks[2]));
+}
+
+TEST_F(LexerTest, GetSourceTextOnlyExpandsIfFirstTokenInMacro) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "#define C(x) c x\n"
+                                     "f(C(M(i)))",
+                                     ExpectedTokens);
+  EXPECT_EQ("M(i)", getSourceText(toks[3], toks[3]));
+}
+
+TEST_F(LexerTest, GetSourceTextExpandsRecursively) {
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::l_paren);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_paren);
+
+  std::vector<Token> toks = CheckLex("#define M(x) x\n"
+                                     "#define C(x) c M(x)\n"
+                                     "C(f(M(i)))",
+                                     ExpectedTokens);
+  EXPECT_EQ("M(i)", getSourceText(toks[3], toks[3]));
+}
 
 TEST_F(LexerTest, LexAPI) {
-  const char *source =
-    "#define M(x) [x]\n"
-    "#define N(x) x\n"
-    "#define INN(x) x\n"
-    "#define NOF1 INN(val)\n"
-    "#define NOF2 val\n"
-    "M(foo) N([bar])\n"
-    "N(INN(val)) N(NOF1) N(NOF2) N(val)";
-
-  MemoryBuffer *buf = MemoryBuffer::getMemBuffer(source);
-  (void)SourceMgr.createMainFileIDForMemBuffer(buf);
-
-  VoidModuleLoader ModLoader;
-  HeaderSearch HeaderInfo(new HeaderSearchOptions, FileMgr, Diags, LangOpts,
-                          Target.getPtr());
-  Preprocessor PP(new PreprocessorOptions(), Diags, LangOpts, Target.getPtr(),
-                  SourceMgr, HeaderInfo, ModLoader,
-                  /*IILookup =*/ 0,
-                  /*OwnsHeaderSearch =*/false,
-                  /*DelayInitialization =*/ false);
-  PP.EnterMainSourceFile();
-
-  std::vector<Token> toks;
-  while (1) {
-    Token tok;
-    PP.Lex(tok);
-    if (tok.is(tok::eof))
-      break;
-    toks.push_back(tok);
-  }
+  std::vector<tok::TokenKind> ExpectedTokens;
+  ExpectedTokens.push_back(tok::l_square);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_square);
+  ExpectedTokens.push_back(tok::l_square);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::r_square);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+  ExpectedTokens.push_back(tok::identifier);
+
+  std::vector<Token> toks = CheckLex("#define M(x) [x]\n"
+                                     "#define N(x) x\n"
+                                     "#define INN(x) x\n"
+                                     "#define NOF1 INN(val)\n"
+                                     "#define NOF2 val\n"
+                                     "M(foo) N([bar])\n"
+                                     "N(INN(val)) N(NOF1) N(NOF2) N(val)",
+                                     ExpectedTokens);
 
-  // Make sure we got the tokens that we expected.
-  ASSERT_EQ(10U, toks.size());
-  ASSERT_EQ(tok::l_square, toks[0].getKind());
-  ASSERT_EQ(tok::identifier, toks[1].getKind());
-  ASSERT_EQ(tok::r_square, toks[2].getKind());
-  ASSERT_EQ(tok::l_square, toks[3].getKind());
-  ASSERT_EQ(tok::identifier, toks[4].getKind());
-  ASSERT_EQ(tok::r_square, toks[5].getKind());
-  ASSERT_EQ(tok::identifier, toks[6].getKind());
-  ASSERT_EQ(tok::identifier, toks[7].getKind());
-  ASSERT_EQ(tok::identifier, toks[8].getKind());
-  ASSERT_EQ(tok::identifier, toks[9].getKind());
-
   SourceLocation lsqrLoc = toks[0].getLocation();
   SourceLocation idLoc = toks[1].getLocation();
   SourceLocation rsqrLoc = toks[2].getLocation();