author      dim <dim@FreeBSD.org>    2015-06-21 14:00:56 +0000
committer   dim <dim@FreeBSD.org>    2015-06-21 14:00:56 +0000
commit      9dd834653b811ad20382e98a87dff824980c9916 (patch)
tree        a764184c2fc9486979b074250b013a0937ee64e5 /lib/Lex
parent      bb9760db9b86e93a638ed430d0a14785f7ff9064 (diff)
download    FreeBSD-src-9dd834653b811ad20382e98a87dff824980c9916.zip
            FreeBSD-src-9dd834653b811ad20382e98a87dff824980c9916.tar.gz
Vendor import of clang trunk r240225:
https://llvm.org/svn/llvm-project/cfe/trunk@240225
Diffstat (limited to 'lib/Lex')
-rw-r--r--   lib/Lex/HeaderSearch.cpp         14
-rw-r--r--   lib/Lex/ModuleMap.cpp            36
-rw-r--r--   lib/Lex/PPDirectives.cpp         22
-rw-r--r--   lib/Lex/PPLexerChange.cpp        11
-rw-r--r--   lib/Lex/PPMacroExpansion.cpp     17
-rw-r--r--   lib/Lex/Pragma.cpp               55
-rw-r--r--   lib/Lex/TokenConcatenation.cpp   20
-rw-r--r--   lib/Lex/TokenLexer.cpp            2
8 files changed, 121 insertions, 56 deletions
diff --git a/lib/Lex/HeaderSearch.cpp b/lib/Lex/HeaderSearch.cpp
index ad7d344..7a98f54 100644
--- a/lib/Lex/HeaderSearch.cpp
+++ b/lib/Lex/HeaderSearch.cpp
@@ -158,7 +158,7 @@ std::string HeaderSearch::getModuleFileName(StringRef ModuleName,
 Module *HeaderSearch::lookupModule(StringRef ModuleName, bool AllowSearch) {
   // Look in the module map to determine if there is a module by this name.
   Module *Module = ModMap.findModule(ModuleName);
-  if (Module || !AllowSearch || !LangOpts.ModulesImplicitMaps)
+  if (Module || !AllowSearch || !HSOpts->ImplicitModuleMaps)
     return Module;
 
   // Look through the various header search paths to load any available module
@@ -1076,7 +1076,7 @@ StringRef HeaderSearch::getUniqueFrameworkName(StringRef Framework) {
 bool HeaderSearch::hasModuleMap(StringRef FileName,
                                 const DirectoryEntry *Root,
                                 bool IsSystem) {
-  if (!HSOpts->ModuleMaps || !LangOpts.ModulesImplicitMaps)
+  if (!HSOpts->ImplicitModuleMaps)
     return false;
 
   SmallVector<const DirectoryEntry *, 2> FixUpDirectories;
@@ -1203,7 +1203,7 @@ HeaderSearch::loadModuleMapFileImpl(const FileEntry *File, bool IsSystem,
 const FileEntry *
 HeaderSearch::lookupModuleMapFile(const DirectoryEntry *Dir, bool IsFramework) {
-  if (!LangOpts.ModulesImplicitMaps)
+  if (!HSOpts->ImplicitModuleMaps)
     return nullptr;
 
   // For frameworks, the preferred spelling is Modules/module.modulemap, but
   // module.map at the framework root is also accepted.
@@ -1241,7 +1241,7 @@ Module *HeaderSearch::loadFrameworkModule(StringRef Name,
 
   // Try to infer a module map from the framework directory.
-  if (LangOpts.ModulesImplicitMaps)
+  if (HSOpts->ImplicitModuleMaps)
     return ModMap.inferFrameworkModule(Name, Dir, IsSystem,
                                        /*Parent=*/nullptr);
 
   return nullptr;
@@ -1282,7 +1282,7 @@ HeaderSearch::loadModuleMapFile(const DirectoryEntry *Dir, bool IsSystem,
 void HeaderSearch::collectAllModules(SmallVectorImpl<Module *> &Modules) {
   Modules.clear();
 
-  if (LangOpts.ModulesImplicitMaps) {
+  if (HSOpts->ImplicitModuleMaps) {
     // Load module maps for each of the header search directories.
     for (unsigned Idx = 0, N = SearchDirs.size(); Idx != N; ++Idx) {
       bool IsSystem = SearchDirs[Idx].isSystemHeaderDirectory();
@@ -1333,7 +1333,7 @@ void HeaderSearch::collectAllModules(SmallVectorImpl<Module *> &Modules) {
 }
 
 void HeaderSearch::loadTopLevelSystemModules() {
-  if (!LangOpts.ModulesImplicitMaps)
+  if (!HSOpts->ImplicitModuleMaps)
     return;
 
   // Load module maps for each of the header search directories.
@@ -1351,7 +1351,7 @@ void HeaderSearch::loadTopLevelSystemModules() {
 }
 
 void HeaderSearch::loadSubdirectoryModuleMaps(DirectoryLookup &SearchDir) {
-  assert(LangOpts.ModulesImplicitMaps &&
+  assert(HSOpts->ImplicitModuleMaps &&
          "Should not be loading subdirectory module maps");
 
   if (SearchDir.haveSearchedAllModuleMaps())
diff --git a/lib/Lex/ModuleMap.cpp b/lib/Lex/ModuleMap.cpp
index addad59..6c98d01 100644
--- a/lib/Lex/ModuleMap.cpp
+++ b/lib/Lex/ModuleMap.cpp
@@ -165,7 +165,8 @@ static bool isBuiltinHeader(StringRef FileName) {
 ModuleMap::HeadersMap::iterator
 ModuleMap::findKnownHeader(const FileEntry *File) {
   HeadersMap::iterator Known = Headers.find(File);
-  if (Known == Headers.end() && File->getDir() == BuiltinIncludeDir &&
+  if (HeaderInfo.getHeaderSearchOpts().ImplicitModuleMaps &&
+      Known == Headers.end() && File->getDir() == BuiltinIncludeDir &&
       isBuiltinHeader(llvm::sys::path::filename(File->getName()))) {
     HeaderInfo.loadTopLevelSystemModules();
     return Headers.find(File);
@@ -176,6 +177,9 @@ ModuleMap::findKnownHeader(const FileEntry *File) {
 ModuleMap::KnownHeader
 ModuleMap::findHeaderInUmbrellaDirs(const FileEntry *File,
                     SmallVectorImpl<const DirectoryEntry *> &IntermediateDirs) {
+  if (UmbrellaDirs.empty())
+    return KnownHeader();
+
   const DirectoryEntry *Dir = File->getDir();
   assert(Dir && "file in no directory");
 
@@ -330,41 +334,23 @@ static bool isBetterKnownHeader(const ModuleMap::KnownHeader &New,
   return false;
 }
 
-ModuleMap::KnownHeader
-ModuleMap::findModuleForHeader(const FileEntry *File,
-                               Module *RequestingModule) {
-  HeadersMap::iterator Known = findKnownHeader(File);
-
+ModuleMap::KnownHeader ModuleMap::findModuleForHeader(const FileEntry *File) {
   auto MakeResult = [&](ModuleMap::KnownHeader R) -> ModuleMap::KnownHeader {
     if (R.getRole() & ModuleMap::TextualHeader)
       return ModuleMap::KnownHeader();
     return R;
   };
 
+  HeadersMap::iterator Known = findKnownHeader(File);
   if (Known != Headers.end()) {
     ModuleMap::KnownHeader Result;
-
-    // Iterate over all modules that 'File' is part of to find the best fit.
-    for (SmallVectorImpl<KnownHeader>::iterator I = Known->second.begin(),
-                                                E = Known->second.end();
-         I != E; ++I) {
+    for (KnownHeader &H : Known->second) {
       // Cannot use a module if it is unavailable.
-      if (!I->getModule()->isAvailable())
+      if (!H.getModule()->isAvailable())
         continue;
-
-      // If 'File' is part of 'RequestingModule', 'RequestingModule' is the
-      // module we are looking for.
-      if (I->getModule() == RequestingModule)
-        return MakeResult(*I);
-
-      // If uses need to be specified explicitly, we are only allowed to return
-      // modules that are explicitly used by the requesting module.
-      if (RequestingModule && LangOpts.ModulesDeclUse &&
-          !RequestingModule->directlyUses(I->getModule()))
-        continue;
-
-      if (!Result || isBetterKnownHeader(*I, Result))
-        Result = *I;
+      if (!Result || isBetterKnownHeader(H, Result))
+        Result = H;
     }
 
     return MakeResult(Result);
   }
diff --git a/lib/Lex/PPDirectives.cpp b/lib/Lex/PPDirectives.cpp
index ec06e79..33ce799 100644
--- a/lib/Lex/PPDirectives.cpp
+++ b/lib/Lex/PPDirectives.cpp
@@ -1575,6 +1575,15 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
     PragmaARCCFCodeAuditedLoc = SourceLocation();
   }
 
+  // Complain about attempts to #include files in an assume-nonnull pragma.
+  if (PragmaAssumeNonNullLoc.isValid()) {
+    Diag(HashLoc, diag::err_pp_include_in_assume_nonnull);
+    Diag(PragmaAssumeNonNullLoc, diag::note_pragma_entered_here);
+
+    // Immediately leave the pragma.
+    PragmaAssumeNonNullLoc = SourceLocation();
+  }
+
   if (HeaderInfo.HasIncludeAliasMap()) {
     // Map the filename with the brackets still attached. If the name doesn't
     // map to anything, fall back on the filename we've already gotten the
@@ -1603,7 +1612,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
       FilenameLoc, LangOpts.MSVCCompat ? NormalizedPath.c_str() : Filename,
       isAngled, LookupFrom, LookupFromFile, CurDir,
       Callbacks ? &SearchPath : nullptr, Callbacks ? &RelativePath : nullptr,
-      HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule : nullptr);
+      &SuggestedModule);
 
   if (!File) {
     if (Callbacks) {
@@ -1620,9 +1629,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
             FilenameLoc,
             LangOpts.MSVCCompat ? NormalizedPath.c_str() : Filename, isAngled,
             LookupFrom, LookupFromFile, CurDir, nullptr, nullptr,
-            HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule
-                                                        : nullptr,
-            /*SkipCache*/ true);
+            &SuggestedModule, /*SkipCache*/ true);
       }
     }
   }
@@ -1638,8 +1645,7 @@ void Preprocessor::HandleIncludeDirective(SourceLocation HashLoc,
           LookupFrom, LookupFromFile, CurDir,
           Callbacks ? &SearchPath : nullptr,
           Callbacks ? &RelativePath : nullptr,
-          HeaderInfo.getHeaderSearchOpts().ModuleMaps ? &SuggestedModule
-                                                      : nullptr);
+          &SuggestedModule);
       if (File) {
         SourceRange Range(FilenameTok.getLocation(), CharEnd);
         Diag(FilenameTok, diag::err_pp_file_not_found_not_fatal) <<
@@ -2012,8 +2018,8 @@ static bool isConfigurationPattern(Token &MacroName, MacroInfo *MI,
   }
 
   // #define inline
-  if ((MacroName.is(tok::kw_extern) || MacroName.is(tok::kw_inline) ||
-       MacroName.is(tok::kw_static) || MacroName.is(tok::kw_const)) &&
+  if (MacroName.isOneOf(tok::kw_extern, tok::kw_inline, tok::kw_static,
+                        tok::kw_const) &&
       MI->getNumTokens() == 0) {
     return true;
   }
diff --git a/lib/Lex/PPLexerChange.cpp b/lib/Lex/PPLexerChange.cpp
index e68fb7d..1a35d32 100644
--- a/lib/Lex/PPLexerChange.cpp
+++ b/lib/Lex/PPLexerChange.cpp
@@ -355,6 +355,17 @@ bool Preprocessor::HandleEndOfFile(Token &Result, bool isEndOfMacro) {
     PragmaARCCFCodeAuditedLoc = SourceLocation();
   }
 
+  // Complain about reaching a true EOF within assume_nonnull.
+  // We don't want to complain about reaching the end of a macro
+  // instantiation or a _Pragma.
+  if (PragmaAssumeNonNullLoc.isValid() &&
+      !isEndOfMacro && !(CurLexer && CurLexer->Is_PragmaLexer)) {
+    Diag(PragmaAssumeNonNullLoc, diag::err_pp_eof_in_assume_nonnull);
+
+    // Recover by leaving immediately.
+    PragmaAssumeNonNullLoc = SourceLocation();
+  }
+
   // If this is a #include'd file, pop it off the include stack and continue
   // lexing the #includer file.
   if (!IncludeMacroStack.empty()) {
diff --git a/lib/Lex/PPMacroExpansion.cpp b/lib/Lex/PPMacroExpansion.cpp
index 03784e2..5e0b283 100644
--- a/lib/Lex/PPMacroExpansion.cpp
+++ b/lib/Lex/PPMacroExpansion.cpp
@@ -726,10 +726,10 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
   unsigned NumActuals = 0;
   while (Tok.isNot(tok::r_paren)) {
-    if (ContainsCodeCompletionTok && (Tok.is(tok::eof) || Tok.is(tok::eod)))
+    if (ContainsCodeCompletionTok && Tok.isOneOf(tok::eof, tok::eod))
       break;
 
-    assert((Tok.is(tok::l_paren) || Tok.is(tok::comma)) &&
+    assert(Tok.isOneOf(tok::l_paren, tok::comma) &&
            "only expect argument separators here");
 
     unsigned ArgTokenStart = ArgTokens.size();
@@ -744,7 +744,7 @@ MacroArgs *Preprocessor::ReadFunctionLikeMacroArgs(Token &MacroName,
       // an argument value in a macro could expand to ',' or '(' or ')'.
       LexUnexpandedToken(Tok);
 
-      if (Tok.is(tok::eof) || Tok.is(tok::eod)) { // "#if f(<eof>" & "#if f(\n"
+      if (Tok.isOneOf(tok::eof, tok::eod)) { // "#if f(<eof>" & "#if f(\n"
         if (!ContainsCodeCompletionTok) {
           Diag(MacroName, diag::err_unterm_macro_invoc);
           Diag(MI->getDefinitionLoc(), diag::note_macro_here)
@@ -1049,13 +1049,17 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) {
     Feature = Feature.substr(2, Feature.size() - 4);
 
   return llvm::StringSwitch<bool>(Feature)
-      .Case("address_sanitizer", LangOpts.Sanitize.has(SanitizerKind::Address))
+      .Case("address_sanitizer",
+            LangOpts.Sanitize.hasOneOf(SanitizerKind::Address |
+                                       SanitizerKind::KernelAddress))
+      .Case("assume_nonnull", LangOpts.ObjC1 || LangOpts.GNUMode)
       .Case("attribute_analyzer_noreturn", true)
      .Case("attribute_availability", true)
      .Case("attribute_availability_with_message", true)
      .Case("attribute_availability_app_extension", true)
      .Case("attribute_cf_returns_not_retained", true)
      .Case("attribute_cf_returns_retained", true)
+     .Case("attribute_cf_returns_on_parameters", true)
      .Case("attribute_deprecated_with_message", true)
      .Case("attribute_ext_vector_type", true)
      .Case("attribute_ns_returns_not_retained", true)
@@ -1073,6 +1077,7 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) {
      .Case("cxx_exceptions", LangOpts.CXXExceptions)
      .Case("cxx_rtti", LangOpts.RTTI)
      .Case("enumerator_attributes", true)
+     .Case("nullability", LangOpts.ObjC1 || LangOpts.GNUMode)
      .Case("memory_sanitizer", LangOpts.Sanitize.has(SanitizerKind::Memory))
      .Case("thread_sanitizer", LangOpts.Sanitize.has(SanitizerKind::Thread))
      .Case("dataflow_sanitizer", LangOpts.Sanitize.has(SanitizerKind::DataFlow))
@@ -1190,6 +1195,7 @@ static bool HasFeature(const Preprocessor &PP, const IdentifierInfo *II) {
      .Case("is_trivially_copyable", LangOpts.CPlusPlus)
      .Case("is_union", LangOpts.CPlusPlus)
      .Case("modules", LangOpts.Modules)
+     .Case("safe_stack", LangOpts.Sanitize.has(SanitizerKind::SafeStack))
      .Case("tls", PP.getTargetInfo().isTLSSupported())
      .Case("underlying_type", LangOpts.CPlusPlus)
      .Default(false);
@@ -1219,6 +1225,7 @@ static bool HasExtension(const Preprocessor &PP, const IdentifierInfo *II) {
   // Because we inherit the feature list from HasFeature, this string switch
   // must be less restrictive than HasFeature's.
   return llvm::StringSwitch<bool>(Extension)
+      .Case("nullability", true)
       // C11 features supported by other languages as extensions.
       .Case("c_alignas", true)
       .Case("c_alignof", true)
@@ -1746,7 +1753,7 @@ void Preprocessor::ExpandBuiltinMacro(Token &Tok) {
         Diag(Tok.getLocation(), diag::err_pp_identifier_arg_not_identifier)
           << Tok.getKind();
         // Don't walk past anything that's not a real token.
-        if (Tok.is(tok::eof) || Tok.is(tok::eod) || Tok.isAnnotation())
+        if (Tok.isOneOf(tok::eof, tok::eod) || Tok.isAnnotation())
           return;
       }
diff --git a/lib/Lex/Pragma.cpp b/lib/Lex/Pragma.cpp
index 26ed674..5eb6655 100644
--- a/lib/Lex/Pragma.cpp
+++ b/lib/Lex/Pragma.cpp
@@ -1342,6 +1342,60 @@ struct PragmaARCCFCodeAuditedHandler : public PragmaHandler {
   }
 };
 
+/// PragmaAssumeNonNullHandler -
+///   \#pragma clang assume_nonnull begin/end
+struct PragmaAssumeNonNullHandler : public PragmaHandler {
+  PragmaAssumeNonNullHandler() : PragmaHandler("assume_nonnull") {}
+  void HandlePragma(Preprocessor &PP, PragmaIntroducerKind Introducer,
+                    Token &NameTok) override {
+    SourceLocation Loc = NameTok.getLocation();
+    bool IsBegin;
+
+    Token Tok;
+
+    // Lex the 'begin' or 'end'.
+    PP.LexUnexpandedToken(Tok);
+    const IdentifierInfo *BeginEnd = Tok.getIdentifierInfo();
+    if (BeginEnd && BeginEnd->isStr("begin")) {
+      IsBegin = true;
+    } else if (BeginEnd && BeginEnd->isStr("end")) {
+      IsBegin = false;
+    } else {
+      PP.Diag(Tok.getLocation(), diag::err_pp_assume_nonnull_syntax);
+      return;
+    }
+
+    // Verify that this is followed by EOD.
+    PP.LexUnexpandedToken(Tok);
+    if (Tok.isNot(tok::eod))
+      PP.Diag(Tok, diag::ext_pp_extra_tokens_at_eol) << "pragma";
+
+    // The start location of the active audit.
+    SourceLocation BeginLoc = PP.getPragmaAssumeNonNullLoc();
+
+    // The start location we want after processing this.
+    SourceLocation NewLoc;
+
+    if (IsBegin) {
+      // Complain about attempts to re-enter an audit.
+      if (BeginLoc.isValid()) {
+        PP.Diag(Loc, diag::err_pp_double_begin_of_assume_nonnull);
+        PP.Diag(BeginLoc, diag::note_pragma_entered_here);
+      }
+      NewLoc = Loc;
+    } else {
+      // Complain about attempts to leave an audit that doesn't exist.
+      if (!BeginLoc.isValid()) {
+        PP.Diag(Loc, diag::err_pp_unmatched_end_of_assume_nonnull);
+        return;
+      }
+      NewLoc = SourceLocation();
+    }
+
+    PP.setPragmaAssumeNonNullLoc(NewLoc);
+  }
+};
+
 /// \brief Handle "\#pragma region [...]"
 ///
 /// The syntax is
@@ -1393,6 +1447,7 @@ void Preprocessor::RegisterBuiltinPragmas() {
   AddPragmaHandler("clang", new PragmaDependencyHandler());
   AddPragmaHandler("clang", new PragmaDiagnosticHandler("clang"));
   AddPragmaHandler("clang", new PragmaARCCFCodeAuditedHandler());
+  AddPragmaHandler("clang", new PragmaAssumeNonNullHandler());
 
   AddPragmaHandler("STDC", new PragmaSTDC_FENV_ACCESSHandler());
   AddPragmaHandler("STDC", new PragmaSTDC_CX_LIMITED_RANGEHandler());
diff --git a/lib/Lex/TokenConcatenation.cpp b/lib/Lex/TokenConcatenation.cpp
index 0832749..27b4eab 100644
--- a/lib/Lex/TokenConcatenation.cpp
+++ b/lib/Lex/TokenConcatenation.cpp
@@ -178,20 +178,20 @@ bool TokenConcatenation::AvoidConcat(const Token &PrevPrevTok,
   if (ConcatInfo & aci_avoid_equal) {
     // If the next token is '=' or '==', avoid concatenation.
-    if (Tok.is(tok::equal) || Tok.is(tok::equalequal))
+    if (Tok.isOneOf(tok::equal, tok::equalequal))
       return true;
     ConcatInfo &= ~aci_avoid_equal;
   }
 
   if (Tok.isAnnotation()) {
     // Modules annotation can show up when generated automatically for includes.
-    assert((Tok.is(tok::annot_module_include) ||
-            Tok.is(tok::annot_module_begin) ||
-            Tok.is(tok::annot_module_end)) &&
+    assert(Tok.isOneOf(tok::annot_module_include, tok::annot_module_begin,
+                       tok::annot_module_end) &&
            "unexpected annotation in AvoidConcat");
     ConcatInfo = 0;
   }
 
-  if (ConcatInfo == 0) return false;
+  if (ConcatInfo == 0)
+    return false;
 
   // Basic algorithm: we look at the first character of the second token, and
   // determine whether it, if appended to the first token, would form (or
@@ -238,11 +238,11 @@ bool TokenConcatenation::AvoidConcat(const Token &PrevPrevTok,
     if (Tok.is(tok::numeric_constant))
       return GetFirstChar(PP, Tok) != '.';
 
-    if (Tok.getIdentifierInfo() || Tok.is(tok::wide_string_literal) ||
-        Tok.is(tok::utf8_string_literal) || Tok.is(tok::utf16_string_literal) ||
-        Tok.is(tok::utf32_string_literal) || Tok.is(tok::wide_char_constant) ||
-        Tok.is(tok::utf8_char_constant) || Tok.is(tok::utf16_char_constant) ||
-        Tok.is(tok::utf32_char_constant))
+    if (Tok.getIdentifierInfo() ||
+        Tok.isOneOf(tok::wide_string_literal, tok::utf8_string_literal,
+                    tok::utf16_string_literal, tok::utf32_string_literal,
+                    tok::wide_char_constant, tok::utf8_char_constant,
+                    tok::utf16_char_constant, tok::utf32_char_constant))
       return true;
 
     // If this isn't identifier + string, we're done.
diff --git a/lib/Lex/TokenLexer.cpp b/lib/Lex/TokenLexer.cpp
index 83efbab..e7512fa 100644
--- a/lib/Lex/TokenLexer.cpp
+++ b/lib/Lex/TokenLexer.cpp
@@ -185,7 +185,7 @@ void TokenLexer::ExpandFunctionArguments() {
     if (i != 0 && !Tokens[i-1].is(tok::hashhash) && CurTok.hasLeadingSpace())
       NextTokGetsSpace = true;
 
-    if (CurTok.is(tok::hash) || CurTok.is(tok::hashat)) {
+    if (CurTok.isOneOf(tok::hash, tok::hashat)) {
      int ArgNo = Macro->getArgumentNum(Tokens[i+1].getIdentifierInfo());
      assert(ArgNo != -1 && "Token following # is not an argument?");
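For context, a minimal usage sketch of the `#pragma clang assume_nonnull` support wired up above; the file name, compile command, and macro guard are illustrative assumptions, not part of this commit. The feature test mirrors the `assume_nonnull` entry added to HasFeature() in PPMacroExpansion.cpp, and the diagnostics named in the comments are the ones referenced in PPDirectives.cpp, PPLexerChange.cpp, and Pragma.cpp.

```cpp
// example.cpp (hypothetical) -- try with: clang++ -fsyntax-only example.cpp
#if defined(__has_feature) && __has_feature(assume_nonnull)
#pragma clang assume_nonnull begin
#endif

// Pointer parameters declared inside the region are treated as nonnull.
void consume(int *pointer);

#if defined(__has_feature) && __has_feature(assume_nonnull)
#pragma clang assume_nonnull end
#endif

// Leaving the region open at end of file triggers err_pp_eof_in_assume_nonnull,
// an "end" without a matching "begin" triggers
// err_pp_unmatched_end_of_assume_nonnull, and a #include inside the region
// triggers err_pp_include_in_assume_nonnull.
```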
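Many hunks above mechanically replace chains of `Tok.is(a) || Tok.is(b) || ...` with `Tok.isOneOf(a, b, ...)`. The helper lives in clang's Token class rather than in this diff; the self-contained sketch below uses stand-in types, not clang's, to show the recursive variadic overload pattern such a helper typically follows.

```cpp
// Stand-in sketch of the isOneOf() pattern; SimpleToken and TokenKind are
// illustrative types, not clang's.
#include <cassert>

enum class TokenKind { Eof, Eod, Comma, LParen, RParen };

struct SimpleToken {
  TokenKind Kind;
  bool is(TokenKind K) const { return Kind == K; }
  bool isOneOf(TokenKind K1, TokenKind K2) const { return is(K1) || is(K2); }
  template <typename... Ts>
  bool isOneOf(TokenKind K1, TokenKind K2, Ts... Ks) const {
    // Peel off one kind at a time until the two-argument overload applies.
    return is(K1) || isOneOf(K2, Ks...);
  }
};

int main() {
  SimpleToken Tok{TokenKind::Eod};
  assert(Tok.isOneOf(TokenKind::Eof, TokenKind::Eod));   // was: is(a) || is(b)
  assert(!Tok.isOneOf(TokenKind::Comma, TokenKind::LParen, TokenKind::RParen));
  return 0;
}
```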