Diffstat (limited to 'contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp')
-rw-r--r--  contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp  378
1 file changed, 295 insertions(+), 83 deletions(-)
diff --git a/contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp b/contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp
index 2e566de..7ed891f 100644
--- a/contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp
+++ b/contrib/llvm/tools/clang/lib/CodeGen/CGClass.cpp
@@ -26,6 +26,7 @@
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
+#include "llvm/Transforms/Utils/SanitizerStats.h"
using namespace clang;
using namespace CodeGen;
@@ -94,7 +95,7 @@ CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
// unless we someday add some sort of attribute to change the
// assumed alignment of 'this'. So our goal here is pretty much
// just to allow the user to explicitly say that a pointer is
- // under-aligned and then safely access its fields and v-tables.
+ // under-aligned and then safely access its fields and vtables.
if (actualBaseAlign >= expectedBaseAlign) {
return expectedTargetAlign;
}
@@ -745,7 +746,7 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
ArrayRef<VarDecl *> ArrayIndexes;
if (MemberInit->getNumArrayIndices())
- ArrayIndexes = MemberInit->getArrayIndexes();
+ ArrayIndexes = MemberInit->getArrayIndices();
CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}
@@ -986,7 +987,7 @@ namespace {
CodeGenFunction &CGF;
SanitizerSet OldSanOpts;
};
-}
+} // end anonymous namespace
namespace {
class FieldMemcpyizer {
@@ -1071,7 +1072,6 @@ namespace {
const CXXRecordDecl *ClassDecl;
private:
-
void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
llvm::PointerType *DPT = DestPtr.getType();
llvm::Type *DBP =
@@ -1087,13 +1087,12 @@ namespace {
}
void addInitialField(FieldDecl *F) {
- FirstField = F;
- LastField = F;
- FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
- LastFieldOffset = FirstFieldOffset;
- LastAddedFieldIndex = F->getFieldIndex();
- return;
- }
+ FirstField = F;
+ LastField = F;
+ FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
+ LastFieldOffset = FirstFieldOffset;
+ LastAddedFieldIndex = F->getFieldIndex();
+ }
void addNextField(FieldDecl *F) {
// For the most part, the following invariant will hold:
@@ -1127,7 +1126,6 @@ namespace {
class ConstructorMemcpyizer : public FieldMemcpyizer {
private:
-
/// Get source argument for copy constructor. Returns null if not a copy
/// constructor.
static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
@@ -1232,7 +1230,6 @@ namespace {
class AssignmentMemcpyizer : public FieldMemcpyizer {
private:
-
// Returns the memcpyable field copied by the given statement, if one
// exists. Otherwise returns null.
FieldDecl *getMemcpyableField(Stmt *S) {
@@ -1306,7 +1303,6 @@ namespace {
SmallVector<Stmt*, 16> AggregatedStmts;
public:
-
AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
FunctionArgList &Args)
: FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
@@ -1607,6 +1603,7 @@ void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args)
LexicalScope Scope(*this, RootCS->getSourceRange());
+ incrementProfileCounter(RootCS);
AssignmentMemcpyizer AM(*this, AssignOp, Args);
for (auto *I : RootCS->body())
AM.emitAssignment(I);
@@ -1628,6 +1625,7 @@ namespace {
struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
llvm::Value *ShouldDeleteCondition;
+
public:
CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
: ShouldDeleteCondition(ShouldDeleteCondition) {
@@ -1917,7 +1915,7 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
/// \param zeroInitialize true if each element should be
/// zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
- const CXXConstructorDecl *ctor, const ConstantArrayType *arrayType,
+ const CXXConstructorDecl *ctor, const ArrayType *arrayType,
Address arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) {
QualType elementType;
llvm::Value *numElements =
@@ -2050,6 +2048,62 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
bool ForVirtualBase,
bool Delegating, Address This,
const CXXConstructExpr *E) {
+ CallArgList Args;
+
+ // Push the this ptr.
+ Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));
+
+ // If this is a trivial constructor, emit a memcpy now before we lose
+ // the alignment information on the argument.
+ // FIXME: It would be better to preserve alignment information into CallArg.
+ if (isMemcpyEquivalentSpecialMember(D)) {
+ assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
+
+ const Expr *Arg = E->getArg(0);
+ QualType SrcTy = Arg->getType();
+ Address Src = EmitLValue(Arg).getAddress();
+ QualType DestTy = getContext().getTypeDeclType(D->getParent());
+ EmitAggregateCopyCtor(This, Src, DestTy, SrcTy);
+ return;
+ }
+
+ // Add the rest of the user-supplied arguments.
+ const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
+ EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor());
+
+ EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args);
+}
+
+static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
+ const CXXConstructorDecl *Ctor,
+ CXXCtorType Type, CallArgList &Args) {
+ // We can't forward a variadic call.
+ if (Ctor->isVariadic())
+ return false;
+
+ if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
+ // If the parameters are callee-cleanup, it's not safe to forward.
+ for (auto *P : Ctor->parameters())
+ if (P->getType().isDestructedType())
+ return false;
+
+ // Likewise if they're inalloca.
+ const CGFunctionInfo &Info =
+ CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0);
+ if (Info.usesInAlloca())
+ return false;
+ }
+
+ // Anything else should be OK.
+ return true;
+}
+
+void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
+ CXXCtorType Type,
+ bool ForVirtualBase,
+ bool Delegating,
+ Address This,
+ CallArgList &Args) {
const CXXRecordDecl *ClassDecl = D->getParent();
// C++11 [class.mfct.non-static]p2:
@@ -2060,7 +2114,7 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
This.getPointer(), getContext().getRecordType(ClassDecl));
if (D->isTrivial() && D->isDefaultConstructor()) {
- assert(E->getNumArgs() == 0 && "trivial default ctor with args");
+ assert(Args.size() == 1 && "trivial default ctor with args");
return;
}
@@ -2068,24 +2122,24 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
// union copy constructor, we must emit a memcpy, because the AST does not
// model that copy.
if (isMemcpyEquivalentSpecialMember(D)) {
- assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
+ assert(Args.size() == 2 && "unexpected argcount for trivial ctor");
- const Expr *Arg = E->getArg(0);
- QualType SrcTy = Arg->getType();
- Address Src = EmitLValue(Arg).getAddress();
+ QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
+ Address Src(Args[1].RV.getScalarVal(), getNaturalTypeAlignment(SrcTy));
QualType DestTy = getContext().getTypeDeclType(ClassDecl);
EmitAggregateCopyCtor(This, Src, DestTy, SrcTy);
return;
}
- CallArgList Args;
-
- // Push the this ptr.
- Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));
-
- // Add the rest of the user-supplied arguments.
- const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
- EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor());
+ // Check whether we can actually emit the constructor before trying to do so.
+ if (auto Inherited = D->getInheritedConstructor()) {
+ if (getTypes().inheritingCtorHasParams(Inherited, Type) &&
+ !canEmitDelegateCallArgs(*this, D, Type, Args)) {
+ EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
+ Delegating, Args);
+ return;
+ }
+ }
// Insert any ABI-specific implicit constructor arguments.
unsigned ExtraArgs = CGM.getCXXABI().addImplicitConstructorArgs(
@@ -2115,6 +2169,95 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
EmitVTableAssumptionLoads(ClassDecl, This);
}
+void CodeGenFunction::EmitInheritedCXXConstructorCall(
+ const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
+ bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
+ CallArgList Args;
+ CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType(getContext()),
+ /*NeedsCopy=*/false);
+
+ // Forward the parameters.
+ if (InheritedFromVBase &&
+ CGM.getTarget().getCXXABI().hasConstructorVariants()) {
+ // Nothing to do; this construction is not responsible for constructing
+ // the base class containing the inherited constructor.
+ // FIXME: Can we just pass undef's for the remaining arguments if we don't
+ // have constructor variants?
+ Args.push_back(ThisArg);
+ } else if (!CXXInheritedCtorInitExprArgs.empty()) {
+ // The inheriting constructor was inlined; just inject its arguments.
+ assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
+ "wrong number of parameters for inherited constructor call");
+ Args = CXXInheritedCtorInitExprArgs;
+ Args[0] = ThisArg;
+ } else {
+ // The inheriting constructor was not inlined. Emit delegating arguments.
+ Args.push_back(ThisArg);
+ const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
+ assert(OuterCtor->getNumParams() == D->getNumParams());
+ assert(!OuterCtor->isVariadic() && "should have been inlined");
+
+ for (const auto *Param : OuterCtor->parameters()) {
+ assert(getContext().hasSameUnqualifiedType(
+ OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
+ Param->getType()));
+ EmitDelegateCallArg(Args, Param, E->getLocation());
+
+ // Forward __attribute__(pass_object_size).
+ if (Param->hasAttr<PassObjectSizeAttr>()) {
+ auto *POSParam = SizeArguments[Param];
+ assert(POSParam && "missing pass_object_size value for forwarding");
+ EmitDelegateCallArg(Args, POSParam, E->getLocation());
+ }
+ }
+ }
+
+ EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
+ This, Args);
+}
+
+void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
+ const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
+ bool Delegating, CallArgList &Args) {
+ InlinedInheritingConstructorScope Scope(*this, GlobalDecl(Ctor, CtorType));
+
+ // Save the arguments to be passed to the inherited constructor.
+ CXXInheritedCtorInitExprArgs = Args;
+
+ FunctionArgList Params;
+ QualType RetType = BuildFunctionArgList(CurGD, Params);
+ FnRetTy = RetType;
+
+ // Insert any ABI-specific implicit constructor arguments.
+ CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
+ ForVirtualBase, Delegating, Args);
+
+ // Emit a simplified prolog. We only need to emit the implicit params.
+ assert(Args.size() >= Params.size() && "too few arguments for call");
+ for (unsigned I = 0, N = Args.size(); I != N; ++I) {
+ if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
+ const RValue &RV = Args[I].RV;
+ assert(!RV.isComplex() && "complex indirect params not supported");
+ ParamValue Val = RV.isScalar()
+ ? ParamValue::forDirect(RV.getScalarVal())
+ : ParamValue::forIndirect(RV.getAggregateAddress());
+ EmitParmDecl(*Params[I], Val, I + 1);
+ }
+ }
+
+ // Create a return value slot if the ABI implementation wants one.
+ // FIXME: This is dumb, we should ask the ABI not to try to set the return
+ // value instead.
+ if (!RetType->isVoidType())
+ ReturnValue = CreateIRTemp(RetType, "retval.inhctor");
+
+ CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
+ CXXThisValue = CXXABIThisValue;
+
+ // Directly emit the constructor initializers.
+ EmitCtorPrologue(Ctor, CtorType, Params);
+}
+
void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
llvm::Value *VTableGlobal =
CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
@@ -2147,19 +2290,6 @@ void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
Address This, Address Src,
const CXXConstructExpr *E) {
- if (isMemcpyEquivalentSpecialMember(D)) {
- assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
- assert(D->isCopyOrMoveConstructor() &&
- "trivial 1-arg ctor not a copy/move ctor");
- EmitAggregateCopyCtor(This, Src,
- getContext().getTypeDeclType(D->getParent()),
- (*E->arg_begin())->getType());
- return;
- }
- llvm::Value *Callee = CGM.getAddrOfCXXStructor(D, StructorType::Complete);
- assert(D->isInstance() &&
- "Trying to emit a member call expr on a static method!");
-
const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
CallArgList Args;
@@ -2177,8 +2307,7 @@ CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
/*ParamsToSkip*/ 1);
- EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
- Callee, ReturnValueSlot(), Args, D);
+ EmitCXXConstructorCall(D, Ctor_Complete, false, false, This, Args);
}
void
@@ -2192,21 +2321,17 @@ CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
assert(I != E && "no parameters to constructor");
// this
- DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
+ Address This = LoadCXXThisAddress();
+ DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType());
++I;
- // vtt
- if (llvm::Value *VTT = GetVTTParameter(GlobalDecl(Ctor, CtorType),
- /*ForVirtualBase=*/false,
- /*Delegating=*/true)) {
- QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
- DelegateArgs.add(RValue::get(VTT), VoidPP);
-
- if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
- assert(I != E && "cannot skip vtt parameter, already done with args");
- assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
- ++I;
- }
+ // FIXME: The location of the VTT parameter in the parameter list is
+ // specific to the Itanium ABI and shouldn't be hardcoded here.
+ if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
+ assert(I != E && "cannot skip vtt parameter, already done with args");
+ assert((*I)->getType()->isPointerType() &&
+ "skipping parameter not of vtt type");
+ ++I;
}
// Explicit arguments.
@@ -2216,11 +2341,8 @@ CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
EmitDelegateCallArg(DelegateArgs, param, Loc);
}
- llvm::Value *Callee =
- CGM.getAddrOfCXXStructor(Ctor, getFromCtorType(CtorType));
- EmitCall(CGM.getTypes()
- .arrangeCXXStructorDeclaration(Ctor, getFromCtorType(CtorType)),
- Callee, ReturnValueSlot(), DelegateArgs, Ctor);
+ EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
+ /*Delegating=*/true, This, DelegateArgs);
}
namespace {
@@ -2289,7 +2411,7 @@ namespace {
/*Delegating=*/false, Addr);
}
};
-}
+} // end anonymous namespace
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
Address Addr) {
@@ -2487,15 +2609,35 @@ LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
RD->bases_begin()->getType()->getAsCXXRecordDecl());
}
-void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXMethodDecl *MD,
+void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
+ llvm::Value *VTable,
+ SourceLocation Loc) {
+ if (CGM.getCodeGenOpts().WholeProgramVTables &&
+ CGM.HasHiddenLTOVisibility(RD)) {
+ llvm::Metadata *MD =
+ CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
+ llvm::Value *TypeId =
+ llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
+
+ llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
+ llvm::Value *TypeTest =
+ Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
+ {CastedVTable, TypeId});
+ Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
+ }
+
+ if (SanOpts.has(SanitizerKind::CFIVCall))
+ EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
+}
+
+void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
llvm::Value *VTable,
CFITypeCheckKind TCK,
SourceLocation Loc) {
- const CXXRecordDecl *ClassDecl = MD->getParent();
if (!SanOpts.has(SanitizerKind::CFICastStrict))
- ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);
+ RD = LeastDerivedClassWithSameLayout(RD);
- EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);
+ EmitVTablePtrCheck(RD, VTable, TCK, Loc);
}
void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
@@ -2547,26 +2689,41 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
llvm::Value *VTable,
CFITypeCheckKind TCK,
SourceLocation Loc) {
- if (CGM.IsCFIBlacklistedRecord(RD))
+ if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
+ !CGM.HasHiddenLTOVisibility(RD))
+ return;
+
+ std::string TypeName = RD->getQualifiedNameAsString();
+ if (getContext().getSanitizerBlacklist().isBlacklistedType(TypeName))
return;
SanitizerScope SanScope(this);
+ llvm::SanitizerStatKind SSK;
+ switch (TCK) {
+ case CFITCK_VCall:
+ SSK = llvm::SanStat_CFI_VCall;
+ break;
+ case CFITCK_NVCall:
+ SSK = llvm::SanStat_CFI_NVCall;
+ break;
+ case CFITCK_DerivedCast:
+ SSK = llvm::SanStat_CFI_DerivedCast;
+ break;
+ case CFITCK_UnrelatedCast:
+ SSK = llvm::SanStat_CFI_UnrelatedCast;
+ break;
+ case CFITCK_ICall:
+ llvm_unreachable("not expecting CFITCK_ICall");
+ }
+ EmitSanitizerStatReport(SSK);
llvm::Metadata *MD =
CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
- llvm::Value *BitSetName = llvm::MetadataAsValue::get(getLLVMContext(), MD);
+ llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);
llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
- llvm::Value *BitSetTest =
- Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::bitset_test),
- {CastedVTable, BitSetName});
-
- if (CGM.getCodeGenOpts().SanitizeCfiCrossDso) {
- if (auto TypeId = CGM.CreateCfiIdForTypeMetadata(MD)) {
- EmitCfiSlowPathCheck(BitSetTest, TypeId, CastedVTable);
- return;
- }
- }
+ llvm::Value *TypeTest = Builder.CreateCall(
+ CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});
SanitizerMask M;
switch (TCK) {
@@ -2582,15 +2739,70 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
case CFITCK_UnrelatedCast:
M = SanitizerKind::CFIUnrelatedCast;
break;
+ case CFITCK_ICall:
+ llvm_unreachable("not expecting CFITCK_ICall");
}
llvm::Constant *StaticData[] = {
+ llvm::ConstantInt::get(Int8Ty, TCK),
EmitCheckSourceLocation(Loc),
EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
- llvm::ConstantInt::get(Int8Ty, TCK),
};
- EmitCheck(std::make_pair(BitSetTest, M), "cfi_bad_type", StaticData,
- CastedVTable);
+
+ auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
+ if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
+ EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData);
+ return;
+ }
+
+ if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
+ EmitTrapCheck(TypeTest);
+ return;
+ }
+
+ llvm::Value *AllVtables = llvm::MetadataAsValue::get(
+ CGM.getLLVMContext(),
+ llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
+ llvm::Value *ValidVtable = Builder.CreateCall(
+ CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables});
+ EmitCheck(std::make_pair(TypeTest, M), "cfi_check_fail", StaticData,
+ {CastedVTable, ValidVtable});
+}
+
+bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
+ if (!CGM.getCodeGenOpts().WholeProgramVTables ||
+ !SanOpts.has(SanitizerKind::CFIVCall) ||
+ !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall) ||
+ !CGM.HasHiddenLTOVisibility(RD))
+ return false;
+
+ std::string TypeName = RD->getQualifiedNameAsString();
+ return !getContext().getSanitizerBlacklist().isBlacklistedType(TypeName);
+}
+
+llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
+ const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) {
+ SanitizerScope SanScope(this);
+
+ EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);
+
+ llvm::Metadata *MD =
+ CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
+ llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
+
+ llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
+ llvm::Value *CheckedLoad = Builder.CreateCall(
+ CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
+ {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset),
+ TypeId});
+ llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
+
+ EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
+ "cfi_check_fail", nullptr, nullptr);
+
+ return Builder.CreateBitCast(
+ Builder.CreateExtractValue(CheckedLoad, 0),
+ cast<llvm::PointerType>(VTable->getType())->getElementType());
}
// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
@@ -2731,7 +2943,7 @@ void CodeGenFunction::EmitLambdaBlockInvokeBody() {
CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType);
// Add the rest of the parameters.
- for (auto param : BD->params())
+ for (auto param : BD->parameters())
EmitDelegateCallArg(CallArgs, param, param->getLocStart());
assert(!Lambda->isGenericLambda() &&
@@ -2761,7 +2973,7 @@ void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
CallArgs.add(RValue::get(ThisPtr), ThisType);
// Add the rest of the parameters.
- for (auto Param : MD->params())
+ for (auto Param : MD->parameters())
EmitDelegateCallArg(CallArgs, Param, Param->getLocStart());
const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();