Diffstat (limited to 'lib/CodeGen/CGClass.cpp')
-rw-r--r-- | lib/CodeGen/CGClass.cpp | 784 |
1 file changed, 553 insertions, 231 deletions
diff --git a/lib/CodeGen/CGClass.cpp b/lib/CodeGen/CGClass.cpp index c49f182..2e566de 100644 --- a/lib/CodeGen/CGClass.cpp +++ b/lib/CodeGen/CGClass.cpp @@ -1,4 +1,4 @@ -//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===// +//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===// // // The LLVM Compiler Infrastructure // @@ -25,10 +25,124 @@ #include "clang/CodeGen/CGFunctionInfo.h" #include "clang/Frontend/CodeGenOptions.h" #include "llvm/IR/Intrinsics.h" +#include "llvm/IR/Metadata.h" using namespace clang; using namespace CodeGen; +/// Return the best known alignment for an unknown pointer to a +/// particular class. +CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) { + if (!RD->isCompleteDefinition()) + return CharUnits::One(); // Hopefully won't be used anywhere. + + auto &layout = getContext().getASTRecordLayout(RD); + + // If the class is final, then we know that the pointer points to an + // object of that type and can use the full alignment. + if (RD->hasAttr<FinalAttr>()) { + return layout.getAlignment(); + + // Otherwise, we have to assume it could be a subclass. + } else { + return layout.getNonVirtualAlignment(); + } +} + +/// Return the best known alignment for a pointer to a virtual base, +/// given the alignment of a pointer to the derived class. +CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign, + const CXXRecordDecl *derivedClass, + const CXXRecordDecl *vbaseClass) { + // The basic idea here is that an underaligned derived pointer might + // indicate an underaligned base pointer. + + assert(vbaseClass->isCompleteDefinition()); + auto &baseLayout = getContext().getASTRecordLayout(vbaseClass); + CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment(); + + return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass, + expectedVBaseAlign); +} + +CharUnits +CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign, + const CXXRecordDecl *baseDecl, + CharUnits expectedTargetAlign) { + // If the base is an incomplete type (which is, alas, possible with + // member pointers), be pessimistic. + if (!baseDecl->isCompleteDefinition()) + return std::min(actualBaseAlign, expectedTargetAlign); + + auto &baseLayout = getContext().getASTRecordLayout(baseDecl); + CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment(); + + // If the class is properly aligned, assume the target offset is, too. + // + // This actually isn't necessarily the right thing to do --- if the + // class is a complete object, but it's only properly aligned for a + // base subobject, then the alignments of things relative to it are + // probably off as well. (Note that this requires the alignment of + // the target to be greater than the NV alignment of the derived + // class.) + // + // However, our approach to this kind of under-alignment can only + // ever be best effort; after all, we're never going to propagate + // alignments through variables or parameters. Note, in particular, + // that constructing a polymorphic type in an address that's less + // than pointer-aligned will generally trap in the constructor, + // unless we someday add some sort of attribute to change the + // assumed alignment of 'this'. So our goal here is pretty much + // just to allow the user to explicitly say that a pointer is + // under-aligned and then safely access its fields and v-tables. 
+ if (actualBaseAlign >= expectedBaseAlign) { + return expectedTargetAlign; + } + + // Otherwise, we might be offset by an arbitrary multiple of the + // actual alignment. The correct adjustment is to take the min of + // the two alignments. + return std::min(actualBaseAlign, expectedTargetAlign); +} + +Address CodeGenFunction::LoadCXXThisAddress() { + assert(CurFuncDecl && "loading 'this' without a func declaration?"); + assert(isa<CXXMethodDecl>(CurFuncDecl)); + + // Lazily compute CXXThisAlignment. + if (CXXThisAlignment.isZero()) { + // Just use the best known alignment for the parent. + // TODO: if we're currently emitting a complete-object ctor/dtor, + // we can always use the complete-object alignment. + auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent(); + CXXThisAlignment = CGM.getClassPointerAlignment(RD); + } + + return Address(LoadCXXThis(), CXXThisAlignment); +} + +/// Emit the address of a field using a member data pointer. +/// +/// \param E Only used for emergency diagnostics +Address +CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base, + llvm::Value *memberPtr, + const MemberPointerType *memberPtrType, + AlignmentSource *alignSource) { + // Ask the ABI to compute the actual address. + llvm::Value *ptr = + CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base, + memberPtr, memberPtrType); + + QualType memberType = memberPtrType->getPointeeType(); + CharUnits memberAlign = getNaturalTypeAlignment(memberType, alignSource); + memberAlign = + CGM.getDynamicOffsetAlignment(base.getAlignment(), + memberPtrType->getClass()->getAsCXXRecordDecl(), + memberAlign); + return Address(ptr, memberAlign); +} + CharUnits CodeGenModule::computeNonVirtualBaseClassOffset( const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start, CastExpr::path_const_iterator End) { @@ -78,15 +192,13 @@ CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl, /// when the type is known to be complete (e.g. in complete destructors). /// /// The object pointed to by 'This' is assumed to be non-null. -llvm::Value * -CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This, +Address +CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This, const CXXRecordDecl *Derived, const CXXRecordDecl *Base, bool BaseIsVirtual) { // 'this' must be a pointer (in some address space) to Derived. - assert(This->getType()->isPointerTy() && - cast<llvm::PointerType>(This->getType())->getElementType() - == ConvertType(Derived)); + assert(This.getElementType() == ConvertType(Derived)); // Compute the offset of the virtual base. CharUnits Offset; @@ -98,20 +210,22 @@ CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This, // Shift and cast down to the base type. // TODO: for complete types, this should be possible with a GEP. 
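
Aside (not part of the patch): a standalone sketch of the alignment arithmetic the new helpers above rely on, written with plain integers instead of CharUnits. The function names alignmentAtOffset and dynamicOffsetAlignment are illustrative stand-ins, and power-of-two alignments are assumed.

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Best alignment provable at a fixed byte offset from a base with a known
    // (power-of-two) alignment: the largest power of two dividing the offset,
    // capped by the base alignment. Offset zero imposes no extra limit.
    static uint64_t alignmentAtOffset(uint64_t BaseAlign, uint64_t Offset) {
      if (Offset == 0)
        return BaseAlign;
      return std::min(BaseAlign, Offset & -Offset); // low set bit of Offset
    }

    // The rule from getDynamicOffsetAlignment: if the base pointer is at least
    // as aligned as its layout expects, trust the expected alignment of the
    // target; otherwise the dynamic offset may be any multiple of the actual
    // alignment, so the min of the two alignments is all we can promise.
    static uint64_t dynamicOffsetAlignment(uint64_t ActualBaseAlign,
                                           uint64_t ExpectedBaseAlign,
                                           uint64_t ExpectedTargetAlign) {
      if (ActualBaseAlign >= ExpectedBaseAlign)
        return ExpectedTargetAlign;
      return std::min(ActualBaseAlign, ExpectedTargetAlign);
    }

    int main() {
      assert(alignmentAtOffset(16, 8) == 8);
      assert(alignmentAtOffset(16, 0) == 16);
      assert(dynamicOffsetAlignment(16, 8, 8) == 8); // well-aligned base
      assert(dynamicOffsetAlignment(4, 8, 8) == 4);  // under-aligned base
      return 0;
    }
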
- llvm::Value *V = This; - if (Offset.isPositive()) { - V = Builder.CreateBitCast(V, Int8PtrTy); - V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity()); + Address V = This; + if (!Offset.isZero()) { + V = Builder.CreateElementBitCast(V, Int8Ty); + V = Builder.CreateConstInBoundsByteGEP(V, Offset); } - V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo()); + V = Builder.CreateElementBitCast(V, ConvertType(Base)); return V; } -static llvm::Value * -ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr, +static Address +ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr, CharUnits nonVirtualOffset, - llvm::Value *virtualOffset) { + llvm::Value *virtualOffset, + const CXXRecordDecl *derivedClass, + const CXXRecordDecl *nearestVBase) { // Assert that we have something to do. assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr); @@ -128,13 +242,27 @@ ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr, } // Apply the base offset. + llvm::Value *ptr = addr.getPointer(); ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy); ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr"); - return ptr; + + // If we have a virtual component, the alignment of the result will + // be relative only to the known alignment of that vbase. + CharUnits alignment; + if (virtualOffset) { + assert(nearestVBase && "virtual offset without vbase?"); + alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(), + derivedClass, nearestVBase); + } else { + alignment = addr.getAlignment(); + } + alignment = alignment.alignmentAtOffset(nonVirtualOffset); + + return Address(ptr, alignment); } -llvm::Value *CodeGenFunction::GetAddressOfBaseClass( - llvm::Value *Value, const CXXRecordDecl *Derived, +Address CodeGenFunction::GetAddressOfBaseClass( + Address Value, const CXXRecordDecl *Derived, CastExpr::path_const_iterator PathBegin, CastExpr::path_const_iterator PathEnd, bool NullCheckValue, SourceLocation Loc) { @@ -174,14 +302,14 @@ llvm::Value *CodeGenFunction::GetAddressOfBaseClass( ConvertType((PathEnd[-1])->getType())->getPointerTo(); QualType DerivedTy = getContext().getRecordType(Derived); - CharUnits DerivedAlign = getContext().getTypeAlignInChars(DerivedTy); + CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived); // If the static offset is zero and we don't have a virtual step, // just do a bitcast; null checks are unnecessary. if (NonVirtualOffset.isZero() && !VBase) { if (sanitizePerformTypeCheck()) { - EmitTypeCheck(TCK_Upcast, Loc, Value, DerivedTy, DerivedAlign, - !NullCheckValue); + EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(), + DerivedTy, DerivedAlign, !NullCheckValue); } return Builder.CreateBitCast(Value, BasePtrTy); } @@ -196,14 +324,14 @@ llvm::Value *CodeGenFunction::GetAddressOfBaseClass( llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull"); endBB = createBasicBlock("cast.end"); - llvm::Value *isNull = Builder.CreateIsNull(Value); + llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer()); Builder.CreateCondBr(isNull, endBB, notNullBB); EmitBlock(notNullBB); } if (sanitizePerformTypeCheck()) { - EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc, Value, - DerivedTy, DerivedAlign, true); + EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc, + Value.getPointer(), DerivedTy, DerivedAlign, true); } // Compute the virtual offset. @@ -214,9 +342,8 @@ llvm::Value *CodeGenFunction::GetAddressOfBaseClass( } // Apply both offsets. 
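
Aside (not part of the patch): the recurring change throughout this diff is replacing raw llvm::Value* pointers with an Address that pairs the pointer with its known alignment. This is only a minimal sketch of that shape, not the real clang::CodeGen::Address.

    #include <cassert>
    #include <cstdint>

    // Keep the pointer together with the alignment we can prove for it, so
    // every load/store/GEP derived from it can carry an explicit alignment
    // instead of one guessed from the pointee type.
    class Address {
      void *Ptr;
      uint64_t Align; // bytes; 0 marks an invalid address
      Address() : Ptr(nullptr), Align(0) {}
    public:
      Address(void *P, uint64_t A) : Ptr(P), Align(A) { assert(A != 0); }
      static Address invalid() { return Address(); }
      bool isValid() const { return Align != 0; }
      void *getPointer() const { assert(isValid()); return Ptr; }
      uint64_t getAlignment() const { assert(isValid()); return Align; }
    };

    int main() {
      alignas(16) static int Buffer[4];
      Address A(Buffer, 16);
      assert(A.isValid() && A.getAlignment() == 16 && A.getPointer() == Buffer);
      assert(!Address::invalid().isValid());
    }
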
- Value = ApplyNonVirtualAndVirtualOffset(*this, Value, - NonVirtualOffset, - VirtualOffset); + Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset, + VirtualOffset, Derived, VBase); // Cast to the destination type. Value = Builder.CreateBitCast(Value, BasePtrTy); @@ -228,16 +355,16 @@ llvm::Value *CodeGenFunction::GetAddressOfBaseClass( EmitBlock(endBB); llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result"); - PHI->addIncoming(Value, notNullBB); + PHI->addIncoming(Value.getPointer(), notNullBB); PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB); - Value = PHI; + Value = Address(PHI, Value.getAlignment()); } return Value; } -llvm::Value * -CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value, +Address +CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr, const CXXRecordDecl *Derived, CastExpr::path_const_iterator PathBegin, CastExpr::path_const_iterator PathEnd, @@ -253,7 +380,7 @@ CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value, if (!NonVirtualOffset) { // No offset, we can just cast back. - return Builder.CreateBitCast(Value, DerivedPtrTy); + return Builder.CreateBitCast(BaseAddr, DerivedPtrTy); } llvm::BasicBlock *CastNull = nullptr; @@ -265,19 +392,20 @@ CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value, CastNotNull = createBasicBlock("cast.notnull"); CastEnd = createBasicBlock("cast.end"); - llvm::Value *IsNull = Builder.CreateIsNull(Value); + llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer()); Builder.CreateCondBr(IsNull, CastNull, CastNotNull); EmitBlock(CastNotNull); } // Apply the offset. - Value = Builder.CreateBitCast(Value, Int8PtrTy); + llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy); Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset), "sub.ptr"); // Just cast. Value = Builder.CreateBitCast(Value, DerivedPtrTy); + // Produce a PHI if we had a null-check. if (NullCheckValue) { Builder.CreateBr(CastEnd); EmitBlock(CastNull); @@ -286,12 +414,11 @@ CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value, llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2); PHI->addIncoming(Value, CastNotNull); - PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), - CastNull); + PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull); Value = PHI; } - return Value; + return Address(Value, CGM.getClassPointerAlignment(Derived)); } llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD, @@ -345,7 +472,7 @@ llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD, namespace { /// Call the destructor for a direct base class. - struct CallBaseDtor : EHScopeStack::Cleanup { + struct CallBaseDtor final : EHScopeStack::Cleanup { const CXXRecordDecl *BaseClass; bool BaseIsVirtual; CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual) @@ -356,8 +483,8 @@ namespace { cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent(); const CXXDestructorDecl *D = BaseClass->getDestructor(); - llvm::Value *Addr = - CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(), + Address Addr = + CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(), DerivedClass, BaseClass, BaseIsVirtual); CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, @@ -381,7 +508,7 @@ namespace { // external code might potentially access the vtable. 
void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; } }; -} +} // end anonymous namespace static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) { DynamicThisUseChecker Checker(C); @@ -396,7 +523,7 @@ static void EmitBaseInitializer(CodeGenFunction &CGF, assert(BaseInit->isBaseInitializer() && "Must have base initializer!"); - llvm::Value *ThisPtr = CGF.LoadCXXThis(); + Address ThisPtr = CGF.LoadCXXThisAddress(); const Type *BaseType = BaseInit->getBaseClass(); CXXRecordDecl *BaseClassDecl = @@ -416,13 +543,12 @@ static void EmitBaseInitializer(CodeGenFunction &CGF, // We can pretend to be a complete class because it only matters for // virtual bases, and we only do virtual bases for complete ctors. - llvm::Value *V = + Address V = CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl, BaseClassDecl, isBaseVirtual); - CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType); AggValueSlot AggSlot = - AggValueSlot::forAddr(V, Alignment, Qualifiers(), + AggValueSlot::forAddr(V, Qualifiers(), AggValueSlot::IsDestructed, AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased); @@ -438,17 +564,17 @@ static void EmitBaseInitializer(CodeGenFunction &CGF, static void EmitAggMemberInitializer(CodeGenFunction &CGF, LValue LHS, Expr *Init, - llvm::Value *ArrayIndexVar, + Address ArrayIndexVar, QualType T, ArrayRef<VarDecl *> ArrayIndexes, unsigned Index) { if (Index == ArrayIndexes.size()) { LValue LV = LHS; - if (ArrayIndexVar) { + if (ArrayIndexVar.isValid()) { // If we have an array index variable, load it and use it as an offset. // Then, increment the value. - llvm::Value *Dest = LHS.getAddress(); + llvm::Value *Dest = LHS.getPointer(); llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar); Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress"); llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1); @@ -456,9 +582,9 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF, CGF.Builder.CreateStore(Next, ArrayIndexVar); // Update the LValue. - LV.setAddress(Dest); - CharUnits Align = CGF.getContext().getTypeAlignInChars(T); - LV.setAlignment(std::min(Align, LV.getAlignment())); + CharUnits EltSize = CGF.getContext().getTypeSizeInChars(T); + CharUnits Align = LV.getAlignment().alignmentOfArrayElement(EltSize); + LV.setAddress(Address(Dest, Align)); } switch (CGF.getEvaluationKind(T)) { @@ -485,14 +611,11 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF, const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T); assert(Array && "Array initialization without the array type?"); - llvm::Value *IndexVar - = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]); - assert(IndexVar && "Array index variable not loaded"); + Address IndexVar = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]); // Initialize this index variable to zero. llvm::Value* Zero - = llvm::Constant::getNullValue( - CGF.ConvertType(CGF.getContext().getSizeType())); + = llvm::Constant::getNullValue(IndexVar.getElementType()); CGF.Builder.CreateStore(Zero, IndexVar); // Start the loop with a block that tests the condition. 
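
Aside (not part of the patch): EmitAggMemberInitializer walks a possibly multidimensional array member with one loop per dimension while a single flat index variable advances in the innermost body. A hand-written sketch of that control flow; initArray and the value 7 are purely illustrative.

    #include <cstdio>

    // One loop per array dimension; the innermost body initializes one element
    // through a single flat running index, the way ArrayIndexVar is used above.
    static void initArray(int *Flat, const unsigned *Dims, unsigned NumDims,
                          unsigned &Index) {
      if (NumDims == 0) {
        Flat[Index++] = 7; // stand-in for the per-element constructor call
        return;
      }
      for (unsigned I = 0; I != Dims[0]; ++I)
        initArray(Flat, Dims + 1, NumDims - 1, Index);
    }

    int main() {
      int Data[2 * 3] = {};
      unsigned Dims[] = {2, 3};
      unsigned Index = 0;
      initArray(Data, Dims, 2, Index);
      std::printf("last=%d count=%u\n", Data[5], Index); // last=7 count=6
    }
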
@@ -626,9 +749,8 @@ static void EmitMemberInitializer(CodeGenFunction &CGF, CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes); } -void CodeGenFunction::EmitInitializerForField( - FieldDecl *Field, LValue LHS, Expr *Init, - ArrayRef<VarDecl *> ArrayIndexes) { +void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS, + Expr *Init, ArrayRef<VarDecl *> ArrayIndexes) { QualType FieldType = Field->getType(); switch (getEvaluationKind(FieldType)) { case TEK_Scalar: @@ -643,26 +765,23 @@ void CodeGenFunction::EmitInitializerForField( EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true); break; case TEK_Aggregate: { - llvm::Value *ArrayIndexVar = nullptr; + Address ArrayIndexVar = Address::invalid(); if (ArrayIndexes.size()) { - llvm::Type *SizeTy = ConvertType(getContext().getSizeType()); - // The LHS is a pointer to the first object we'll be constructing, as // a flat array. QualType BaseElementTy = getContext().getBaseElementType(FieldType); llvm::Type *BasePtr = ConvertType(BaseElementTy); BasePtr = llvm::PointerType::getUnqual(BasePtr); - llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(), - BasePtr); + Address BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(), BasePtr); LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy); // Create an array index that will be used to walk over all of the // objects we're constructing. - ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index"); - llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy); + ArrayIndexVar = CreateMemTemp(getContext().getSizeType(), "object.index"); + llvm::Value *Zero = + llvm::Constant::getNullValue(ArrayIndexVar.getElementType()); Builder.CreateStore(Zero, ArrayIndexVar); - // Emit the block variables for the array indices, if any. for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I) EmitAutoVarDecl(*ArrayIndexes[I]); @@ -811,7 +930,7 @@ void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) { return; } - const FunctionDecl *Definition = 0; + const FunctionDecl *Definition = nullptr; Stmt *Body = Ctor->getBody(Definition); assert(Definition == Ctor && "emitting wrong constructor body"); @@ -868,7 +987,7 @@ namespace { SanitizerSet OldSanOpts; }; } - + namespace { class FieldMemcpyizer { public: @@ -930,19 +1049,16 @@ namespace { CharUnits MemcpySize = getMemcpySize(FirstByteOffset); QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); - llvm::Value *ThisPtr = CGF.LoadCXXThis(); - LValue DestLV = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy); + Address ThisPtr = CGF.LoadCXXThisAddress(); + LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy); LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField); llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec)); LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy); LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField); - CharUnits Offset = CGF.getContext().toCharUnitsFromBits(FirstByteOffset); - CharUnits Alignment = DestLV.getAlignment().alignmentAtOffset(Offset); - - emitMemcpyIR(Dest.isBitField() ? Dest.getBitFieldAddr() : Dest.getAddress(), - Src.isBitField() ? Src.getBitFieldAddr() : Src.getAddress(), - MemcpySize, Alignment); + emitMemcpyIR(Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(), + Src.isBitField() ? 
Src.getBitFieldAddress() : Src.getAddress(), + MemcpySize); reset(); } @@ -956,20 +1072,18 @@ namespace { private: - void emitMemcpyIR(llvm::Value *DestPtr, llvm::Value *SrcPtr, - CharUnits Size, CharUnits Alignment) { - llvm::PointerType *DPT = cast<llvm::PointerType>(DestPtr->getType()); + void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) { + llvm::PointerType *DPT = DestPtr.getType(); llvm::Type *DBP = llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), DPT->getAddressSpace()); DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP); - llvm::PointerType *SPT = cast<llvm::PointerType>(SrcPtr->getType()); + llvm::PointerType *SPT = SrcPtr.getType(); llvm::Type *SBP = llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), SPT->getAddressSpace()); SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP); - CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity(), - Alignment.getQuantity()); + CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity()); } void addInitialField(FieldDecl *F) { @@ -1089,9 +1203,9 @@ namespace { } void pushEHDestructors() { - llvm::Value *ThisPtr = CGF.LoadCXXThis(); + Address ThisPtr = CGF.LoadCXXThisAddress(); QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); - LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy); + LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy); for (unsigned i = 0; i < AggregatedInits.size(); ++i) { CXXCtorInitializer *MemberInit = AggregatedInits[i]; @@ -1228,7 +1342,13 @@ namespace { emitAggregatedStmts(); } }; +} // end anonymous namespace +static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) { + const Type *BaseType = BaseInit->getBaseClass(); + const auto *BaseClassDecl = + cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); + return BaseClassDecl->isDynamicClass(); } /// EmitCtorPrologue - This routine generates necessary code to initialize @@ -1254,8 +1374,13 @@ void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD, assert(BaseCtorContinueBB); } + llvm::Value *const OldThis = CXXThisValue; // Virtual base initializers first. for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) { + if (CGM.getCodeGenOpts().StrictVTablePointers && + CGM.getCodeGenOpts().OptimizationLevel > 0 && + isInitializerOfDynamicClass(*B)) + CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); EmitBaseInitializer(*this, ClassDecl, *B, CtorType); } @@ -1268,13 +1393,20 @@ void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD, // Then, non-virtual base initializers. for (; B != E && (*B)->isBaseInitializer(); B++) { assert(!(*B)->isBaseVirtual()); + + if (CGM.getCodeGenOpts().StrictVTablePointers && + CGM.getCodeGenOpts().OptimizationLevel > 0 && + isInitializerOfDynamicClass(*B)) + CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); EmitBaseInitializer(*this, ClassDecl, *B, CtorType); } + CXXThisValue = OldThis; + InitializeVTablePointers(ClassDecl); // And finally, initialize class members. 
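
Aside (not part of the patch): the invariant.group barriers inserted in EmitCtorPrologue above matter because the vtable pointer genuinely changes while base subobjects are constructed, even though -fstrict-vtable-pointers otherwise tells the optimizer it never does. A small program that observes the change; it prints "Base" and then "Derived".

    #include <cstdio>

    struct Base {
      Base() { callMe(); }   // dispatches through Base's vtable: prints "Base"
      virtual void callMe() { std::puts("Base"); }
      virtual ~Base() = default;
    };

    struct Derived : Base {
      // After Base() returns, the ctor prologue rewrites the vptr to Derived's
      // vtable, so the same call now prints "Derived".
      Derived() { callMe(); }
      void callMe() override { std::puts("Derived"); }
    };

    int main() { Derived D; }
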
- FieldConstructionScope FCS(*this, CXXThisValue); + FieldConstructionScope FCS(*this, LoadCXXThisAddress()); ConstructorMemcpyizer CM(*this, CD, Args); for (; B != E; B++) { CXXCtorInitializer *Member = (*B); @@ -1334,7 +1466,7 @@ HasTrivialDestructorBody(ASTContext &Context, static bool FieldHasTrivialDestructorBody(ASTContext &Context, - const FieldDecl *Field) + const FieldDecl *Field) { QualType FieldBaseElementType = Context.getBaseElementType(Field->getType()); @@ -1353,39 +1485,23 @@ FieldHasTrivialDestructorBody(ASTContext &Context, /// CanSkipVTablePointerInitialization - Check whether we need to initialize /// any vtable pointers before calling this destructor. -static bool CanSkipVTablePointerInitialization(ASTContext &Context, +static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF, const CXXDestructorDecl *Dtor) { + const CXXRecordDecl *ClassDecl = Dtor->getParent(); + if (!ClassDecl->isDynamicClass()) + return true; + if (!Dtor->hasTrivialBody()) return false; // Check the fields. - const CXXRecordDecl *ClassDecl = Dtor->getParent(); for (const auto *Field : ClassDecl->fields()) - if (!FieldHasTrivialDestructorBody(Context, Field)) + if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field)) return false; return true; } -// Generates function call for handling object poisoning, passing in -// references to 'this' and its size as arguments. -static void EmitDtorSanitizerCallback(CodeGenFunction &CGF, - const CXXDestructorDecl *Dtor) { - const ASTRecordLayout &Layout = - CGF.getContext().getASTRecordLayout(Dtor->getParent()); - - llvm::Value *Args[] = { - CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.VoidPtrTy), - llvm::ConstantInt::get(CGF.SizeTy, Layout.getSize().getQuantity())}; - llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy}; - - llvm::FunctionType *FnType = - llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false); - llvm::Value *Fn = - CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback"); - CGF.EmitNounwindRuntimeCall(Fn, Args); -} - /// EmitDestructorBody - Emits the body of the current destructor. void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl()); @@ -1402,7 +1518,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { if (DtorType == Dtor_Deleting) { EnterDtorCleanups(Dtor, Dtor_Deleting); EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false, - /*Delegating=*/false, LoadCXXThis()); + /*Delegating=*/false, LoadCXXThisAddress()); PopCleanupBlock(); return; } @@ -1437,7 +1553,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { if (!isTryBody) { EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false, - /*Delegating=*/false, LoadCXXThis()); + /*Delegating=*/false, LoadCXXThisAddress()); break; } // Fallthrough: act like we're in the base variant. @@ -1449,8 +1565,14 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { EnterDtorCleanups(Dtor, Dtor_Base); // Initialize the vtable pointers before entering the body. - if (!CanSkipVTablePointerInitialization(getContext(), Dtor)) - InitializeVTablePointers(Dtor->getParent()); + if (!CanSkipVTablePointerInitialization(*this, Dtor)) { + // Insert the llvm.invariant.group.barrier intrinsic before initializing + // the vptrs to cancel any previous assumptions we might have made. 
+ if (CGM.getCodeGenOpts().StrictVTablePointers && + CGM.getCodeGenOpts().OptimizationLevel > 0) + CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis()); + InitializeVTablePointers(Dtor->getParent()); + } if (isTryBody) EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock()); @@ -1464,6 +1586,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { // the caller's body. if (getLangOpts().AppleKext) CurFn->addFnAttr(llvm::Attribute::AlwaysInline); + break; } @@ -1473,10 +1596,6 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { // Exit the try if applicable. if (isTryBody) ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true); - - // Insert memory-poisoning instrumentation. - if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor) - EmitDtorSanitizerCallback(*this, Dtor); } void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) { @@ -1496,7 +1615,7 @@ void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) namespace { /// Call the operator delete associated with the current destructor. - struct CallDtorDelete : EHScopeStack::Cleanup { + struct CallDtorDelete final : EHScopeStack::Cleanup { CallDtorDelete() {} void Emit(CodeGenFunction &CGF, Flags flags) override { @@ -1507,11 +1626,11 @@ namespace { } }; - struct CallDtorDeleteConditional : EHScopeStack::Cleanup { + struct CallDtorDeleteConditional final : EHScopeStack::Cleanup { llvm::Value *ShouldDeleteCondition; public: CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition) - : ShouldDeleteCondition(ShouldDeleteCondition) { + : ShouldDeleteCondition(ShouldDeleteCondition) { assert(ShouldDeleteCondition != nullptr); } @@ -1533,7 +1652,7 @@ namespace { } }; - class DestroyField : public EHScopeStack::Cleanup { + class DestroyField final : public EHScopeStack::Cleanup { const FieldDecl *field; CodeGenFunction::Destroyer *destroyer; bool useEHCleanupForArray; @@ -1541,12 +1660,12 @@ namespace { public: DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer, bool useEHCleanupForArray) - : field(field), destroyer(destroyer), - useEHCleanupForArray(useEHCleanupForArray) {} + : field(field), destroyer(destroyer), + useEHCleanupForArray(useEHCleanupForArray) {} void Emit(CodeGenFunction &CGF, Flags flags) override { // Find the address of the field. 
- llvm::Value *thisValue = CGF.LoadCXXThis(); + Address thisValue = CGF.LoadCXXThisAddress(); QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent()); LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy); LValue LV = CGF.EmitLValueForField(ThisLV, field); @@ -1556,7 +1675,133 @@ namespace { flags.isForNormalCleanup() && useEHCleanupForArray); } }; -} + + static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr, + CharUnits::QuantityType PoisonSize) { + // Pass in void pointer and size of region as arguments to runtime + // function + llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy), + llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)}; + + llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy}; + + llvm::FunctionType *FnType = + llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false); + llvm::Value *Fn = + CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback"); + CGF.EmitNounwindRuntimeCall(Fn, Args); + } + + class SanitizeDtorMembers final : public EHScopeStack::Cleanup { + const CXXDestructorDecl *Dtor; + + public: + SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {} + + // Generate function call for handling object poisoning. + // Disables tail call elimination, to prevent the current stack frame + // from disappearing from the stack trace. + void Emit(CodeGenFunction &CGF, Flags flags) override { + const ASTRecordLayout &Layout = + CGF.getContext().getASTRecordLayout(Dtor->getParent()); + + // Nothing to poison. + if (Layout.getFieldCount() == 0) + return; + + // Prevent the current stack frame from disappearing from the stack trace. + CGF.CurFn->addFnAttr("disable-tail-calls", "true"); + + // Construct pointer to region to begin poisoning, and calculate poison + // size, so that only members declared in this class are poisoned. + ASTContext &Context = CGF.getContext(); + unsigned fieldIndex = 0; + int startIndex = -1; + // RecordDecl::field_iterator Field; + for (const FieldDecl *Field : Dtor->getParent()->fields()) { + // Poison field if it is trivial + if (FieldHasTrivialDestructorBody(Context, Field)) { + // Start sanitizing at this field + if (startIndex < 0) + startIndex = fieldIndex; + + // Currently on the last field, and it must be poisoned with the + // current block. 
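
Aside (not part of the patch): the loop in SanitizeDtorMembers::Emit coalesces consecutive trivially destructible fields into one poison range per run, so __sanitizer_dtor_callback is invoked once per block rather than once per field. A standalone sketch of that grouping; the Field struct and its offsets and sizes are made up for illustration, and the real code derives the end of the final run from the record layout rather than from the last field's size.

    #include <cstdio>
    #include <vector>

    struct Field { unsigned Offset, Size; bool TrivialDtor; };

    static void poison(unsigned Begin, unsigned End) {
      std::printf("poison bytes [%u, %u)\n", Begin, End);
    }

    // Coalesce consecutive fields with trivial destructor bodies into
    // contiguous [begin, end) byte ranges and poison each range once.
    static void poisonTrivialRuns(const std::vector<Field> &Fields) {
      int Start = -1; // index of the first field in the current run, or -1
      for (unsigned I = 0; I != Fields.size(); ++I) {
        if (Fields[I].TrivialDtor) {
          if (Start < 0)
            Start = I;
          if (I + 1 == Fields.size()) // the last field closes the run
            poison(Fields[Start].Offset, Fields[I].Offset + Fields[I].Size);
        } else if (Start >= 0) {      // a non-trivial field ends the run
          poison(Fields[Start].Offset, Fields[I].Offset);
          Start = -1;
        }
      }
    }

    int main() {
      poisonTrivialRuns({{0, 4, true}, {4, 4, true}, {8, 8, false}, {16, 4, true}});
      // prints: poison bytes [0, 8)  and  poison bytes [16, 20)
    }
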
+ if (fieldIndex == Layout.getFieldCount() - 1) { + PoisonMembers(CGF, startIndex, Layout.getFieldCount()); + } + } else if (startIndex >= 0) { + // No longer within a block of memory to poison, so poison the block + PoisonMembers(CGF, startIndex, fieldIndex); + // Re-set the start index + startIndex = -1; + } + fieldIndex += 1; + } + } + + private: + /// \param layoutStartOffset index of the ASTRecordLayout field to + /// start poisoning (inclusive) + /// \param layoutEndOffset index of the ASTRecordLayout field to + /// end poisoning (exclusive) + void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset, + unsigned layoutEndOffset) { + ASTContext &Context = CGF.getContext(); + const ASTRecordLayout &Layout = + Context.getASTRecordLayout(Dtor->getParent()); + + llvm::ConstantInt *OffsetSizePtr = llvm::ConstantInt::get( + CGF.SizeTy, + Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset)) + .getQuantity()); + + llvm::Value *OffsetPtr = CGF.Builder.CreateGEP( + CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy), + OffsetSizePtr); + + CharUnits::QuantityType PoisonSize; + if (layoutEndOffset >= Layout.getFieldCount()) { + PoisonSize = Layout.getNonVirtualSize().getQuantity() - + Context.toCharUnitsFromBits( + Layout.getFieldOffset(layoutStartOffset)) + .getQuantity(); + } else { + PoisonSize = Context.toCharUnitsFromBits( + Layout.getFieldOffset(layoutEndOffset) - + Layout.getFieldOffset(layoutStartOffset)) + .getQuantity(); + } + + if (PoisonSize == 0) + return; + + EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize); + } + }; + + class SanitizeDtorVTable final : public EHScopeStack::Cleanup { + const CXXDestructorDecl *Dtor; + + public: + SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {} + + // Generate function call for handling vtable pointer poisoning. + void Emit(CodeGenFunction &CGF, Flags flags) override { + assert(Dtor->getParent()->isDynamicClass()); + (void)Dtor; + ASTContext &Context = CGF.getContext(); + // Poison vtable and vtable ptr if they exist for this class. + llvm::Value *VTablePtr = CGF.LoadCXXThis(); + + CharUnits::QuantityType PoisonSize = + Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity(); + // Pass in void pointer and size of region as arguments to runtime + // function + EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize); + } + }; +} // end anonymous namespace /// \brief Emit all code that comes at the end of class's /// destructor. This is to call destructors on members and base classes @@ -1590,6 +1835,12 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, // The complete-destructor phase just destructs all the virtual bases. if (DtorType == Dtor_Complete) { + // Poison the vtable pointer such that access after the base + // and member destructors are invoked is invalid. + if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && + SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() && + ClassDecl->isPolymorphic()) + EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); // We push them in the forward order so that they'll be popped in // the reverse order. @@ -1610,6 +1861,12 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, } assert(DtorType == Dtor_Base); + // Poison the vtable pointer if it has no virtual bases, but inherits + // virtual functions. 
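
Aside (not part of the patch): the SanitizeDtorMembers and SanitizeDtorVTable cleanups implement -fsanitize-memory-use-after-dtor, which poisons an object's storage once its destructor has run so MemorySanitizer can report later uses. The kind of bug it targets; the offending use is left commented out so the example itself stays well defined.

    #include <cstdio>
    #include <new>

    struct Widget {
      int Id = 42;
      ~Widget() {} // with use-after-dtor, members are poisoned when this returns
    };

    int main() {
      alignas(Widget) unsigned char Buf[sizeof(Widget)];
      Widget *W = new (Buf) Widget;
      int *P = &W->Id;
      W->~Widget();
      // if (*P == 42) std::puts("boom"); // branching on the poisoned value
      //                                  // is reported as use-after-dtor
      (void)P;
    }
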
+ if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && + SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() && + ClassDecl->isPolymorphic()) + EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); // Destroy non-virtual bases. for (const auto &Base : ClassDecl->bases()) { @@ -1628,6 +1885,12 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, /*BaseIsVirtual*/ false); } + // Poison fields such that access after their destructors are + // invoked, and before the base class destructor runs, is invalid. + if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && + SanOpts.has(SanitizerKind::Memory)) + EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD); + // Destroy direct fields. for (const auto *Field : ClassDecl->fields()) { QualType type = Field->getType(); @@ -1655,7 +1918,7 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, /// zero-initialized before it is constructed void CodeGenFunction::EmitCXXAggrConstructorCall( const CXXConstructorDecl *ctor, const ConstantArrayType *arrayType, - llvm::Value *arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) { + Address arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) { QualType elementType; llvm::Value *numElements = emitArrayLength(arrayType, elementType, arrayBegin); @@ -1669,15 +1932,14 @@ void CodeGenFunction::EmitCXXAggrConstructorCall( /// \param ctor the constructor to call for each element /// \param numElements the number of elements in the array; /// may be zero -/// \param arrayBegin a T*, where T is the type constructed by ctor +/// \param arrayBase a T*, where T is the type constructed by ctor /// \param zeroInitialize true if each element should be /// zero-initialized before it is constructed void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, llvm::Value *numElements, - llvm::Value *arrayBegin, + Address arrayBase, const CXXConstructExpr *E, bool zeroInitialize) { - // It's legal for numElements to be zero. This can happen both // dynamically, because x can be zero in 'new A[x]', and statically, // because of GCC extensions that permit zero-length arrays. There @@ -1701,6 +1963,7 @@ void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, } // Find the end of the array. + llvm::Value *arrayBegin = arrayBase.getPointer(); llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements, "arrayctor.end"); @@ -1714,11 +1977,21 @@ void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, // Inside the loop body, emit the constructor call on the array element. + // The alignment of the base, adjusted by the size of a single element, + // provides a conservative estimate of the alignment of every element. + // (This assumes we never start tracking offsetted alignments.) + // + // Note that these are complete objects and so we don't need to + // use the non-virtual size or alignment. QualType type = getContext().getTypeDeclType(ctor->getParent()); + CharUnits eltAlignment = + arrayBase.getAlignment() + .alignmentOfArrayElement(getContext().getTypeSizeInChars(type)); + Address curAddr = Address(cur, eltAlignment); // Zero initialize the storage, if requested. 
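
Aside (not part of the patch): alignmentOfArrayElement in the constructor loop above computes the alignment that can be promised for every element, not just the first. With power-of-two alignments that is the base alignment capped by the largest power of two dividing the element size; the free function below is an illustrative stand-in for the CharUnits helper.

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Element i lives at i * EltSize bytes from the base; the worst case is an
    // odd i, so the guarantee is min(BaseAlign, low set bit of EltSize).
    static uint64_t alignmentOfArrayElement(uint64_t BaseAlign, uint64_t EltSize) {
      return std::min(BaseAlign, EltSize & -EltSize);
    }

    int main() {
      assert(alignmentOfArrayElement(16, 12) == 4); // 12 = 4 * 3
      assert(alignmentOfArrayElement(8, 32) == 8);
      return 0;
    }
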
if (zeroInitialize) - EmitNullInitialization(cur, type); + EmitNullInitialization(curAddr, type); // C++ [class.temporary]p4: // There are two contexts in which temporaries are destroyed at a different @@ -1736,11 +2009,12 @@ void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, if (getLangOpts().Exceptions && !ctor->getParent()->hasTrivialDestructor()) { Destroyer *destroyer = destroyCXXObject; - pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer); + pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment, + *destroyer); } EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false, - /*Delegating=*/false, cur, E); + /*Delegating=*/false, curAddr, E); } // Go to the next element. @@ -1761,7 +2035,7 @@ void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, } void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF, - llvm::Value *addr, + Address addr, QualType type) { const RecordType *rtype = type->castAs<RecordType>(); const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl()); @@ -1774,14 +2048,16 @@ void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF, void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type, bool ForVirtualBase, - bool Delegating, llvm::Value *This, + bool Delegating, Address This, const CXXConstructExpr *E) { + const CXXRecordDecl *ClassDecl = D->getParent(); + // C++11 [class.mfct.non-static]p2: // If a non-static member function of a class X is called for an object that // is not of type X, or of a type derived from X, the behavior is undefined. // FIXME: Provide a source location here. - EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, SourceLocation(), This, - getContext().getRecordType(D->getParent())); + EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, SourceLocation(), + This.getPointer(), getContext().getRecordType(ClassDecl)); if (D->isTrivial() && D->isDefaultConstructor()) { assert(E->getNumArgs() == 0 && "trivial default ctor with args"); @@ -1796,8 +2072,8 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, const Expr *Arg = E->getArg(0); QualType SrcTy = Arg->getType(); - llvm::Value *Src = EmitLValue(Arg).getAddress(); - QualType DestTy = getContext().getTypeDeclType(D->getParent()); + Address Src = EmitLValue(Arg).getAddress(); + QualType DestTy = getContext().getTypeDeclType(ClassDecl); EmitAggregateCopyCtor(This, Src, DestTy, SrcTy); return; } @@ -1805,11 +2081,11 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, CallArgList Args; // Push the this ptr. - Args.add(RValue::get(This), D->getThisType(getContext())); + Args.add(RValue::get(This.getPointer()), D->getThisType(getContext())); // Add the rest of the user-supplied arguments. const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>(); - EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end(), E->getConstructor()); + EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor()); // Insert any ABI-specific implicit constructor arguments. unsigned ExtraArgs = CGM.getCXXABI().addImplicitConstructorArgs( @@ -1820,19 +2096,64 @@ void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(Args, D, Type, ExtraArgs); EmitCall(Info, Callee, ReturnValueSlot(), Args, D); + + // Generate vtable assumptions if we're constructing a complete object + // with a vtable. 
We don't do this for base subobjects for two reasons: + // first, it's incorrect for classes with virtual bases, and second, we're + // about to overwrite the vptrs anyway. + // We also have to make sure if we can refer to vtable: + // - Otherwise we can refer to vtable if it's safe to speculatively emit. + // FIXME: If vtable is used by ctor/dtor, or if vtable is external and we are + // sure that definition of vtable is not hidden, + // then we are always safe to refer to it. + // FIXME: It looks like InstCombine is very inefficient on dealing with + // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily. + if (CGM.getCodeGenOpts().OptimizationLevel > 0 && + ClassDecl->isDynamicClass() && Type != Ctor_Base && + CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) && + CGM.getCodeGenOpts().StrictVTablePointers) + EmitVTableAssumptionLoads(ClassDecl, This); +} + +void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) { + llvm::Value *VTableGlobal = + CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass); + if (!VTableGlobal) + return; + + // We can just use the base offset in the complete class. + CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset(); + + if (!NonVirtualOffset.isZero()) + This = + ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr, + Vptr.VTableClass, Vptr.NearestVBase); + + llvm::Value *VPtrValue = + GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass); + llvm::Value *Cmp = + Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables"); + Builder.CreateAssumption(Cmp); +} + +void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl, + Address This) { + if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl)) + for (const VPtr &Vptr : getVTablePointers(ClassDecl)) + EmitVTableAssumptionLoad(Vptr, This); } void CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D, - llvm::Value *This, llvm::Value *Src, - const CXXConstructExpr *E) { + Address This, Address Src, + const CXXConstructExpr *E) { if (isMemcpyEquivalentSpecialMember(D)) { assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor"); assert(D->isCopyOrMoveConstructor() && "trivial 1-arg ctor not a copy/move ctor"); EmitAggregateCopyCtor(This, Src, getContext().getTypeDeclType(D->getParent()), - E->arg_begin()->getType()); + (*E->arg_begin())->getType()); return; } llvm::Value *Callee = CGM.getAddrOfCXXStructor(D, StructorType::Complete); @@ -1844,16 +2165,16 @@ CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D, CallArgList Args; // Push the this ptr. - Args.add(RValue::get(This), D->getThisType(getContext())); + Args.add(RValue::get(This.getPointer()), D->getThisType(getContext())); // Push the src ptr. QualType QT = *(FPT->param_type_begin()); llvm::Type *t = CGM.getTypes().ConvertType(QT); Src = Builder.CreateBitCast(Src, t); - Args.add(RValue::get(Src), QT); + Args.add(RValue::get(Src.getPointer()), QT); // Skip over first argument (Src). 
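
Aside (not part of the patch): EmitVTableAssumptionLoads above emits an llvm.assume that the just-stored vptr equals the class's vtable right after a complete-object constructor, which lets later passes devirtualize calls on the freshly constructed object. Roughly the source pattern it helps with; useIt is illustrative.

    struct Base {
      virtual int f() { return 1; }
      virtual ~Base() = default;
    };
    struct Derived : Base {
      int f() override { return 2; }
    };

    int useIt() {
      Derived D;      // ctor stores Derived's vtable pointer
      Base *B = &D;   // the assumption pins the vptr value here...
      return B->f();  // ...so this call can fold to 'return 2'
    }

    int main() { return useIt() == 2 ? 0 : 1; }
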
- EmitCallArgs(Args, FPT, E->arg_begin() + 1, E->arg_end(), E->getConstructor(), + EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(), /*ParamsToSkip*/ 1); EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All), @@ -1903,12 +2224,12 @@ CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor, } namespace { - struct CallDelegatingCtorDtor : EHScopeStack::Cleanup { + struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup { const CXXDestructorDecl *Dtor; - llvm::Value *Addr; + Address Addr; CXXDtorType Type; - CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr, + CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr, CXXDtorType Type) : Dtor(D), Addr(Addr), Type(Type) {} @@ -1917,19 +2238,17 @@ namespace { /*Delegating=*/true, Addr); } }; -} +} // end anonymous namespace void CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor, const FunctionArgList &Args) { assert(Ctor->isDelegatingConstructor()); - llvm::Value *ThisPtr = LoadCXXThis(); + Address ThisPtr = LoadCXXThisAddress(); - QualType Ty = getContext().getTagDeclType(Ctor->getParent()); - CharUnits Alignment = getContext().getTypeAlignInChars(Ty); AggValueSlot AggSlot = - AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(), + AggValueSlot::forAddr(ThisPtr, Qualifiers(), AggValueSlot::IsDestructed, AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased); @@ -1951,17 +2270,17 @@ void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD, CXXDtorType Type, bool ForVirtualBase, bool Delegating, - llvm::Value *This) { + Address This) { CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase, Delegating, This); } namespace { - struct CallLocalDtor : EHScopeStack::Cleanup { + struct CallLocalDtor final : EHScopeStack::Cleanup { const CXXDestructorDecl *Dtor; - llvm::Value *Addr; + Address Addr; - CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr) + CallLocalDtor(const CXXDestructorDecl *D, Address Addr) : Dtor(D), Addr(Addr) {} void Emit(CodeGenFunction &CGF, Flags flags) override { @@ -1973,11 +2292,11 @@ namespace { } void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D, - llvm::Value *Addr) { + Address Addr) { EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr); } -void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) { +void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) { CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl(); if (!ClassDecl) return; if (ClassDecl->hasTrivialDestructor()) return; @@ -1987,24 +2306,12 @@ void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) { PushDestructorCleanup(D, Addr); } -void -CodeGenFunction::InitializeVTablePointer(BaseSubobject Base, - const CXXRecordDecl *NearestVBase, - CharUnits OffsetFromNearestVBase, - const CXXRecordDecl *VTableClass) { - const CXXRecordDecl *RD = Base.getBase(); - - // Don't initialize the vtable pointer if the class is marked with the - // 'novtable' attribute. - if ((RD == VTableClass || RD == NearestVBase) && - VTableClass->hasAttr<MSNoVTableAttr>()) - return; - +void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) { // Compute the address point. 
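
Aside (not part of the patch): the CallDelegatingCtorDtor cleanup above implements the language rule that once the target constructor of a delegating constructor completes, the object counts as constructed, so an exception thrown from the delegating constructor's own body must still run the destructor. For example:

    #include <cstdio>
    #include <stdexcept>

    struct Logger {
      Logger(int) { std::puts("target ctor"); }
      Logger() : Logger(0) {            // delegating constructor
        throw std::runtime_error("ctor body failed");
      }
      ~Logger() { std::puts("dtor still runs"); }
    };

    int main() {
      try {
        Logger L;
      } catch (const std::exception &E) {
        std::puts(E.what());
      }
      // prints: target ctor / dtor still runs / ctor body failed
    }
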
- bool NeedsVirtualOffset; llvm::Value *VTableAddressPoint = CGM.getCXXABI().getVTableAddressPointInStructor( - *this, VTableClass, Base, NearestVBase, NeedsVirtualOffset); + *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase); + if (!VTableAddressPoint) return; @@ -2012,26 +2319,25 @@ CodeGenFunction::InitializeVTablePointer(BaseSubobject Base, llvm::Value *VirtualOffset = nullptr; CharUnits NonVirtualOffset = CharUnits::Zero(); - if (NeedsVirtualOffset) { + if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) { // We need to use the virtual base offset offset because the virtual base // might have a different offset in the most derived class. - VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(*this, - LoadCXXThis(), - VTableClass, - NearestVBase); - NonVirtualOffset = OffsetFromNearestVBase; + + VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset( + *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase); + NonVirtualOffset = Vptr.OffsetFromNearestVBase; } else { // We can just use the base offset in the complete class. - NonVirtualOffset = Base.getBaseOffset(); + NonVirtualOffset = Vptr.Base.getBaseOffset(); } // Apply the offsets. - llvm::Value *VTableField = LoadCXXThis(); + Address VTableField = LoadCXXThisAddress(); if (!NonVirtualOffset.isZero() || VirtualOffset) - VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField, - NonVirtualOffset, - VirtualOffset); + VTableField = ApplyNonVirtualAndVirtualOffset( + *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass, + Vptr.NearestVBase); // Finally, store the address point. Use the same LLVM types as the field to // support optimization. @@ -2041,23 +2347,39 @@ CodeGenFunction::InitializeVTablePointer(BaseSubobject Base, ->getPointerTo(); VTableField = Builder.CreateBitCast(VTableField, VTablePtrTy->getPointerTo()); VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy); + llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField); - CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr()); + CGM.DecorateInstructionWithTBAA(Store, CGM.getTBAAInfoForVTablePtr()); + if (CGM.getCodeGenOpts().OptimizationLevel > 0 && + CGM.getCodeGenOpts().StrictVTablePointers) + CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass); } -void -CodeGenFunction::InitializeVTablePointers(BaseSubobject Base, - const CXXRecordDecl *NearestVBase, - CharUnits OffsetFromNearestVBase, - bool BaseIsNonVirtualPrimaryBase, - const CXXRecordDecl *VTableClass, - VisitedVirtualBasesSetTy& VBases) { +CodeGenFunction::VPtrsVector +CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) { + CodeGenFunction::VPtrsVector VPtrsResult; + VisitedVirtualBasesSetTy VBases; + getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()), + /*NearestVBase=*/nullptr, + /*OffsetFromNearestVBase=*/CharUnits::Zero(), + /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases, + VPtrsResult); + return VPtrsResult; +} + +void CodeGenFunction::getVTablePointers(BaseSubobject Base, + const CXXRecordDecl *NearestVBase, + CharUnits OffsetFromNearestVBase, + bool BaseIsNonVirtualPrimaryBase, + const CXXRecordDecl *VTableClass, + VisitedVirtualBasesSetTy &VBases, + VPtrsVector &Vptrs) { // If this base is a non-virtual primary base the address point has already // been set. if (!BaseIsNonVirtualPrimaryBase) { // Initialize the vtable pointer for this base. 
- InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase, - VTableClass); + VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass}; + Vptrs.push_back(Vptr); } const CXXRecordDecl *RD = Base.getBase(); @@ -2095,11 +2417,10 @@ CodeGenFunction::InitializeVTablePointers(BaseSubobject Base, BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl; } - InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset), - I.isVirtual() ? BaseDecl : NearestVBase, - BaseOffsetFromNearestVBase, - BaseDeclIsNonVirtualPrimaryBase, - VTableClass, VBases); + getVTablePointers( + BaseSubobject(BaseDecl, BaseOffset), + I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase, + BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs); } } @@ -2109,21 +2430,25 @@ void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) { return; // Initialize the vtable pointers for this class and all of its bases. - VisitedVirtualBasesSetTy VBases; - InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()), - /*NearestVBase=*/nullptr, - /*OffsetFromNearestVBase=*/CharUnits::Zero(), - /*BaseIsNonVirtualPrimaryBase=*/false, RD, VBases); + if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD)) + for (const VPtr &Vptr : getVTablePointers(RD)) + InitializeVTablePointer(Vptr); if (RD->getNumVBases()) CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD); } -llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This, - llvm::Type *Ty) { - llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo()); +llvm::Value *CodeGenFunction::GetVTablePtr(Address This, + llvm::Type *VTableTy, + const CXXRecordDecl *RD) { + Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy); llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable"); - CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr()); + CGM.DecorateInstructionWithTBAA(VTable, CGM.getTBAAInfoForVTablePtr()); + + if (CGM.getCodeGenOpts().OptimizationLevel > 0 && + CGM.getCodeGenOpts().StrictVTablePointers) + CGM.DecorateInstructionWithInvariantGroup(VTable, RD); + return VTable; } @@ -2190,19 +2515,10 @@ void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass()) return; - SmallString<64> MangledName; - llvm::raw_svector_ostream Out(MangledName); - CGM.getCXXABI().getMangleContext().mangleCXXRTTI(T.getUnqualifiedType(), - Out); - - // Blacklist based on the mangled type. 
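
Aside (not part of the patch): the CFI vtable checks in this diff (EmitVTablePtrCheckForCast and EmitVTablePtrCheck) test the loaded vptr against a bitset of vtables valid for the static type via llvm.bitset.test. The user-visible effect is that a virtual call or cast through a pointer whose dynamic type does not match aborts under -fsanitize=cfi, which requires building with LTO. The kind of type confusion it catches:

    #include <cstdio>

    struct A {
      virtual void f() { std::puts("A::f"); }
      virtual ~A() = default;
    };
    struct B {
      virtual void g() { std::puts("B::g"); }
      virtual ~B() = default;
    };

    void badCall(B *BP) {
      A *AP = reinterpret_cast<A *>(BP); // type confusion: *BP is not an A
      AP->f();                           // CFI's vtable check aborts here
    }

    int main() {
      B Obj;
      badCall(&Obj); // undefined behavior without CFI; aborts with it
    }
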
- if (CGM.getContext().getSanitizerBlacklist().isBlacklistedType(Out.str())) - return; - if (!SanOpts.has(SanitizerKind::CFICastStrict)) ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl); - llvm::BasicBlock *ContBlock = 0; + llvm::BasicBlock *ContBlock = nullptr; if (MayBeNull) { llvm::Value *DerivedNotNull = @@ -2216,7 +2532,9 @@ void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, EmitBlock(CheckBlock); } - llvm::Value *VTable = GetVTablePtr(Derived, Int8PtrTy); + llvm::Value *VTable = + GetVTablePtr(Address(Derived, getPointerAlign()), Int8PtrTy, ClassDecl); + EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc); if (MayBeNull) { @@ -2234,18 +2552,22 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD, SanitizerScope SanScope(this); - std::string OutName; - llvm::raw_string_ostream Out(OutName); - CGM.getCXXABI().getMangleContext().mangleCXXVTableBitSet(RD, Out); - - llvm::Value *BitSetName = llvm::MetadataAsValue::get( - getLLVMContext(), llvm::MDString::get(getLLVMContext(), Out.str())); + llvm::Metadata *MD = + CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); + llvm::Value *BitSetName = llvm::MetadataAsValue::get(getLLVMContext(), MD); llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy); llvm::Value *BitSetTest = Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::bitset_test), {CastedVTable, BitSetName}); + if (CGM.getCodeGenOpts().SanitizeCfiCrossDso) { + if (auto TypeId = CGM.CreateCfiIdForTypeMetadata(MD)) { + EmitCfiSlowPathCheck(BitSetTest, TypeId, CastedVTable); + return; + } + } + SanitizerMask M; switch (TCK) { case CFITCK_VCall: @@ -2263,9 +2585,9 @@ void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD, } llvm::Constant *StaticData[] = { - EmitCheckSourceLocation(Loc), - EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)), - llvm::ConstantInt::get(Int8Ty, TCK), + EmitCheckSourceLocation(Loc), + EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)), + llvm::ConstantInt::get(Int8Ty, TCK), }; EmitCheck(std::make_pair(BitSetTest, M), "cfi_bad_type", StaticData, CastedVTable); @@ -2405,8 +2727,8 @@ void CodeGenFunction::EmitLambdaBlockInvokeBody() { CallArgList CallArgs; QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda)); - llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false); - CallArgs.add(RValue::get(ThisPtr), ThisType); + Address ThisPtr = GetAddrOfBlockDecl(variable, false); + CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType); // Add the rest of the parameters. for (auto param : BD->params()) |