1296417Sdim//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===// 2199990Srdivacky// 3199990Srdivacky// The LLVM Compiler Infrastructure 4199990Srdivacky// 5199990Srdivacky// This file is distributed under the University of Illinois Open Source 6199990Srdivacky// License. See LICENSE.TXT for details. 7199990Srdivacky// 8199990Srdivacky//===----------------------------------------------------------------------===// 9199990Srdivacky// 10199990Srdivacky// This contains code dealing with C++ code generation of classes 11199990Srdivacky// 12199990Srdivacky//===----------------------------------------------------------------------===// 13199990Srdivacky 14234353Sdim#include "CGBlocks.h" 15276479Sdim#include "CGCXXABI.h" 16212904Sdim#include "CGDebugInfo.h" 17249423Sdim#include "CGRecordLayout.h" 18199990Srdivacky#include "CodeGenFunction.h" 19199990Srdivacky#include "clang/AST/CXXInheritance.h" 20261991Sdim#include "clang/AST/DeclTemplate.h" 21218893Sdim#include "clang/AST/EvaluatedExprVisitor.h" 22199990Srdivacky#include "clang/AST/RecordLayout.h" 23204643Srdivacky#include "clang/AST/StmtCXX.h" 24249423Sdim#include "clang/Basic/TargetBuiltins.h" 25261991Sdim#include "clang/CodeGen/CGFunctionInfo.h" 26219077Sdim#include "clang/Frontend/CodeGenOptions.h" 27288943Sdim#include "llvm/IR/Intrinsics.h" 28296417Sdim#include "llvm/IR/Metadata.h" 29199990Srdivacky 30199990Srdivackyusing namespace clang; 31199990Srdivackyusing namespace CodeGen; 32199990Srdivacky 33296417Sdim/// Return the best known alignment for an unknown pointer to a 34296417Sdim/// particular class. 35296417SdimCharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) { 36296417Sdim if (!RD->isCompleteDefinition()) 37296417Sdim return CharUnits::One(); // Hopefully won't be used anywhere. 
38296417Sdim 39296417Sdim auto &layout = getContext().getASTRecordLayout(RD); 40296417Sdim 41296417Sdim // If the class is final, then we know that the pointer points to an 42296417Sdim // object of that type and can use the full alignment. 43296417Sdim if (RD->hasAttr<FinalAttr>()) { 44296417Sdim return layout.getAlignment(); 45296417Sdim 46296417Sdim // Otherwise, we have to assume it could be a subclass. 47296417Sdim } else { 48296417Sdim return layout.getNonVirtualAlignment(); 49296417Sdim } 50296417Sdim} 51296417Sdim 52296417Sdim/// Return the best known alignment for a pointer to a virtual base, 53296417Sdim/// given the alignment of a pointer to the derived class. 54296417SdimCharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign, 55296417Sdim const CXXRecordDecl *derivedClass, 56296417Sdim const CXXRecordDecl *vbaseClass) { 57296417Sdim // The basic idea here is that an underaligned derived pointer might 58296417Sdim // indicate an underaligned base pointer. 59296417Sdim 60296417Sdim assert(vbaseClass->isCompleteDefinition()); 61296417Sdim auto &baseLayout = getContext().getASTRecordLayout(vbaseClass); 62296417Sdim CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment(); 63296417Sdim 64296417Sdim return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass, 65296417Sdim expectedVBaseAlign); 66296417Sdim} 67296417Sdim 68296417SdimCharUnits 69296417SdimCodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign, 70296417Sdim const CXXRecordDecl *baseDecl, 71296417Sdim CharUnits expectedTargetAlign) { 72296417Sdim // If the base is an incomplete type (which is, alas, possible with 73296417Sdim // member pointers), be pessimistic. 
74296417Sdim if (!baseDecl->isCompleteDefinition()) 75296417Sdim return std::min(actualBaseAlign, expectedTargetAlign); 76296417Sdim 77296417Sdim auto &baseLayout = getContext().getASTRecordLayout(baseDecl); 78296417Sdim CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment(); 79296417Sdim 80296417Sdim // If the class is properly aligned, assume the target offset is, too. 81296417Sdim // 82296417Sdim // This actually isn't necessarily the right thing to do --- if the 83296417Sdim // class is a complete object, but it's only properly aligned for a 84296417Sdim // base subobject, then the alignments of things relative to it are 85296417Sdim // probably off as well. (Note that this requires the alignment of 86296417Sdim // the target to be greater than the NV alignment of the derived 87296417Sdim // class.) 88296417Sdim // 89296417Sdim // However, our approach to this kind of under-alignment can only 90296417Sdim // ever be best effort; after all, we're never going to propagate 91296417Sdim // alignments through variables or parameters. Note, in particular, 92296417Sdim // that constructing a polymorphic type in an address that's less 93296417Sdim // than pointer-aligned will generally trap in the constructor, 94296417Sdim // unless we someday add some sort of attribute to change the 95296417Sdim // assumed alignment of 'this'. So our goal here is pretty much 96296417Sdim // just to allow the user to explicitly say that a pointer is 97296417Sdim // under-aligned and then safely access its fields and v-tables. 98296417Sdim if (actualBaseAlign >= expectedBaseAlign) { 99296417Sdim return expectedTargetAlign; 100296417Sdim } 101296417Sdim 102296417Sdim // Otherwise, we might be offset by an arbitrary multiple of the 103296417Sdim // actual alignment. The correct adjustment is to take the min of 104296417Sdim // the two alignments. 
105296417Sdim return std::min(actualBaseAlign, expectedTargetAlign); 106296417Sdim} 107296417Sdim 108296417SdimAddress CodeGenFunction::LoadCXXThisAddress() { 109296417Sdim assert(CurFuncDecl && "loading 'this' without a func declaration?"); 110296417Sdim assert(isa<CXXMethodDecl>(CurFuncDecl)); 111296417Sdim 112296417Sdim // Lazily compute CXXThisAlignment. 113296417Sdim if (CXXThisAlignment.isZero()) { 114296417Sdim // Just use the best known alignment for the parent. 115296417Sdim // TODO: if we're currently emitting a complete-object ctor/dtor, 116296417Sdim // we can always use the complete-object alignment. 117296417Sdim auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent(); 118296417Sdim CXXThisAlignment = CGM.getClassPointerAlignment(RD); 119296417Sdim } 120296417Sdim 121296417Sdim return Address(LoadCXXThis(), CXXThisAlignment); 122296417Sdim} 123296417Sdim 124296417Sdim/// Emit the address of a field using a member data pointer. 125296417Sdim/// 126296417Sdim/// \param E Only used for emergency diagnostics 127296417SdimAddress 128296417SdimCodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base, 129296417Sdim llvm::Value *memberPtr, 130296417Sdim const MemberPointerType *memberPtrType, 131296417Sdim AlignmentSource *alignSource) { 132296417Sdim // Ask the ABI to compute the actual address. 
133296417Sdim llvm::Value *ptr = 134296417Sdim CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base, 135296417Sdim memberPtr, memberPtrType); 136296417Sdim 137296417Sdim QualType memberType = memberPtrType->getPointeeType(); 138296417Sdim CharUnits memberAlign = getNaturalTypeAlignment(memberType, alignSource); 139296417Sdim memberAlign = 140296417Sdim CGM.getDynamicOffsetAlignment(base.getAlignment(), 141296417Sdim memberPtrType->getClass()->getAsCXXRecordDecl(), 142296417Sdim memberAlign); 143296417Sdim return Address(ptr, memberAlign); 144296417Sdim} 145296417Sdim 146288943SdimCharUnits CodeGenModule::computeNonVirtualBaseClassOffset( 147288943Sdim const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start, 148288943Sdim CastExpr::path_const_iterator End) { 149221345Sdim CharUnits Offset = CharUnits::Zero(); 150288943Sdim 151288943Sdim const ASTContext &Context = getContext(); 152207619Srdivacky const CXXRecordDecl *RD = DerivedClass; 153288943Sdim 154212904Sdim for (CastExpr::path_const_iterator I = Start; I != End; ++I) { 155207619Srdivacky const CXXBaseSpecifier *Base = *I; 156207619Srdivacky assert(!Base->isVirtual() && "Should not see virtual bases here!"); 157199990Srdivacky 158199990Srdivacky // Get the layout. 159207619Srdivacky const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD); 160288943Sdim 161288943Sdim const CXXRecordDecl *BaseDecl = 162207619Srdivacky cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl()); 163288943Sdim 164207619Srdivacky // Add the offset. 
165221345Sdim Offset += Layout.getBaseClassOffset(BaseDecl); 166288943Sdim 167207619Srdivacky RD = BaseDecl; 168199990Srdivacky } 169288943Sdim 170221345Sdim return Offset; 171199990Srdivacky} 172199990Srdivacky 173199990Srdivackyllvm::Constant * 174207619SrdivackyCodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl, 175212904Sdim CastExpr::path_const_iterator PathBegin, 176212904Sdim CastExpr::path_const_iterator PathEnd) { 177212904Sdim assert(PathBegin != PathEnd && "Base path should not be empty!"); 178199990Srdivacky 179288943Sdim CharUnits Offset = 180288943Sdim computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd); 181221345Sdim if (Offset.isZero()) 182276479Sdim return nullptr; 183276479Sdim 184288943Sdim llvm::Type *PtrDiffTy = 185207619Srdivacky Types.ConvertType(getContext().getPointerDiffType()); 186288943Sdim 187221345Sdim return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity()); 188199990Srdivacky} 189199990Srdivacky 190207619Srdivacky/// Gets the address of a direct base class within a complete object. 191203955Srdivacky/// This should only be used for (1) non-virtual bases or (2) virtual bases 192203955Srdivacky/// when the type is known to be complete (e.g. in complete destructors). 193203955Srdivacky/// 194203955Srdivacky/// The object pointed to by 'This' is assumed to be non-null. 195296417SdimAddress 196296417SdimCodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This, 197207619Srdivacky const CXXRecordDecl *Derived, 198207619Srdivacky const CXXRecordDecl *Base, 199207619Srdivacky bool BaseIsVirtual) { 200203955Srdivacky // 'this' must be a pointer (in some address space) to Derived. 201296417Sdim assert(This.getElementType() == ConvertType(Derived)); 202203955Srdivacky 203203955Srdivacky // Compute the offset of the virtual base. 
204221345Sdim CharUnits Offset; 205203955Srdivacky const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived); 206207619Srdivacky if (BaseIsVirtual) 207221345Sdim Offset = Layout.getVBaseClassOffset(Base); 208203955Srdivacky else 209221345Sdim Offset = Layout.getBaseClassOffset(Base); 210203955Srdivacky 211203955Srdivacky // Shift and cast down to the base type. 212203955Srdivacky // TODO: for complete types, this should be possible with a GEP. 213296417Sdim Address V = This; 214296417Sdim if (!Offset.isZero()) { 215296417Sdim V = Builder.CreateElementBitCast(V, Int8Ty); 216296417Sdim V = Builder.CreateConstInBoundsByteGEP(V, Offset); 217203955Srdivacky } 218296417Sdim V = Builder.CreateElementBitCast(V, ConvertType(Base)); 219203955Srdivacky 220203955Srdivacky return V; 221206084Srdivacky} 222203955Srdivacky 223296417Sdimstatic Address 224296417SdimApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr, 225239462Sdim CharUnits nonVirtualOffset, 226296417Sdim llvm::Value *virtualOffset, 227296417Sdim const CXXRecordDecl *derivedClass, 228296417Sdim const CXXRecordDecl *nearestVBase) { 229239462Sdim // Assert that we have something to do. 230276479Sdim assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr); 231239462Sdim 232239462Sdim // Compute the offset from the static and dynamic components. 233239462Sdim llvm::Value *baseOffset; 234239462Sdim if (!nonVirtualOffset.isZero()) { 235239462Sdim baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy, 236239462Sdim nonVirtualOffset.getQuantity()); 237239462Sdim if (virtualOffset) { 238239462Sdim baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset); 239239462Sdim } 240239462Sdim } else { 241239462Sdim baseOffset = virtualOffset; 242239462Sdim } 243288943Sdim 244207619Srdivacky // Apply the base offset. 
245296417Sdim llvm::Value *ptr = addr.getPointer(); 246239462Sdim ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy); 247239462Sdim ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr"); 248296417Sdim 249296417Sdim // If we have a virtual component, the alignment of the result will 250296417Sdim // be relative only to the known alignment of that vbase. 251296417Sdim CharUnits alignment; 252296417Sdim if (virtualOffset) { 253296417Sdim assert(nearestVBase && "virtual offset without vbase?"); 254296417Sdim alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(), 255296417Sdim derivedClass, nearestVBase); 256296417Sdim } else { 257296417Sdim alignment = addr.getAlignment(); 258296417Sdim } 259296417Sdim alignment = alignment.alignmentAtOffset(nonVirtualOffset); 260296417Sdim 261296417Sdim return Address(ptr, alignment); 262207619Srdivacky} 263207619Srdivacky 264296417SdimAddress CodeGenFunction::GetAddressOfBaseClass( 265296417Sdim Address Value, const CXXRecordDecl *Derived, 266280031Sdim CastExpr::path_const_iterator PathBegin, 267280031Sdim CastExpr::path_const_iterator PathEnd, bool NullCheckValue, 268280031Sdim SourceLocation Loc) { 269212904Sdim assert(PathBegin != PathEnd && "Base path should not be empty!"); 270199990Srdivacky 271212904Sdim CastExpr::path_const_iterator Start = PathBegin; 272276479Sdim const CXXRecordDecl *VBase = nullptr; 273276479Sdim 274239462Sdim // Sema has done some convenient canonicalization here: if the 275239462Sdim // access path involved any virtual steps, the conversion path will 276239462Sdim // *start* with a step down to the correct virtual base subobject, 277239462Sdim // and hence will not require any further steps. 
278207619Srdivacky if ((*Start)->isVirtual()) { 279288943Sdim VBase = 280207619Srdivacky cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl()); 281207619Srdivacky ++Start; 282203955Srdivacky } 283239462Sdim 284239462Sdim // Compute the static offset of the ultimate destination within its 285239462Sdim // allocating subobject (the virtual base, if there is one, or else 286239462Sdim // the "complete" object that we see). 287288943Sdim CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset( 288288943Sdim VBase ? VBase : Derived, Start, PathEnd); 289203955Srdivacky 290239462Sdim // If there's a virtual step, we can sometimes "devirtualize" it. 291239462Sdim // For now, that's limited to when the derived type is final. 292239462Sdim // TODO: "devirtualize" this for accesses to known-complete objects. 293239462Sdim if (VBase && Derived->hasAttr<FinalAttr>()) { 294239462Sdim const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived); 295239462Sdim CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase); 296239462Sdim NonVirtualOffset += vBaseOffset; 297276479Sdim VBase = nullptr; // we no longer have a virtual step 298239462Sdim } 299239462Sdim 300207619Srdivacky // Get the base pointer type. 301288943Sdim llvm::Type *BasePtrTy = 302212904Sdim ConvertType((PathEnd[-1])->getType())->getPointerTo(); 303239462Sdim 304280031Sdim QualType DerivedTy = getContext().getRecordType(Derived); 305296417Sdim CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived); 306280031Sdim 307239462Sdim // If the static offset is zero and we don't have a virtual step, 308239462Sdim // just do a bitcast; null checks are unnecessary. 
309221345Sdim if (NonVirtualOffset.isZero() && !VBase) { 310280031Sdim if (sanitizePerformTypeCheck()) { 311296417Sdim EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(), 312296417Sdim DerivedTy, DerivedAlign, !NullCheckValue); 313280031Sdim } 314203955Srdivacky return Builder.CreateBitCast(Value, BasePtrTy); 315276479Sdim } 316239462Sdim 317276479Sdim llvm::BasicBlock *origBB = nullptr; 318276479Sdim llvm::BasicBlock *endBB = nullptr; 319276479Sdim 320239462Sdim // Skip over the offset (and the vtable load) if we're supposed to 321239462Sdim // null-check the pointer. 322199990Srdivacky if (NullCheckValue) { 323239462Sdim origBB = Builder.GetInsertBlock(); 324239462Sdim llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull"); 325239462Sdim endBB = createBasicBlock("cast.end"); 326288943Sdim 327296417Sdim llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer()); 328239462Sdim Builder.CreateCondBr(isNull, endBB, notNullBB); 329239462Sdim EmitBlock(notNullBB); 330199990Srdivacky } 331207619Srdivacky 332280031Sdim if (sanitizePerformTypeCheck()) { 333296417Sdim EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc, 334296417Sdim Value.getPointer(), DerivedTy, DerivedAlign, true); 335280031Sdim } 336280031Sdim 337239462Sdim // Compute the virtual offset. 338276479Sdim llvm::Value *VirtualOffset = nullptr; 339218893Sdim if (VBase) { 340261991Sdim VirtualOffset = 341261991Sdim CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase); 342218893Sdim } 343218893Sdim 344239462Sdim // Apply both offsets. 345296417Sdim Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset, 346296417Sdim VirtualOffset, Derived, VBase); 347288943Sdim 348239462Sdim // Cast to the destination type. 349199990Srdivacky Value = Builder.CreateBitCast(Value, BasePtrTy); 350239462Sdim 351239462Sdim // Build a phi if we needed a null check. 
352199990Srdivacky if (NullCheckValue) { 353239462Sdim llvm::BasicBlock *notNullBB = Builder.GetInsertBlock(); 354239462Sdim Builder.CreateBr(endBB); 355239462Sdim EmitBlock(endBB); 356288943Sdim 357239462Sdim llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result"); 358296417Sdim PHI->addIncoming(Value.getPointer(), notNullBB); 359239462Sdim PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB); 360296417Sdim Value = Address(PHI, Value.getAlignment()); 361199990Srdivacky } 362288943Sdim 363199990Srdivacky return Value; 364199990Srdivacky} 365199990Srdivacky 366296417SdimAddress 367296417SdimCodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr, 368207619Srdivacky const CXXRecordDecl *Derived, 369212904Sdim CastExpr::path_const_iterator PathBegin, 370212904Sdim CastExpr::path_const_iterator PathEnd, 371199990Srdivacky bool NullCheckValue) { 372212904Sdim assert(PathBegin != PathEnd && "Base path should not be empty!"); 373207619Srdivacky 374199990Srdivacky QualType DerivedTy = 375207619Srdivacky getContext().getCanonicalType(getContext().getTagDeclType(Derived)); 376226633Sdim llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo(); 377249423Sdim 378203955Srdivacky llvm::Value *NonVirtualOffset = 379212904Sdim CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd); 380288943Sdim 381203955Srdivacky if (!NonVirtualOffset) { 382203955Srdivacky // No offset, we can just cast back. 
383296417Sdim return Builder.CreateBitCast(BaseAddr, DerivedPtrTy); 384203955Srdivacky } 385276479Sdim 386276479Sdim llvm::BasicBlock *CastNull = nullptr; 387276479Sdim llvm::BasicBlock *CastNotNull = nullptr; 388276479Sdim llvm::BasicBlock *CastEnd = nullptr; 389276479Sdim 390199990Srdivacky if (NullCheckValue) { 391199990Srdivacky CastNull = createBasicBlock("cast.null"); 392199990Srdivacky CastNotNull = createBasicBlock("cast.notnull"); 393199990Srdivacky CastEnd = createBasicBlock("cast.end"); 394288943Sdim 395296417Sdim llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer()); 396199990Srdivacky Builder.CreateCondBr(IsNull, CastNull, CastNotNull); 397199990Srdivacky EmitBlock(CastNotNull); 398199990Srdivacky } 399288943Sdim 400203955Srdivacky // Apply the offset. 401296417Sdim llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy); 402234353Sdim Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset), 403234353Sdim "sub.ptr"); 404199990Srdivacky 405203955Srdivacky // Just cast. 406203955Srdivacky Value = Builder.CreateBitCast(Value, DerivedPtrTy); 407203955Srdivacky 408296417Sdim // Produce a PHI if we had a null-check. 
409199990Srdivacky if (NullCheckValue) { 410199990Srdivacky Builder.CreateBr(CastEnd); 411199990Srdivacky EmitBlock(CastNull); 412199990Srdivacky Builder.CreateBr(CastEnd); 413199990Srdivacky EmitBlock(CastEnd); 414288943Sdim 415221345Sdim llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2); 416199990Srdivacky PHI->addIncoming(Value, CastNotNull); 417296417Sdim PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull); 418199990Srdivacky Value = PHI; 419199990Srdivacky } 420288943Sdim 421296417Sdim return Address(Value, CGM.getClassPointerAlignment(Derived)); 422199990Srdivacky} 423249423Sdim 424249423Sdimllvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD, 425249423Sdim bool ForVirtualBase, 426249423Sdim bool Delegating) { 427261991Sdim if (!CGM.getCXXABI().NeedsVTTParameter(GD)) { 428202379Srdivacky // This constructor/destructor does not need a VTT parameter. 429276479Sdim return nullptr; 430202379Srdivacky } 431288943Sdim 432251662Sdim const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent(); 433202379Srdivacky const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent(); 434204643Srdivacky 435202379Srdivacky llvm::Value *VTT; 436202379Srdivacky 437204643Srdivacky uint64_t SubVTTIndex; 438204643Srdivacky 439249423Sdim if (Delegating) { 440249423Sdim // If this is a delegating constructor call, just load the VTT. 441249423Sdim return LoadCXXVTT(); 442249423Sdim } else if (RD == Base) { 443249423Sdim // If the record matches the base, this is the complete ctor/dtor 444249423Sdim // variant calling the base variant in a class with virtual bases. 
445261991Sdim assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) && 446204643Srdivacky "doing no-op VTT offset in base dtor/ctor?"); 447207619Srdivacky assert(!ForVirtualBase && "Can't have same class as virtual base!"); 448204643Srdivacky SubVTTIndex = 0; 449204643Srdivacky } else { 450249423Sdim const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD); 451288943Sdim CharUnits BaseOffset = ForVirtualBase ? 452288943Sdim Layout.getVBaseClassOffset(Base) : 453221345Sdim Layout.getBaseClassOffset(Base); 454207619Srdivacky 455288943Sdim SubVTTIndex = 456249423Sdim CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset)); 457204643Srdivacky assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!"); 458204643Srdivacky } 459288943Sdim 460261991Sdim if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) { 461202379Srdivacky // A VTT parameter was passed to the constructor, use it. 462249423Sdim VTT = LoadCXXVTT(); 463249423Sdim VTT = Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex); 464202379Srdivacky } else { 465202379Srdivacky // We're the complete constructor, so get the VTT by name. 466249423Sdim VTT = CGM.getVTables().GetAddrOfVTT(RD); 467249423Sdim VTT = Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex); 468202379Srdivacky } 469202379Srdivacky 470202379Srdivacky return VTT; 471202379Srdivacky} 472202379Srdivacky 473212904Sdimnamespace { 474212904Sdim /// Call the destructor for a direct base class. 
475296417Sdim struct CallBaseDtor final : EHScopeStack::Cleanup { 476212904Sdim const CXXRecordDecl *BaseClass; 477212904Sdim bool BaseIsVirtual; 478212904Sdim CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual) 479212904Sdim : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {} 480212904Sdim 481276479Sdim void Emit(CodeGenFunction &CGF, Flags flags) override { 482212904Sdim const CXXRecordDecl *DerivedClass = 483212904Sdim cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent(); 484212904Sdim 485212904Sdim const CXXDestructorDecl *D = BaseClass->getDestructor(); 486296417Sdim Address Addr = 487296417Sdim CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(), 488212904Sdim DerivedClass, BaseClass, 489212904Sdim BaseIsVirtual); 490249423Sdim CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, 491249423Sdim /*Delegating=*/false, Addr); 492212904Sdim } 493212904Sdim }; 494218893Sdim 495218893Sdim /// A visitor which checks whether an initializer uses 'this' in a 496218893Sdim /// way which requires the vtable to be properly set. 497288943Sdim struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> { 498288943Sdim typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super; 499218893Sdim 500218893Sdim bool UsesThis; 501218893Sdim 502288943Sdim DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {} 503218893Sdim 504218893Sdim // Black-list all explicit and implicit references to 'this'. 505218893Sdim // 506218893Sdim // Do we need to worry about external references to 'this' derived 507218893Sdim // from arbitrary code? If so, then anything which runs arbitrary 508218893Sdim // external code might potentially access the vtable. 
509288943Sdim void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; } 510218893Sdim }; 511296417Sdim} // end anonymous namespace 512212904Sdim 513218893Sdimstatic bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) { 514218893Sdim DynamicThisUseChecker Checker(C); 515288943Sdim Checker.Visit(Init); 516218893Sdim return Checker.UsesThis; 517218893Sdim} 518218893Sdim 519288943Sdimstatic void EmitBaseInitializer(CodeGenFunction &CGF, 520201361Srdivacky const CXXRecordDecl *ClassDecl, 521218893Sdim CXXCtorInitializer *BaseInit, 522201361Srdivacky CXXCtorType CtorType) { 523201361Srdivacky assert(BaseInit->isBaseInitializer() && 524201361Srdivacky "Must have base initializer!"); 525201361Srdivacky 526296417Sdim Address ThisPtr = CGF.LoadCXXThisAddress(); 527288943Sdim 528201361Srdivacky const Type *BaseType = BaseInit->getBaseClass(); 529201361Srdivacky CXXRecordDecl *BaseClassDecl = 530201361Srdivacky cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); 531201361Srdivacky 532207619Srdivacky bool isBaseVirtual = BaseInit->isBaseVirtual(); 533201361Srdivacky 534201361Srdivacky // The base constructor doesn't construct virtual bases. 535201361Srdivacky if (CtorType == Ctor_Base && isBaseVirtual) 536201361Srdivacky return; 537201361Srdivacky 538218893Sdim // If the initializer for the base (other than the constructor 539218893Sdim // itself) accesses 'this' in any way, we need to initialize the 540218893Sdim // vtables. 541218893Sdim if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit())) 542218893Sdim CGF.InitializeVTablePointers(ClassDecl); 543218893Sdim 544203955Srdivacky // We can pretend to be a complete class because it only matters for 545203955Srdivacky // virtual bases, and we only do virtual bases for complete ctors. 
546296417Sdim Address V = 547207619Srdivacky CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl, 548212904Sdim BaseClassDecl, 549212904Sdim isBaseVirtual); 550226633Sdim AggValueSlot AggSlot = 551296417Sdim AggValueSlot::forAddr(V, Qualifiers(), 552226633Sdim AggValueSlot::IsDestructed, 553226633Sdim AggValueSlot::DoesNotNeedGCBarriers, 554226633Sdim AggValueSlot::IsNotAliased); 555218893Sdim 556218893Sdim CGF.EmitAggExpr(BaseInit->getInit(), AggSlot); 557288943Sdim 558288943Sdim if (CGF.CGM.getLangOpts().Exceptions && 559218893Sdim !BaseClassDecl->hasTrivialDestructor()) 560212904Sdim CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl, 561212904Sdim isBaseVirtual); 562201361Srdivacky} 563201361Srdivacky 564208600Srdivackystatic void EmitAggMemberInitializer(CodeGenFunction &CGF, 565208600Srdivacky LValue LHS, 566234353Sdim Expr *Init, 567296417Sdim Address ArrayIndexVar, 568208600Srdivacky QualType T, 569234353Sdim ArrayRef<VarDecl *> ArrayIndexes, 570208600Srdivacky unsigned Index) { 571234353Sdim if (Index == ArrayIndexes.size()) { 572234353Sdim LValue LV = LHS; 573234353Sdim 574296417Sdim if (ArrayIndexVar.isValid()) { 575261991Sdim // If we have an array index variable, load it and use it as an offset. 576261991Sdim // Then, increment the value. 577296417Sdim llvm::Value *Dest = LHS.getPointer(); 578261991Sdim llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar); 579261991Sdim Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress"); 580261991Sdim llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1); 581261991Sdim Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc"); 582261991Sdim CGF.Builder.CreateStore(Next, ArrayIndexVar); 583234353Sdim 584261991Sdim // Update the LValue. 
585296417Sdim CharUnits EltSize = CGF.getContext().getTypeSizeInChars(T); 586296417Sdim CharUnits Align = LV.getAlignment().alignmentOfArrayElement(EltSize); 587296417Sdim LV.setAddress(Address(Dest, Align)); 588261991Sdim } 589234353Sdim 590261991Sdim switch (CGF.getEvaluationKind(T)) { 591261991Sdim case TEK_Scalar: 592276479Sdim CGF.EmitScalarInit(Init, /*decl*/ nullptr, LV, false); 593261991Sdim break; 594261991Sdim case TEK_Complex: 595261991Sdim CGF.EmitComplexExprIntoLValue(Init, LV, /*isInit*/ true); 596261991Sdim break; 597261991Sdim case TEK_Aggregate: { 598261991Sdim AggValueSlot Slot = 599261991Sdim AggValueSlot::forLValue(LV, 600261991Sdim AggValueSlot::IsDestructed, 601261991Sdim AggValueSlot::DoesNotNeedGCBarriers, 602261991Sdim AggValueSlot::IsNotAliased); 603234353Sdim 604261991Sdim CGF.EmitAggExpr(Init, Slot); 605261991Sdim break; 606208600Srdivacky } 607261991Sdim } 608218893Sdim 609208600Srdivacky return; 610208600Srdivacky } 611261991Sdim 612208600Srdivacky const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T); 613208600Srdivacky assert(Array && "Array initialization without the array type?"); 614296417Sdim Address IndexVar = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]); 615288943Sdim 616208600Srdivacky // Initialize this index variable to zero. 617208600Srdivacky llvm::Value* Zero 618296417Sdim = llvm::Constant::getNullValue(IndexVar.getElementType()); 619208600Srdivacky CGF.Builder.CreateStore(Zero, IndexVar); 620288943Sdim 621208600Srdivacky // Start the loop with a block that tests the condition. 
622208600Srdivacky llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond"); 623208600Srdivacky llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end"); 624288943Sdim 625208600Srdivacky CGF.EmitBlock(CondBlock); 626208600Srdivacky 627208600Srdivacky llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body"); 628208600Srdivacky // Generate: if (loop-index < number-of-elements) fall to the loop body, 629208600Srdivacky // otherwise, go to the block after the for-loop. 630208600Srdivacky uint64_t NumElements = Array->getSize().getZExtValue(); 631208600Srdivacky llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar); 632208600Srdivacky llvm::Value *NumElementsPtr = 633208600Srdivacky llvm::ConstantInt::get(Counter->getType(), NumElements); 634208600Srdivacky llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr, 635208600Srdivacky "isless"); 636288943Sdim 637208600Srdivacky // If the condition is true, execute the body. 638208600Srdivacky CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor); 639208600Srdivacky 640208600Srdivacky CGF.EmitBlock(ForBody); 641208600Srdivacky llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc"); 642261991Sdim 643261991Sdim // Inside the loop body recurse to emit the inner loop or, eventually, the 644261991Sdim // constructor call. 645261991Sdim EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar, 646261991Sdim Array->getElementType(), ArrayIndexes, Index + 1); 647261991Sdim 648208600Srdivacky CGF.EmitBlock(ContinueBlock); 649208600Srdivacky 650208600Srdivacky // Emit the increment of the loop counter. 651208600Srdivacky llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1); 652208600Srdivacky Counter = CGF.Builder.CreateLoad(IndexVar); 653208600Srdivacky NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc"); 654208600Srdivacky CGF.Builder.CreateStore(NextVal, IndexVar); 655208600Srdivacky 656208600Srdivacky // Finally, branch back up to the condition for the next iteration. 
  // Branch back up to the loop condition for the next iteration of the
  // array-initialization loop opened earlier in EmitAggMemberInitializer.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

/// Returns true if calling \p D on an object can be replaced by a raw
/// byte-wise copy (memcpy) of the source object's storage.
///
/// Only copy/move constructors and copy/move assignment operators qualify,
/// and only when either (a) they are trivial and the class gets no extra
/// ASan field padding, or (b) the class is a union and the member is
/// defaulted (a defaulted union copy/move *must* copy the object
/// representation, since the active member is unknown).
static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}

/// Narrows \p LHS (an lvalue for the whole object) in place so that it
/// denotes the field initialized by \p MemberInit, drilling through the
/// chain of anonymous structs/unions for an indirect member initializer.
static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

/// Emits the initialization of one non-static data member from the
/// prologue of \p Constructor, either as an aggregate copy (for arrays of
/// PODs / memcpy-equivalent classes in a defaulted copy/move constructor)
/// or by evaluating the initializer expression.
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      // Locate the source object (the single parameter of the copy/move
      // constructor) via the ABI, then form an lvalue for the same field.
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

/// Emits the initializer expression \p Init into the field lvalue \p LHS,
/// dispatching on the evaluation kind of the field's type. For aggregate
/// fields initialized element-wise, \p ArrayIndexes carries the implicit
/// loop-index variables and an "object.index" counter is materialized.
/// Also pushes an EH cleanup so the field is destroyed if a later part of
/// the constructor throws.
void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init,
                                              ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      // Non-simple lvalue (e.g. bitfield): evaluate then store through it.
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    Address ArrayIndexVar = Address::invalid();
    if (ArrayIndexes.size()) {
      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      Address BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateMemTemp(getContext().getSizeType(), "object.index");
      llvm::Value *Zero =
        llvm::Constant::getNullValue(ArrayIndexVar.getElementType());
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  // Only classes that actually received extra padding need (un)poisoning.
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    std::pair<CharUnits, CharUnits> FieldInfo =
        Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.first;
    assert(NumFields < SSV.size());
    // Bitfields get Size 0 so the padding check below always skips them.
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::Constant *F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}

/// EmitConstructorBody - Emits the body of the current constructor.
916204643Srdivackyvoid CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) { 917280031Sdim EmitAsanPrologueOrEpilogue(true); 918204643Srdivacky const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl()); 919204643Srdivacky CXXCtorType CtorType = CurGD.getCtorType(); 920204643Srdivacky 921276479Sdim assert((CGM.getTarget().getCXXABI().hasConstructorVariants() || 922276479Sdim CtorType == Ctor_Complete) && 923276479Sdim "can only generate complete ctor for this ABI"); 924276479Sdim 925204643Srdivacky // Before we go any further, try the complete->base constructor 926204643Srdivacky // delegation optimization. 927239462Sdim if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) && 928251662Sdim CGM.getTarget().getCXXABI().hasConstructorVariants()) { 929261991Sdim EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getLocEnd()); 930204643Srdivacky return; 931204643Srdivacky } 932204643Srdivacky 933296417Sdim const FunctionDecl *Definition = nullptr; 934280031Sdim Stmt *Body = Ctor->getBody(Definition); 935280031Sdim assert(Definition == Ctor && "emitting wrong constructor body"); 936204643Srdivacky 937204643Srdivacky // Enter the function-try-block before the constructor prologue if 938204643Srdivacky // applicable. 939204643Srdivacky bool IsTryBody = (Body && isa<CXXTryStmt>(Body)); 940204643Srdivacky if (IsTryBody) 941210299Sed EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true); 942204643Srdivacky 943288943Sdim incrementProfileCounter(Body); 944276479Sdim 945261991Sdim RunCleanupsScope RunCleanups(*this); 946204643Srdivacky 947234353Sdim // TODO: in restricted cases, we can emit the vbase initializers of 948234353Sdim // a complete ctor and then delegate to the base ctor. 949234353Sdim 950204643Srdivacky // Emit the constructor prologue, i.e. the base and member 951204643Srdivacky // initializers. 952208600Srdivacky EmitCtorPrologue(Ctor, CtorType, Args); 953204643Srdivacky 954204643Srdivacky // Emit the body of the statement. 
955204643Srdivacky if (IsTryBody) 956204643Srdivacky EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock()); 957204643Srdivacky else if (Body) 958204643Srdivacky EmitStmt(Body); 959204643Srdivacky 960204643Srdivacky // Emit any cleanup blocks associated with the member or base 961204643Srdivacky // initializers, which includes (along the exceptional path) the 962204643Srdivacky // destructors for those members and bases that were fully 963204643Srdivacky // constructed. 964261991Sdim RunCleanups.ForceCleanup(); 965204643Srdivacky 966204643Srdivacky if (IsTryBody) 967210299Sed ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true); 968204643Srdivacky} 969204643Srdivacky 970249423Sdimnamespace { 971261991Sdim /// RAII object to indicate that codegen is copying the value representation 972261991Sdim /// instead of the object representation. Useful when copying a struct or 973261991Sdim /// class which has uninitialized members and we're only performing 974261991Sdim /// lvalue-to-rvalue conversion on the object but not its members. 
975261991Sdim class CopyingValueRepresentation { 976261991Sdim public: 977261991Sdim explicit CopyingValueRepresentation(CodeGenFunction &CGF) 978280031Sdim : CGF(CGF), OldSanOpts(CGF.SanOpts) { 979280031Sdim CGF.SanOpts.set(SanitizerKind::Bool, false); 980280031Sdim CGF.SanOpts.set(SanitizerKind::Enum, false); 981261991Sdim } 982261991Sdim ~CopyingValueRepresentation() { 983261991Sdim CGF.SanOpts = OldSanOpts; 984261991Sdim } 985261991Sdim private: 986261991Sdim CodeGenFunction &CGF; 987280031Sdim SanitizerSet OldSanOpts; 988261991Sdim }; 989261991Sdim} 990296417Sdim 991261991Sdimnamespace { 992249423Sdim class FieldMemcpyizer { 993249423Sdim public: 994249423Sdim FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl, 995249423Sdim const VarDecl *SrcRec) 996288943Sdim : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec), 997249423Sdim RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)), 998276479Sdim FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0), 999276479Sdim LastFieldOffset(0), LastAddedFieldIndex(0) {} 1000249423Sdim 1001280031Sdim bool isMemcpyableField(FieldDecl *F) const { 1002280031Sdim // Never memcpy fields when we are adding poisoned paddings. 1003280031Sdim if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding) 1004280031Sdim return false; 1005249423Sdim Qualifiers Qual = F->getType().getQualifiers(); 1006249423Sdim if (Qual.hasVolatile() || Qual.hasObjCLifetime()) 1007249423Sdim return false; 1008249423Sdim return true; 1009249423Sdim } 1010249423Sdim 1011249423Sdim void addMemcpyableField(FieldDecl *F) { 1012276479Sdim if (!FirstField) 1013249423Sdim addInitialField(F); 1014249423Sdim else 1015249423Sdim addNextField(F); 1016249423Sdim } 1017249423Sdim 1018280031Sdim CharUnits getMemcpySize(uint64_t FirstByteOffset) const { 1019249423Sdim unsigned LastFieldSize = 1020249423Sdim LastField->isBitField() ? 
1021249423Sdim LastField->getBitWidthValue(CGF.getContext()) : 1022288943Sdim CGF.getContext().getTypeSize(LastField->getType()); 1023249423Sdim uint64_t MemcpySizeBits = 1024280031Sdim LastFieldOffset + LastFieldSize - FirstByteOffset + 1025249423Sdim CGF.getContext().getCharWidth() - 1; 1026249423Sdim CharUnits MemcpySize = 1027249423Sdim CGF.getContext().toCharUnitsFromBits(MemcpySizeBits); 1028249423Sdim return MemcpySize; 1029249423Sdim } 1030249423Sdim 1031249423Sdim void emitMemcpy() { 1032249423Sdim // Give the subclass a chance to bail out if it feels the memcpy isn't 1033249423Sdim // worth it (e.g. Hasn't aggregated enough data). 1034276479Sdim if (!FirstField) { 1035249423Sdim return; 1036249423Sdim } 1037249423Sdim 1038280031Sdim uint64_t FirstByteOffset; 1039249423Sdim if (FirstField->isBitField()) { 1040249423Sdim const CGRecordLayout &RL = 1041249423Sdim CGF.getTypes().getCGRecordLayout(FirstField->getParent()); 1042249423Sdim const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField); 1043280031Sdim // FirstFieldOffset is not appropriate for bitfields, 1044288943Sdim // we need to use the storage offset instead. 
1045288943Sdim FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset); 1046249423Sdim } else { 1047280031Sdim FirstByteOffset = FirstFieldOffset; 1048249423Sdim } 1049249423Sdim 1050280031Sdim CharUnits MemcpySize = getMemcpySize(FirstByteOffset); 1051249423Sdim QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); 1052296417Sdim Address ThisPtr = CGF.LoadCXXThisAddress(); 1053296417Sdim LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy); 1054249423Sdim LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField); 1055249423Sdim llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec)); 1056249423Sdim LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy); 1057249423Sdim LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField); 1058249423Sdim 1059296417Sdim emitMemcpyIR(Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(), 1060296417Sdim Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(), 1061296417Sdim MemcpySize); 1062249423Sdim reset(); 1063249423Sdim } 1064249423Sdim 1065249423Sdim void reset() { 1066276479Sdim FirstField = nullptr; 1067249423Sdim } 1068249423Sdim 1069249423Sdim protected: 1070249423Sdim CodeGenFunction &CGF; 1071249423Sdim const CXXRecordDecl *ClassDecl; 1072249423Sdim 1073249423Sdim private: 1074249423Sdim 1075296417Sdim void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) { 1076296417Sdim llvm::PointerType *DPT = DestPtr.getType(); 1077249423Sdim llvm::Type *DBP = 1078249423Sdim llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), DPT->getAddressSpace()); 1079249423Sdim DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP); 1080249423Sdim 1081296417Sdim llvm::PointerType *SPT = SrcPtr.getType(); 1082249423Sdim llvm::Type *SBP = 1083249423Sdim llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), SPT->getAddressSpace()); 1084249423Sdim SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP); 1085249423Sdim 1086296417Sdim CGF.Builder.CreateMemCpy(DestPtr, 
SrcPtr, Size.getQuantity()); 1087249423Sdim } 1088249423Sdim 1089249423Sdim void addInitialField(FieldDecl *F) { 1090249423Sdim FirstField = F; 1091249423Sdim LastField = F; 1092249423Sdim FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex()); 1093249423Sdim LastFieldOffset = FirstFieldOffset; 1094249423Sdim LastAddedFieldIndex = F->getFieldIndex(); 1095249423Sdim return; 1096249423Sdim } 1097249423Sdim 1098249423Sdim void addNextField(FieldDecl *F) { 1099261991Sdim // For the most part, the following invariant will hold: 1100261991Sdim // F->getFieldIndex() == LastAddedFieldIndex + 1 1101261991Sdim // The one exception is that Sema won't add a copy-initializer for an 1102261991Sdim // unnamed bitfield, which will show up here as a gap in the sequence. 1103261991Sdim assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 && 1104261991Sdim "Cannot aggregate fields out of order."); 1105249423Sdim LastAddedFieldIndex = F->getFieldIndex(); 1106249423Sdim 1107249423Sdim // The 'first' and 'last' fields are chosen by offset, rather than field 1108249423Sdim // index. This allows the code to support bitfields, as well as regular 1109249423Sdim // fields. 
1110249423Sdim uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex()); 1111249423Sdim if (FOffset < FirstFieldOffset) { 1112249423Sdim FirstField = F; 1113249423Sdim FirstFieldOffset = FOffset; 1114249423Sdim } else if (FOffset > LastFieldOffset) { 1115249423Sdim LastField = F; 1116249423Sdim LastFieldOffset = FOffset; 1117249423Sdim } 1118249423Sdim } 1119249423Sdim 1120249423Sdim const VarDecl *SrcRec; 1121249423Sdim const ASTRecordLayout &RecLayout; 1122249423Sdim FieldDecl *FirstField; 1123249423Sdim FieldDecl *LastField; 1124249423Sdim uint64_t FirstFieldOffset, LastFieldOffset; 1125249423Sdim unsigned LastAddedFieldIndex; 1126249423Sdim }; 1127249423Sdim 1128249423Sdim class ConstructorMemcpyizer : public FieldMemcpyizer { 1129249423Sdim private: 1130249423Sdim 1131249423Sdim /// Get source argument for copy constructor. Returns null if not a copy 1132280031Sdim /// constructor. 1133280031Sdim static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF, 1134280031Sdim const CXXConstructorDecl *CD, 1135249423Sdim FunctionArgList &Args) { 1136261991Sdim if (CD->isCopyOrMoveConstructor() && CD->isDefaulted()) 1137280031Sdim return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)]; 1138276479Sdim return nullptr; 1139249423Sdim } 1140249423Sdim 1141249423Sdim // Returns true if a CXXCtorInitializer represents a member initialization 1142249423Sdim // that can be rolled into a memcpy. 1143249423Sdim bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const { 1144249423Sdim if (!MemcpyableCtor) 1145249423Sdim return false; 1146249423Sdim FieldDecl *Field = MemberInit->getMember(); 1147276479Sdim assert(Field && "No field for member init."); 1148249423Sdim QualType FieldType = Field->getType(); 1149249423Sdim CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit()); 1150249423Sdim 1151288943Sdim // Bail out on non-memcpyable, not-trivially-copyable members. 
1152288943Sdim if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) && 1153249423Sdim !(FieldType.isTriviallyCopyableType(CGF.getContext()) || 1154249423Sdim FieldType->isReferenceType())) 1155249423Sdim return false; 1156249423Sdim 1157249423Sdim // Bail out on volatile fields. 1158249423Sdim if (!isMemcpyableField(Field)) 1159249423Sdim return false; 1160249423Sdim 1161249423Sdim // Otherwise we're good. 1162249423Sdim return true; 1163249423Sdim } 1164249423Sdim 1165249423Sdim public: 1166249423Sdim ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD, 1167249423Sdim FunctionArgList &Args) 1168280031Sdim : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)), 1169249423Sdim ConstructorDecl(CD), 1170261991Sdim MemcpyableCtor(CD->isDefaulted() && 1171249423Sdim CD->isCopyOrMoveConstructor() && 1172249423Sdim CGF.getLangOpts().getGC() == LangOptions::NonGC), 1173249423Sdim Args(Args) { } 1174249423Sdim 1175249423Sdim void addMemberInitializer(CXXCtorInitializer *MemberInit) { 1176249423Sdim if (isMemberInitMemcpyable(MemberInit)) { 1177249423Sdim AggregatedInits.push_back(MemberInit); 1178249423Sdim addMemcpyableField(MemberInit->getMember()); 1179249423Sdim } else { 1180249423Sdim emitAggregatedInits(); 1181249423Sdim EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit, 1182249423Sdim ConstructorDecl, Args); 1183249423Sdim } 1184249423Sdim } 1185249423Sdim 1186249423Sdim void emitAggregatedInits() { 1187249423Sdim if (AggregatedInits.size() <= 1) { 1188249423Sdim // This memcpy is too small to be worthwhile. Fall back on default 1189249423Sdim // codegen. 
1190261991Sdim if (!AggregatedInits.empty()) { 1191261991Sdim CopyingValueRepresentation CVR(CGF); 1192249423Sdim EmitMemberInitializer(CGF, ConstructorDecl->getParent(), 1193261991Sdim AggregatedInits[0], ConstructorDecl, Args); 1194288943Sdim AggregatedInits.clear(); 1195249423Sdim } 1196249423Sdim reset(); 1197249423Sdim return; 1198249423Sdim } 1199249423Sdim 1200249423Sdim pushEHDestructors(); 1201249423Sdim emitMemcpy(); 1202249423Sdim AggregatedInits.clear(); 1203249423Sdim } 1204249423Sdim 1205249423Sdim void pushEHDestructors() { 1206296417Sdim Address ThisPtr = CGF.LoadCXXThisAddress(); 1207249423Sdim QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl); 1208296417Sdim LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy); 1209249423Sdim 1210249423Sdim for (unsigned i = 0; i < AggregatedInits.size(); ++i) { 1211288943Sdim CXXCtorInitializer *MemberInit = AggregatedInits[i]; 1212288943Sdim QualType FieldType = MemberInit->getAnyMember()->getType(); 1213249423Sdim QualType::DestructionKind dtorKind = FieldType.isDestructedType(); 1214288943Sdim if (!CGF.needsEHCleanup(dtorKind)) 1215288943Sdim continue; 1216288943Sdim LValue FieldLHS = LHS; 1217288943Sdim EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS); 1218288943Sdim CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType); 1219249423Sdim } 1220249423Sdim } 1221249423Sdim 1222249423Sdim void finish() { 1223249423Sdim emitAggregatedInits(); 1224249423Sdim } 1225249423Sdim 1226249423Sdim private: 1227249423Sdim const CXXConstructorDecl *ConstructorDecl; 1228249423Sdim bool MemcpyableCtor; 1229249423Sdim FunctionArgList &Args; 1230249423Sdim SmallVector<CXXCtorInitializer*, 16> AggregatedInits; 1231249423Sdim }; 1232249423Sdim 1233249423Sdim class AssignmentMemcpyizer : public FieldMemcpyizer { 1234249423Sdim private: 1235249423Sdim 1236249423Sdim // Returns the memcpyable field copied by the given statement, if one 1237261991Sdim // exists. Otherwise returns null. 
1238261991Sdim FieldDecl *getMemcpyableField(Stmt *S) { 1239249423Sdim if (!AssignmentsMemcpyable) 1240276479Sdim return nullptr; 1241249423Sdim if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) { 1242249423Sdim // Recognise trivial assignments. 1243249423Sdim if (BO->getOpcode() != BO_Assign) 1244276479Sdim return nullptr; 1245249423Sdim MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS()); 1246249423Sdim if (!ME) 1247276479Sdim return nullptr; 1248249423Sdim FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl()); 1249249423Sdim if (!Field || !isMemcpyableField(Field)) 1250276479Sdim return nullptr; 1251249423Sdim Stmt *RHS = BO->getRHS(); 1252249423Sdim if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS)) 1253249423Sdim RHS = EC->getSubExpr(); 1254249423Sdim if (!RHS) 1255276479Sdim return nullptr; 1256249423Sdim MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS); 1257249423Sdim if (dyn_cast<FieldDecl>(ME2->getMemberDecl()) != Field) 1258276479Sdim return nullptr; 1259249423Sdim return Field; 1260249423Sdim } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) { 1261249423Sdim CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl()); 1262288943Sdim if (!(MD && isMemcpyEquivalentSpecialMember(MD))) 1263276479Sdim return nullptr; 1264249423Sdim MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument()); 1265249423Sdim if (!IOA) 1266276479Sdim return nullptr; 1267249423Sdim FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl()); 1268249423Sdim if (!Field || !isMemcpyableField(Field)) 1269276479Sdim return nullptr; 1270249423Sdim MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0)); 1271249423Sdim if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl())) 1272276479Sdim return nullptr; 1273249423Sdim return Field; 1274249423Sdim } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) { 1275249423Sdim FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl()); 1276249423Sdim if (!FD || FD->getBuiltinID() != 
Builtin::BI__builtin_memcpy) 1277276479Sdim return nullptr; 1278249423Sdim Expr *DstPtr = CE->getArg(0); 1279249423Sdim if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr)) 1280249423Sdim DstPtr = DC->getSubExpr(); 1281249423Sdim UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr); 1282249423Sdim if (!DUO || DUO->getOpcode() != UO_AddrOf) 1283276479Sdim return nullptr; 1284249423Sdim MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr()); 1285249423Sdim if (!ME) 1286276479Sdim return nullptr; 1287249423Sdim FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl()); 1288249423Sdim if (!Field || !isMemcpyableField(Field)) 1289276479Sdim return nullptr; 1290249423Sdim Expr *SrcPtr = CE->getArg(1); 1291249423Sdim if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr)) 1292249423Sdim SrcPtr = SC->getSubExpr(); 1293249423Sdim UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr); 1294249423Sdim if (!SUO || SUO->getOpcode() != UO_AddrOf) 1295276479Sdim return nullptr; 1296249423Sdim MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr()); 1297249423Sdim if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl())) 1298276479Sdim return nullptr; 1299249423Sdim return Field; 1300249423Sdim } 1301249423Sdim 1302276479Sdim return nullptr; 1303249423Sdim } 1304249423Sdim 1305249423Sdim bool AssignmentsMemcpyable; 1306249423Sdim SmallVector<Stmt*, 16> AggregatedStmts; 1307249423Sdim 1308249423Sdim public: 1309249423Sdim 1310249423Sdim AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD, 1311249423Sdim FunctionArgList &Args) 1312249423Sdim : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]), 1313249423Sdim AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) { 1314249423Sdim assert(Args.size() == 2); 1315249423Sdim } 1316249423Sdim 1317249423Sdim void emitAssignment(Stmt *S) { 1318249423Sdim FieldDecl *F = getMemcpyableField(S); 1319249423Sdim if (F) { 1320249423Sdim addMemcpyableField(F); 1321249423Sdim 
AggregatedStmts.push_back(S); 1322288943Sdim } else { 1323249423Sdim emitAggregatedStmts(); 1324249423Sdim CGF.EmitStmt(S); 1325249423Sdim } 1326249423Sdim } 1327249423Sdim 1328249423Sdim void emitAggregatedStmts() { 1329249423Sdim if (AggregatedStmts.size() <= 1) { 1330261991Sdim if (!AggregatedStmts.empty()) { 1331261991Sdim CopyingValueRepresentation CVR(CGF); 1332261991Sdim CGF.EmitStmt(AggregatedStmts[0]); 1333261991Sdim } 1334249423Sdim reset(); 1335249423Sdim } 1336249423Sdim 1337249423Sdim emitMemcpy(); 1338249423Sdim AggregatedStmts.clear(); 1339249423Sdim } 1340249423Sdim 1341249423Sdim void finish() { 1342249423Sdim emitAggregatedStmts(); 1343249423Sdim } 1344249423Sdim }; 1345296417Sdim} // end anonymous namespace 1346249423Sdim 1347296417Sdimstatic bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) { 1348296417Sdim const Type *BaseType = BaseInit->getBaseClass(); 1349296417Sdim const auto *BaseClassDecl = 1350296417Sdim cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl()); 1351296417Sdim return BaseClassDecl->isDynamicClass(); 1352249423Sdim} 1353249423Sdim 1354201361Srdivacky/// EmitCtorPrologue - This routine generates necessary code to initialize 1355201361Srdivacky/// base classes and non-static data members belonging to this constructor. 
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ClassDecl->getNumVBases() &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // The ABIs that don't have constructor variants need to put a branch
    // before the virtual base initialization code.
    BaseCtorContinueBB =
      CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  // Save 'this': the loops below may replace CXXThisValue with a pointer
  // that has been passed through an invariant.group barrier; restore the
  // original value once base initialization is done.
  llvm::Value *const OldThis = CXXThisValue;
  // Virtual base initializers first.
  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    // Under -fstrict-vtable-pointers, a dynamic base's vptr store must not
    // be merged with later vptr loads, so launder 'this' through a barrier.
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B, CtorType);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());

    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B, CtorType);
  }

  CXXThisValue = OldThis;

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

/// Returns true if destroying a BaseClassDecl subobject (within
/// MostDerivedClassDecl) would generate no code: its destructor, its
/// fields' destructors, and its bases' destructors are all trivial or
/// have trivial bodies.
static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  // Virtual bases are only destroyed by the most-derived class's
  // destructor, so only consider them at the top of the recursion.
  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

/// Returns true if destroying the given field generates no code, looking
/// through arrays to the base element type.
static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  // Non-record fields never have destructors.
  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return false;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  // A non-dynamic class has no vptrs at all.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  // A user-written destructor body might make virtual calls, so the
  // vptrs must be reset to this class's vtables first.
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  Stmt *Body = Dtor->getBody();
  if (Body)
    incrementProfileCounter(Body);

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    // Deleting variant: push the operator-delete cleanup, delegate the
    // actual destruction to the complete variant, then run the cleanup.
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          /*Delegating=*/false, LoadCXXThisAddress());
    PopCleanupBlock();
    return;
  }

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
  EmitAsanPrologueOrEpilogue(false);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.  In the Microsoft ABI, we
  // always delegate because we might not have a definition in this TU.
  switch (DtorType) {
  case Dtor_Comdat:
    llvm_unreachable("not expecting a COMDAT");

  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
           "can't emit a dtor without a body for non-Microsoft ABIs");

    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    assert(Body);

    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
      // Insert the llvm.invariant.group.barrier intrinsic before initializing
      // the vptrs to cancel any previous assumptions we might have made.
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
          CGM.getCodeGenOpts().OptimizationLevel > 0)
        CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
      InitializeVTablePointers(Dtor->getParent());
    }

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);

    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
  const Stmt *RootS = AssignOp->getBody();
  assert(isa<CompoundStmt>(RootS) &&
         "Body of an implicit assignment operator should be compound stmt.");
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);

  LexicalScope Scope(*this, RootCS->getSourceRange());

  // Feed each top-level statement through the memcpyizer so that runs of
  // adjacent trivial field assignments can be fused into memcpys.
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
  for (auto *I : RootCS->body())
    AM.emitAssignment(I);
  AM.finish();
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete final : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  /// Like CallDtorDelete, but only calls operator delete when the implicit
  /// "should delete" parameter of the deleting destructor is non-zero.
  struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
    llvm::Value *ShouldDeleteCondition;
  public:
    CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
      : ShouldDeleteCondition(ShouldDeleteCondition) {
      assert(ShouldDeleteCondition != nullptr);
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {

      llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
      llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");

      // A zero condition value means "do not call operator delete":
      // branch straight to the continuation block.
      llvm::Value *ShouldCallDelete
        = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
      CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);

      CGF.EmitBlock(callDeleteBB);
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
      CGF.Builder.CreateBr(continueBB);

      CGF.EmitBlock(continueBB);
    }
  };

  /// Cleanup that destroys a single field of 'this' using the given
  /// destroyer function.
  class DestroyField final : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
        : field(field), destroyer(destroyer),
          useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // Find the address of the field.
      Address thisValue = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      // (Last argument: whether to push an EH cleanup for partial arrays;
      // only requested when emitting on the normal path.)
      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };

  /// Emit a call to the MSan runtime hook __sanitizer_dtor_callback,
  /// poisoning PoisonSize bytes starting at Ptr.
  static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
                                        CharUnits::QuantityType PoisonSize) {
    // Pass in void pointer and size of region as arguments to runtime
    // function
    llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
                           llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)};

    llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy};

    llvm::FunctionType *FnType =
        llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
    llvm::Value *Fn =
        CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback");
    CGF.EmitNounwindRuntimeCall(Fn, Args);
  }

  /// Cleanup that poisons the storage of the trivially-destructible fields
  /// declared directly in Dtor's class once the destructor epilogue runs.
  class SanitizeDtorMembers final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling object poisoning.
    // Disables tail call elimination, to prevent the current stack frame
    // from disappearing from the stack trace.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const ASTRecordLayout &Layout =
          CGF.getContext().getASTRecordLayout(Dtor->getParent());

      // Nothing to poison.
      if (Layout.getFieldCount() == 0)
        return;

      // Prevent the current stack frame from disappearing from the stack
      // trace.
      CGF.CurFn->addFnAttr("disable-tail-calls", "true");

      // Construct pointer to region to begin poisoning, and calculate poison
      // size, so that only members declared in this class are poisoned.
      // Fields are scanned in declaration order, accumulating maximal runs
      // [startIndex, fieldIndex) of trivially-destructible fields; each run
      // is poisoned with a single runtime call.
      ASTContext &Context = CGF.getContext();
      unsigned fieldIndex = 0;
      int startIndex = -1;  // -1 means "not currently inside a run".
      // RecordDecl::field_iterator Field;
      for (const FieldDecl *Field : Dtor->getParent()->fields()) {
        // Poison field if it is trivial
        if (FieldHasTrivialDestructorBody(Context, Field)) {
          // Start sanitizing at this field
          if (startIndex < 0)
            startIndex = fieldIndex;

          // Currently on the last field, and it must be poisoned with the
          // current block.
          if (fieldIndex == Layout.getFieldCount() - 1) {
            PoisonMembers(CGF, startIndex, Layout.getFieldCount());
          }
        } else if (startIndex >= 0) {
          // No longer within a block of memory to poison, so poison the block
          PoisonMembers(CGF, startIndex, fieldIndex);
          // Re-set the start index
          startIndex = -1;
        }
        fieldIndex += 1;
      }
    }

  private:
    /// \param layoutStartOffset index of the ASTRecordLayout field to
    ///     start poisoning (inclusive)
    /// \param layoutEndOffset index of the ASTRecordLayout field to
    ///     end poisoning (exclusive)
    void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset,
                       unsigned layoutEndOffset) {
      ASTContext &Context = CGF.getContext();
      const ASTRecordLayout &Layout =
          Context.getASTRecordLayout(Dtor->getParent());

      // Byte offset of the first field to poison, used as an i8 GEP index
      // off 'this'.
      llvm::ConstantInt *OffsetSizePtr = llvm::ConstantInt::get(
          CGF.SizeTy,
          Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset))
              .getQuantity());

      llvm::Value *OffsetPtr = CGF.Builder.CreateGEP(
          CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy),
          OffsetSizePtr);

      CharUnits::QuantityType PoisonSize;
      if (layoutEndOffset >= Layout.getFieldCount()) {
        // The run extends to the last field: poison through the end of the
        // non-virtual part of the object.
        PoisonSize = Layout.getNonVirtualSize().getQuantity() -
                     Context.toCharUnitsFromBits(
                                Layout.getFieldOffset(layoutStartOffset))
                         .getQuantity();
      } else {
        PoisonSize = Context.toCharUnitsFromBits(
                                Layout.getFieldOffset(layoutEndOffset) -
                                Layout.getFieldOffset(layoutStartOffset))
                         .getQuantity();
      }

      // Zero-size runs (e.g. empty bitfield ranges) need no call.
      if (PoisonSize == 0)
        return;

      EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize);
    }
  };

  /// Cleanup that poisons the vtable pointer of the object being destroyed.
  class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling vtable pointer poisoning.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      assert(Dtor->getParent()->isDynamicClass());
      (void)Dtor;
      ASTContext &Context = CGF.getContext();
      // Poison vtable and vtable ptr if they exist for this class.
      // The vptr lives at offset zero of 'this' and is pointer-sized.
      llvm::Value *VTablePtr = CGF.LoadCXXThis();

      CharUnits::QuantityType PoisonSize =
          Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity();
      // Pass in void pointer and size of region as arguments to runtime
      // function
      EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize);
    }
  };
} // end anonymous namespace

/// \brief Emit all code that comes at the end of class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling whether we should call delete at the end of the dtor.
      EHStack.pushCleanup<CallDtorDeleteConditional>(
          NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that access after the base
    // and member destructors are invoked is invalid.
    // (Done here only for classes with virtual bases; otherwise the base
    // variant below poisons it.)
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if it has no virtual bases, but inherits
  // virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Poison fields such that access after their destructors are
  // invoked, and before the base class destructor runs, is invalid.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory))
    EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, Field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ConstantArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) {
  // Compute the element count and element type, then delegate to the
  // count-based overload below.
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors are loopBB for now; successor 0 is patched to the
    // continuation block once it exists (see zeroCheckBranch at the end).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayBegin = arrayBase.getPointer();
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
    arrayBase.getAlignment()
             .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, curAddr, E);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

/// Destroy the object at the given address by invoking its (necessarily
/// non-trivial) complete-object destructor.
void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr);
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating, Address This,
                                             const CXXConstructExpr *E) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object
  //   that is not of type X, or of a type derived from X, the behavior is
  //   undefined.
  // FIXME: Provide a source location here.
  EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, SourceLocation(),
                This.getPointer(), getContext().getRecordType(ClassDecl));

  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(E->getNumArgs() == 0 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed. If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    // Lower the copy/move construction to an aggregate copy of the single
    // source operand into 'This'.
    const Expr *Arg = E->getArg(0);
    QualType SrcTy = Arg->getType();
    Address Src = EmitLValue(Arg).getAddress();
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    EmitAggregateCopyCtor(This, Src, DestTy, SrcTy);
    return;
  }

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor());

  // Insert any ABI-specific implicit constructor arguments.
  // ExtraArgs counts how many the ABI added, so the call arrangement below
  // can account for them.
  unsigned ExtraArgs = CGM.getCXXABI().addImplicitConstructorArgs(
      *this, D, Type, ForVirtualBase, Delegating, Args);

  // Emit the call.
  llvm::Value *Callee = CGM.getAddrOfCXXStructor(D, getFromCtorType(Type));
  const CGFunctionInfo &Info =
      CGM.getTypes().arrangeCXXConstructorCall(Args, D, Type, ExtraArgs);
  EmitCall(Info, Callee, ReturnValueSlot(), Args, D);

  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable.  We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure if we can refer to vtable:
  // - Otherwise we can refer to vtable if it's safe to speculatively emit.
  // FIXME: If vtable is used by ctor/dtor, or if vtable is external and we are
  // sure that definition of vtable is not hidden,
  // then we are always safe to refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

/// Emit an llvm.assume that the vptr of the subobject described by \p Vptr
/// within the object at \p This equals the known vtable address point.
/// Emits nothing if the ABI cannot produce the address-point global.
void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  // Adjust 'This' to point at the subobject whose vptr we are assuming about.
  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  // Load the vptr and assert (to the optimizer) that it equals the global.
  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}

/// Emit vtable assumption loads for every vptr subobject of \p ClassDecl
/// in the complete object at \p This, if the ABI initializes vptrs in
/// structors.
void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

/// Emit a call to a synthesized (implicit) copy/move constructor \p D,
/// copying from \p Src into \p This.  Memcpy-equivalent constructors are
/// lowered to an aggregate copy; otherwise a real call is emitted with
/// 'this' and the source pointer as the first two arguments.
void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopyCtor(This, Src,
                          getContext().getTypeDeclType(D->getParent()),
                          (*E->arg_begin())->getType());
    return;
  }
  llvm::Value *Callee = CGM.getAddrOfCXXStructor(D, StructorType::Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src.getPointer()), QT);

  // Skip over first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
}

/// From within one constructor variant, call another variant of the same
/// constructor \p Ctor (e.g. complete -> base), forwarding the current
/// function's parameters \p Args: 'this', then the VTT if the target needs
/// one, then the explicit arguments.
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false,
                                         /*Delegating=*/true)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    // If the current function itself takes a VTT parameter, skip over it in
    // the incoming argument list so it isn't forwarded twice.
    if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  llvm::Value *Callee =
      CGM.getAddrOfCXXStructor(Ctor, getFromCtorType(CtorType));
  EmitCall(CGM.getTypes()
               .arrangeCXXStructorDeclaration(Ctor, getFromCtorType(CtorType)),
           Callee, ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  /// EH cleanup that destroys a partially-constructed object when the target
  /// of a delegating constructor throws after the delegated-to constructor
  /// has completed.  The destructor is invoked in Delegating mode.
  struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    Address Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                /*Delegating=*/true, Addr);
    }
  };
} // end anonymous namespace

/// Emit the body of a delegating constructor: evaluate the single
/// delegating initializer into 'this', then (with exceptions enabled and a
/// non-trivial destructor) arm a cleanup so the fully-constructed object is
/// destroyed if the delegating constructor's remaining body throws.
void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    // Match the destructor variant to the constructor variant being emitted.
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

/// Emit a destructor call for \p DD on the object at \p This; the actual
/// lowering is delegated entirely to the C++ ABI.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating,
                                            Address This) {
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
                                     Delegating, This);
}

namespace {
  /// Cleanup that runs the complete-object destructor for a local variable.
  struct CallLocalDtor final : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    Address Addr;

    CallLocalDtor(const CXXDestructorDecl *D, Address Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false,
                                /*Delegating=*/false, Addr);
    }
  };
}

/// Push a normal-and-EH cleanup that destroys the object at \p Addr with
/// destructor \p D.
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            Address Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

/// As above, but resolves the destructor from the type \p T; does nothing
/// for non-class types or classes with trivial destructors.
void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if
 (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

/// Store the vtable address point for the subobject described by \p Vptr
/// into the corresponding vptr field of the object that 'this' points at.
void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
  // Compute the address point.
  llvm::Value *VTableAddressPoint =
      CGM.getCXXABI().getVTableAddressPointInStructor(
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);

  if (!VTableAddressPoint)
    return;

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = nullptr;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.

    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Vptr.Base.getBaseOffset();
  }

  // Apply the offsets.
  Address VTableField = LoadCXXThisAddress();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
        Vptr.NearestVBase);

  // Finally, store the address point. Use the same LLVM types as the field to
  // support optimization.
  llvm::Type *VTablePtrTy =
      llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true)
          ->getPointerTo()
          ->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, VTablePtrTy->getPointerTo());
  VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);

  // Tag the store with vptr TBAA, and with invariant.group metadata when
  // strict vtable pointers are enabled at -O1+.
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstructionWithTBAA(Store, CGM.getTBAAInfoForVTablePtr());
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
}

/// Collect all vptr subobjects of \p VTableClass, starting from the
/// complete-object subobject at offset zero.
CodeGenFunction::VPtrsVector
CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
  CodeGenFunction::VPtrsVector VPtrsResult;
  VisitedVirtualBasesSetTy VBases;
  getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
                    /*NearestVBase=*/nullptr,
                    /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                    /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
                    VPtrsResult);
  return VPtrsResult;
}

/// Recursive worker for the overload above: appends a VPtr entry for \p Base
/// (unless it is a non-virtual primary base, whose vptr is shared), then
/// walks all dynamic base classes, visiting each virtual base only once.
void CodeGenFunction::getVTablePointers(BaseSubobject Base,
                                        const CXXRecordDecl *NearestVBase,
                                        CharUnits OffsetFromNearestVBase,
                                        bool BaseIsNonVirtualPrimaryBase,
                                        const CXXRecordDecl *VTableClass,
                                        VisitedVirtualBasesSetTy &VBases,
                                        VPtrsVector &Vptrs) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
    Vptrs.push_back(Vptr);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (const auto &I : RD->bases()) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I.getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I.isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl).second)
        continue;

      // Virtual-base offsets are relative to the most derived class's layout.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    getVTablePointers(
        BaseSubobject(BaseDecl, BaseOffset),
        I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
  }
}

/// Initialize all vtable pointers of the object 'this' points at, then let
/// the ABI initialize any hidden virtual-inheritance members.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Initialize the vtable pointers for this class and all of its bases.
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
    for (const VPtr &Vptr : getVTablePointers(RD))
      InitializeVTablePointer(Vptr);

  if (RD->getNumVBases())
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
}

/// Load the vtable pointer from the object at \p This, reinterpreting the
/// storage as a pointer to \p VTableTy.  The load is tagged with vptr TBAA
/// and, under -fstrict-vtable-pointers at -O1+, with invariant.group
/// metadata for \p RD.
llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
                                           llvm::Type *VTableTy,
                                           const CXXRecordDecl *RD) {
  Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstructionWithTBAA(VTable, CGM.getTBAAInfoForVTablePtr());

  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);

  return VTable;
}

// If a class has a single non-virtual base and does not introduce or override
// virtual member functions or fields, it will have the same layout as its base.
// This function returns the least derived such class.
//
// Casting an instance of a base class to such a derived class is technically
// undefined behavior, but it is a relatively common hack for introducing member
// functions on class instances with specific properties (e.g. llvm::Operator)
// that works under most compilers and should not have security implications, so
// we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
2464288943Sdimstatic const CXXRecordDecl * 2465288943SdimLeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) { 2466288943Sdim if (!RD->field_empty()) 2467288943Sdim return RD; 2468223017Sdim 2469288943Sdim if (RD->getNumVBases() != 0) 2470288943Sdim return RD; 2471288943Sdim 2472288943Sdim if (RD->getNumBases() != 1) 2473288943Sdim return RD; 2474288943Sdim 2475288943Sdim for (const CXXMethodDecl *MD : RD->methods()) { 2476288943Sdim if (MD->isVirtual()) { 2477288943Sdim // Virtual member functions are only ok if they are implicit destructors 2478288943Sdim // because the implicit destructor will have the same semantics as the 2479288943Sdim // base class's destructor if no fields are added. 2480288943Sdim if (isa<CXXDestructorDecl>(MD) && MD->isImplicit()) 2481288943Sdim continue; 2482288943Sdim return RD; 2483288943Sdim } 2484288943Sdim } 2485288943Sdim 2486288943Sdim return LeastDerivedClassWithSameLayout( 2487288943Sdim RD->bases_begin()->getType()->getAsCXXRecordDecl()); 2488288943Sdim} 2489288943Sdim 2490288943Sdimvoid CodeGenFunction::EmitVTablePtrCheckForCall(const CXXMethodDecl *MD, 2491288943Sdim llvm::Value *VTable, 2492288943Sdim CFITypeCheckKind TCK, 2493288943Sdim SourceLocation Loc) { 2494288943Sdim const CXXRecordDecl *ClassDecl = MD->getParent(); 2495288943Sdim if (!SanOpts.has(SanitizerKind::CFICastStrict)) 2496288943Sdim ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl); 2497288943Sdim 2498288943Sdim EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc); 2499288943Sdim} 2500288943Sdim 2501288943Sdimvoid CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, 2502288943Sdim llvm::Value *Derived, 2503288943Sdim bool MayBeNull, 2504288943Sdim CFITypeCheckKind TCK, 2505288943Sdim SourceLocation Loc) { 2506288943Sdim if (!getLangOpts().CPlusPlus) 2507288943Sdim return; 2508288943Sdim 2509288943Sdim auto *ClassTy = T->getAs<RecordType>(); 2510288943Sdim if (!ClassTy) 2511288943Sdim return; 2512288943Sdim 2513288943Sdim const CXXRecordDecl *ClassDecl 
= cast<CXXRecordDecl>(ClassTy->getDecl()); 2514288943Sdim 2515288943Sdim if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass()) 2516288943Sdim return; 2517288943Sdim 2518288943Sdim if (!SanOpts.has(SanitizerKind::CFICastStrict)) 2519288943Sdim ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl); 2520288943Sdim 2521296417Sdim llvm::BasicBlock *ContBlock = nullptr; 2522288943Sdim 2523288943Sdim if (MayBeNull) { 2524288943Sdim llvm::Value *DerivedNotNull = 2525288943Sdim Builder.CreateIsNotNull(Derived, "cast.nonnull"); 2526288943Sdim 2527288943Sdim llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check"); 2528288943Sdim ContBlock = createBasicBlock("cast.cont"); 2529288943Sdim 2530288943Sdim Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock); 2531288943Sdim 2532288943Sdim EmitBlock(CheckBlock); 2533288943Sdim } 2534288943Sdim 2535296417Sdim llvm::Value *VTable = 2536296417Sdim GetVTablePtr(Address(Derived, getPointerAlign()), Int8PtrTy, ClassDecl); 2537296417Sdim 2538288943Sdim EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc); 2539288943Sdim 2540288943Sdim if (MayBeNull) { 2541288943Sdim Builder.CreateBr(ContBlock); 2542288943Sdim EmitBlock(ContBlock); 2543288943Sdim } 2544288943Sdim} 2545288943Sdim 2546288943Sdimvoid CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD, 2547288943Sdim llvm::Value *VTable, 2548288943Sdim CFITypeCheckKind TCK, 2549288943Sdim SourceLocation Loc) { 2550288943Sdim if (CGM.IsCFIBlacklistedRecord(RD)) 2551288943Sdim return; 2552288943Sdim 2553288943Sdim SanitizerScope SanScope(this); 2554288943Sdim 2555296417Sdim llvm::Metadata *MD = 2556296417Sdim CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); 2557296417Sdim llvm::Value *BitSetName = llvm::MetadataAsValue::get(getLLVMContext(), MD); 2558288943Sdim 2559288943Sdim llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy); 2560288943Sdim llvm::Value *BitSetTest = 2561288943Sdim 
Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::bitset_test), 2562288943Sdim {CastedVTable, BitSetName}); 2563288943Sdim 2564296417Sdim if (CGM.getCodeGenOpts().SanitizeCfiCrossDso) { 2565296417Sdim if (auto TypeId = CGM.CreateCfiIdForTypeMetadata(MD)) { 2566296417Sdim EmitCfiSlowPathCheck(BitSetTest, TypeId, CastedVTable); 2567296417Sdim return; 2568296417Sdim } 2569296417Sdim } 2570296417Sdim 2571288943Sdim SanitizerMask M; 2572288943Sdim switch (TCK) { 2573288943Sdim case CFITCK_VCall: 2574288943Sdim M = SanitizerKind::CFIVCall; 2575288943Sdim break; 2576288943Sdim case CFITCK_NVCall: 2577288943Sdim M = SanitizerKind::CFINVCall; 2578288943Sdim break; 2579288943Sdim case CFITCK_DerivedCast: 2580288943Sdim M = SanitizerKind::CFIDerivedCast; 2581288943Sdim break; 2582288943Sdim case CFITCK_UnrelatedCast: 2583288943Sdim M = SanitizerKind::CFIUnrelatedCast; 2584288943Sdim break; 2585288943Sdim } 2586288943Sdim 2587288943Sdim llvm::Constant *StaticData[] = { 2588296417Sdim EmitCheckSourceLocation(Loc), 2589296417Sdim EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)), 2590296417Sdim llvm::ConstantInt::get(Int8Ty, TCK), 2591288943Sdim }; 2592288943Sdim EmitCheck(std::make_pair(BitSetTest, M), "cfi_bad_type", StaticData, 2593288943Sdim CastedVTable); 2594288943Sdim} 2595288943Sdim 2596223017Sdim// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do 2597223017Sdim// quite what we want. 
/// Strip parentheses, no-op casts, and GNU __extension__ markers from \p E,
/// returning the first subexpression that is none of these.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}

/// Decide whether a virtual call to \p MD on object expression \p Base can
/// be emitted as a direct (devirtualized) call because the dynamic type of
/// the object is statically known.
bool
CodeGenFunction::CanDevirtualizeMemberFunctionCall(const Expr *Base,
                                                   const CXXMethodDecl *MD) {
  // When building with -fapple-kext, all calls must go through the vtable since
  // the kernel linker can do runtime patching of vtables.
  if (getLangOpts().AppleKext)
    return false;

  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  // struct A { virtual void f(); }
  // struct B final : A { };
  //
  // void f(B *b) {
  //   b->f();
  // }
  //
  const CXXRecordDecl *MostDerivedClassDecl = Base->getBestDynamicClassType();
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class itself is marked 'final' it can't be overridden
  // and we can therefore devirtualize the member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // This is a record decl. We know the type and can devirtualize it.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can devirtualize calls on an object accessed by a class member access
  // expression, since by C++11 [basic.life]p6 we know that it can't refer to
  // a derived class object constructed in the same location.
  if (const MemberExpr *ME = dyn_cast<MemberExpr>(Base))
    if (const ValueDecl *VD = dyn_cast<ValueDecl>(ME->getMemberDecl()))
      return VD->getType()->isRecordType();

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType(getContext())->isRecordType();

  // We can't devirtualize the call.
  return false;
}

/// Emit a direct call to the lambda call operator \p callOperator, forwarding
/// the already-built argument list \p callArgs, and propagate the result to
/// the current function's return slot.
void CodeGenFunction::EmitForwardingCallToLambda(
                                      const CXXMethodDecl *callOperator,
                                      CallArgList &callArgs) {
  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.  Reuse the current function's return value slot
  // only for indirectly-returned aggregates.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getReturnType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);

  // If necessary, copy the returned value into the slot.
2715239462Sdim if (!resultType->isVoidType() && returnSlot.isNull()) 2716239462Sdim EmitReturnOfRValue(RV, resultType); 2717249423Sdim else 2718249423Sdim EmitBranchThroughCleanup(ReturnBlock); 2719234353Sdim} 2720234353Sdim 2721234353Sdimvoid CodeGenFunction::EmitLambdaBlockInvokeBody() { 2722234353Sdim const BlockDecl *BD = BlockInfo->getBlockDecl(); 2723234353Sdim const VarDecl *variable = BD->capture_begin()->getVariable(); 2724234353Sdim const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl(); 2725234353Sdim 2726234353Sdim // Start building arguments for forwarding call 2727234353Sdim CallArgList CallArgs; 2728234353Sdim 2729234353Sdim QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda)); 2730296417Sdim Address ThisPtr = GetAddrOfBlockDecl(variable, false); 2731296417Sdim CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType); 2732234353Sdim 2733234353Sdim // Add the rest of the parameters. 2734276479Sdim for (auto param : BD->params()) 2735261991Sdim EmitDelegateCallArg(CallArgs, param, param->getLocStart()); 2736276479Sdim 2737288943Sdim assert(!Lambda->isGenericLambda() && 2738261991Sdim "generic lambda interconversion to block not implemented"); 2739261991Sdim EmitForwardingCallToLambda(Lambda->getLambdaCallOperator(), CallArgs); 2740234353Sdim} 2741234353Sdim 2742234353Sdimvoid CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) { 2743251662Sdim if (cast<CXXMethodDecl>(CurCodeDecl)->isVariadic()) { 2744234353Sdim // FIXME: Making this work correctly is nasty because it requires either 2745234353Sdim // cloning the body of the call operator or making the call operator forward. 
    CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
    return;
  }

  // Reuse the call operator's statement body directly under the thunk's
  // prologue/arguments.
  EmitFunctionBody(Args, cast<FunctionDecl>(CurGD.getDecl())->getBody());
}

/// Emit the body of a lambda's static invoker \p MD by building an argument
/// list that forwards the invoker's parameters and calling the lambda's call
/// operator.  The 'this' argument is passed as undef — presumably valid
/// because the static invoker exists only for captureless lambdas, whose call
/// operator never dereferences 'this' (TODO confirm).
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  // Undef 'this' pointer of the lambda's closure type (see note above).
  QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (auto Param : MD->params())
    EmitDelegateCallArg(CallArgs, Param, Param->getLocStart());

  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
  // For a generic lambda, find the corresponding call operator specialization
  // to which the call to the static-invoker shall be forwarded.
  if (Lambda->isGenericLambda()) {
    assert(MD->isFunctionTemplateSpecialization());
    // The invoker was instantiated with some template arguments; look up the
    // call-operator specialization instantiated with those same arguments.
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
    FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate();
    void *InsertPos = nullptr;
    FunctionDecl *CorrespondingCallOpSpecialization =
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
    assert(CorrespondingCallOpSpecialization);
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
  }
  EmitForwardingCallToLambda(CallOp, CallArgs);
}

/// Entry point for emitting a lambda's static invoker: reject variadic
/// lambdas with a diagnostic, otherwise delegate to
/// EmitLambdaDelegatingInvokeBody.
void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}