xref: /aosp_15_r20/external/clang/lib/CodeGen/CGClass.cpp (revision 67e74705e28f6214e480b399dd47ea732279e315)
//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->isCompleteDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->hasAttr<FinalAttr>()) {
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  } else {
    return layout.getNonVirtualAlignment();
  }
}
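
// Illustrative note (not from the original source): e.g. given
//   struct B { virtual ~B(); };
//   struct alignas(32) D final : B { };
// a 'D *' may assume the full 32-byte alignment, while a plain 'B *'
// may only assume B's non-virtual alignment, since it could point
// into the B subobject of any derived class.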

/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.

  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well.  (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters.  Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'.  So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment.  The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}
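
// Worked example (illustrative, not from the original source): if a
// base pointer is known to be only 4-byte aligned while its class
// expects 8, a target subobject whose expected alignment is 16 may
// only be assumed min(4, 16) = 4-byte aligned, because the dynamic
// offset to it can be any multiple of the actual 4-byte alignment.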

Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  assert(isa<CXXMethodDecl>(CurFuncDecl));

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent();
    CXXThisAlignment = CGM.getClassPointerAlignment(RD);
  }

  return Address(LoadCXXThis(), CXXThisAlignment);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                      const MemberPointerType *memberPtrType,
                                                 AlignmentSource *alignSource) {
  // Ask the ABI to compute the actual address.
  llvm::Value *ptr =
    CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                 memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign = getNaturalTypeAlignment(memberType, alignSource);
  memberAlign =
    CGM.getDynamicOffsetAlignment(base.getAlignment(),
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
                                  memberAlign);
  return Address(ptr, memberAlign);
}
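
// Usage sketch (illustrative, not from the original source): for
//   struct S { double d; };
//   double S::*mp = &S::d;
//   s.*mp
// the ABI computes the raw address from 'mp', and the result's
// alignment is clamped by getDynamicOffsetAlignment so that an
// under-aligned 'S *' base yields a correspondingly weaker alignment.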

CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = Builder.CreateElementBitCast(V, Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  V = Builder.CreateElementBitCast(V, ConvertType(Base));

  return V;
}

static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
                                        nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.getPointer();
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, alignment);
}

Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(),
                    DerivedTy, DerivedAlign, !NullCheckValue);
    }
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer());
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.getPointer(), DerivedTy, DerivedAlign, true);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
      CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value.getPointer(), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = Address(PHI, Value.getAlignment());
  }

  return Value;
}
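
// Example of the paths above (illustrative, not from the original
// source): for
//   struct B { int b; };
//   struct D final : virtual B { int d; };
//   B *up(D *p) { return p; }
// the virtual step is devirtualized because D is final; without
// 'final', the vbase offset is loaded through the vtable, and a
// possibly-null 'p' is guarded by the cast.notnull/cast.end blocks.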

Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(BaseAddr, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Address(Value, CGM.getClassPointerAlignment(Derived));
}
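
// Example (illustrative, not from the original source): for
//   struct A { int a; };  struct B { int b; };
//   struct D : A, B { };
//   D *down(B *p) { return static_cast<D *>(p); }
// B's offset within D is subtracted via the 'sub.ptr' GEP above, and
// a null 'p' flows through the cast.null block so the result stays null.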

llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = LoadCXXVTT();
    VTT = Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGM.getVTables().GetAddrOfVTT(RD);
    VTT = Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}
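
// Example (illustrative, not from the original source): given
//   struct A { };
//   struct B : virtual A { B(); };
//   struct C : B { C(); };
// C's complete-object constructor fetches C's VTT by name and passes
// the sub-VTT for the B subobject to B's base-object constructor, so
// B can install the construction vtables for the B-within-C layout.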

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      Address Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}
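
// Example (illustrative, not from the original source; 'f' is a
// hypothetical function): in
//   struct D : B { D() : B(f(this)) { } };
// the argument expression passed to B's constructor uses 'this', so
// the vtable pointers are initialized before the base initializer is
// emitted.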

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     Address ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;

    if (ArrayIndexVar.isValid()) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *Dest = LHS.getPointer();
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);

      // Update the LValue.
      CharUnits EltSize = CGF.getContext().getTypeSizeInChars(T);
      CharUnits Align = LV.getAlignment().alignmentOfArrayElement(EltSize);
      LV.setAddress(Address(Dest, Align));
    }

    switch (CGF.getEvaluationKind(T)) {
    case TEK_Scalar:
      CGF.EmitScalarInit(Init, /*decl*/ nullptr, LV, false);
      break;
    case TEK_Complex:
      CGF.EmitComplexExprIntoLValue(Init, LV, /*isInit*/ true);
      break;
    case TEK_Aggregate: {
      AggValueSlot Slot =
        AggValueSlot::forLValue(LV,
                                AggValueSlot::IsDestructed,
                                AggValueSlot::DoesNotNeedGCBarriers,
                                AggValueSlot::IsNotAliased);

      CGF.EmitAggExpr(Init, Slot);
      break;
    }
    }

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  Address IndexVar = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(IndexVar.getElementType());
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  // Inside the loop body recurse to emit the inner loop or, eventually, the
  // constructor call.
  EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                           Array->getElementType(), ArrayIndexes, Index + 1);

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}
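
// Example (illustrative, not from the original source): initializing
// a member 'T m[2][3]' through this path emits two nested
// for.cond/for.body/for.inc loops, one per array dimension, with the
// element initialization emitted at the innermost recursion.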

static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}
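
// Examples (illustrative, not from the original source): the
// defaulted copy constructor of
//   struct P { int a; char b; };
// is trivial, so a memcpy is allowed; for a union, a defaulted copy
// or move *must* be a memcpy so that the bytes of whichever member is
// active are preserved.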

static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndices();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                Expr *Init, ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    Address ArrayIndexVar = Address::invalid();
    if (ArrayIndexes.size()) {
      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      Address BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateMemTemp(getContext().getSizeType(), "object.index");
      llvm::Value *Zero =
        llvm::Constant::getNullValue(ArrayIndexVar.getElementType());
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    std::pair<CharUnits, CharUnits> FieldInfo =
        Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.first;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::Constant *F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}
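
// Example (illustrative, not from the original source): under
// -fsanitize-address-field-padding=1, a class such as
//   class S { int a; /* inserted redzone */ int b; };
// has its inserted paddings poisoned on constructor entry and
// unpoisoned in the destructor via the __asan_* calls above.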

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getLocEnd());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// RAII object to indicate that codegen is copying the value representation
  /// instead of the object representation. Useful when copying a struct or
  /// class which has uninitialized members and we're only performing
  /// lvalue-to-rvalue conversion on the object but not its members.
  class CopyingValueRepresentation {
  public:
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
      CGF.SanOpts.set(SanitizerKind::Bool, false);
      CGF.SanOpts.set(SanitizerKind::Enum, false);
    }
    ~CopyingValueRepresentation() {
      CGF.SanOpts = OldSanOpts;
    }
  private:
    CodeGenFunction &CGF;
    SanitizerSet OldSanOpts;
  };
} // end anonymous namespace

namespace {
  class FieldMemcpyizer {
  public:
    FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                    const VarDecl *SrcRec)
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
        LastFieldOffset(0), LastAddedFieldIndex(0) {}

    bool isMemcpyableField(FieldDecl *F) const {
      // Never memcpy fields when we are adding poisoned paddings.
      if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
        return false;
      Qualifiers Qual = F->getType().getQualifiers();
      if (Qual.hasVolatile() || Qual.hasObjCLifetime())
        return false;
      return true;
    }

    void addMemcpyableField(FieldDecl *F) {
      if (!FirstField)
        addInitialField(F);
      else
        addNextField(F);
    }

    CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
      unsigned LastFieldSize =
        LastField->isBitField() ?
          LastField->getBitWidthValue(CGF.getContext()) :
          CGF.getContext().getTypeSize(LastField->getType());
      uint64_t MemcpySizeBits =
        LastFieldOffset + LastFieldSize - FirstByteOffset +
        CGF.getContext().getCharWidth() - 1;
      CharUnits MemcpySize =
        CGF.getContext().toCharUnitsFromBits(MemcpySizeBits);
      return MemcpySize;
    }

    void emitMemcpy() {
      // Give the subclass a chance to bail out if it feels the memcpy isn't
      // worth it (e.g. it hasn't aggregated enough data).
      if (!FirstField) {
        return;
      }

      uint64_t FirstByteOffset;
      if (FirstField->isBitField()) {
        const CGRecordLayout &RL =
          CGF.getTypes().getCGRecordLayout(FirstField->getParent());
        const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
        // FirstFieldOffset is not appropriate for bitfields,
        // we need to use the storage offset instead.
        FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
      } else {
        FirstByteOffset = FirstFieldOffset;
      }

      CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      Address ThisPtr = CGF.LoadCXXThisAddress();
      LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
      LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
      llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
      LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

      emitMemcpyIR(Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(),
                   Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(),
                   MemcpySize);
      reset();
    }

    void reset() {
      FirstField = nullptr;
    }

  protected:
    CodeGenFunction &CGF;
    const CXXRecordDecl *ClassDecl;

  private:
    void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
      llvm::PointerType *DPT = DestPtr.getType();
      llvm::Type *DBP =
        llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), DPT->getAddressSpace());
      DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP);

      llvm::PointerType *SPT = SrcPtr.getType();
      llvm::Type *SBP =
        llvm::Type::getInt8PtrTy(CGF.getLLVMContext(), SPT->getAddressSpace());
      SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP);

      CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
    }

    void addInitialField(FieldDecl *F) {
      FirstField = F;
      LastField = F;
      FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      LastFieldOffset = FirstFieldOffset;
      LastAddedFieldIndex = F->getFieldIndex();
    }

    void addNextField(FieldDecl *F) {
      // For the most part, the following invariant will hold:
      //   F->getFieldIndex() == LastAddedFieldIndex + 1
      // The one exception is that Sema won't add a copy-initializer for an
      // unnamed bitfield, which will show up here as a gap in the sequence.
      assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
             "Cannot aggregate fields out of order.");
      LastAddedFieldIndex = F->getFieldIndex();

      // The 'first' and 'last' fields are chosen by offset, rather than field
      // index. This allows the code to support bitfields, as well as regular
      // fields.
      uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      if (FOffset < FirstFieldOffset) {
        FirstField = F;
        FirstFieldOffset = FOffset;
      } else if (FOffset > LastFieldOffset) {
        LastField = F;
        LastFieldOffset = FOffset;
      }
    }

    const VarDecl *SrcRec;
    const ASTRecordLayout &RecLayout;
    FieldDecl *FirstField;
    FieldDecl *LastField;
    uint64_t FirstFieldOffset, LastFieldOffset;
    unsigned LastAddedFieldIndex;
  };
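
  // Example (illustrative, not from the original source): for a
  // defaulted copy constructor of
  //   struct S { int a; int b; short c; std::string s; };
  // the three leading trivial fields are aggregated and copied with a
  // single memcpy; 's' falls back to its copy constructor.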
1126  
1127    class ConstructorMemcpyizer : public FieldMemcpyizer {
1128    private:
1129      /// Get source argument for copy constructor. Returns null if not a copy
1130      /// constructor.
getTrivialCopySource(CodeGenFunction & CGF,const CXXConstructorDecl * CD,FunctionArgList & Args)1131      static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
1132                                                 const CXXConstructorDecl *CD,
1133                                                 FunctionArgList &Args) {
1134        if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
1135          return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
1136        return nullptr;
1137      }
1138  
1139      // Returns true if a CXXCtorInitializer represents a member initialization
1140      // that can be rolled into a memcpy.
    bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
      if (!MemcpyableCtor)
        return false;
      FieldDecl *Field = MemberInit->getMember();
      assert(Field && "No field for member init.");
      QualType FieldType = Field->getType();
      CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());

      // Bail out on non-memcpyable, not-trivially-copyable members.
      if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
          !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
            FieldType->isReferenceType()))
        return false;

      // Bail out on volatile fields.
      if (!isMemcpyableField(Field))
        return false;

      // Otherwise we're good.
      return true;
    }

  public:
    ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
                          FunctionArgList &Args)
      : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)),
        ConstructorDecl(CD),
        MemcpyableCtor(CD->isDefaulted() &&
                       CD->isCopyOrMoveConstructor() &&
                       CGF.getLangOpts().getGC() == LangOptions::NonGC),
        Args(Args) { }

    void addMemberInitializer(CXXCtorInitializer *MemberInit) {
      if (isMemberInitMemcpyable(MemberInit)) {
        AggregatedInits.push_back(MemberInit);
        addMemcpyableField(MemberInit->getMember());
      } else {
        emitAggregatedInits();
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
                              ConstructorDecl, Args);
      }
    }

    void emitAggregatedInits() {
      if (AggregatedInits.size() <= 1) {
        // This memcpy is too small to be worthwhile. Fall back on default
        // codegen.
        if (!AggregatedInits.empty()) {
          CopyingValueRepresentation CVR(CGF);
          EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
                                AggregatedInits[0], ConstructorDecl, Args);
          AggregatedInits.clear();
        }
        reset();
        return;
      }

      pushEHDestructors();
      emitMemcpy();
      AggregatedInits.clear();
    }

    void pushEHDestructors() {
      Address ThisPtr = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);

      for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
        CXXCtorInitializer *MemberInit = AggregatedInits[i];
        QualType FieldType = MemberInit->getAnyMember()->getType();
        QualType::DestructionKind dtorKind = FieldType.isDestructedType();
        if (!CGF.needsEHCleanup(dtorKind))
          continue;
        LValue FieldLHS = LHS;
        EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
        CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(), FieldType);
      }
    }

    void finish() {
      emitAggregatedInits();
    }

  private:
    const CXXConstructorDecl *ConstructorDecl;
    bool MemcpyableCtor;
    FunctionArgList &Args;
    SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
  };
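
  // Illustrative note (not from the original source): the coalescing above
  // applies to defaulted copy/move constructors whose member initializers are
  // memcpy-equivalent. For a hypothetical class such as
  // \code
  //   struct Point {
  //     int x, y, z;
  //     Point(const Point &) = default;
  //   };
  // \endcode
  // the three adjacent int members are aggregated by addMemberInitializer and
  // lowered by emitAggregatedInits as a single memcpy from the source object
  // instead of three separate field-by-field copies.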

  class AssignmentMemcpyizer : public FieldMemcpyizer {
  private:
    // Returns the memcpyable field copied by the given statement, if one
    // exists. Otherwise returns null.
    FieldDecl *getMemcpyableField(Stmt *S) {
      if (!AssignmentsMemcpyable)
        return nullptr;
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
        // Recognise trivial assignments.
        if (BO->getOpcode() != BO_Assign)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Stmt *RHS = BO->getRHS();
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
          RHS = EC->getSubExpr();
        if (!RHS)
          return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS);
        if (!ME2 || dyn_cast<FieldDecl>(ME2->getMemberDecl()) != Field)
          return nullptr;
        return Field;
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
        if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
          return nullptr;
        MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
        if (!IOA)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
        if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
          return nullptr;
        return Field;
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
          return nullptr;
        Expr *DstPtr = CE->getArg(0);
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
          DstPtr = DC->getSubExpr();
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Expr *SrcPtr = CE->getArg(1);
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
          SrcPtr = SC->getSubExpr();
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
          return nullptr;
        return Field;
      }

      return nullptr;
    }

    bool AssignmentsMemcpyable;
    SmallVector<Stmt*, 16> AggregatedStmts;

  public:
    AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
                         FunctionArgList &Args)
      : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
        AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
      assert(Args.size() == 2);
    }

    void emitAssignment(Stmt *S) {
      FieldDecl *F = getMemcpyableField(S);
      if (F) {
        addMemcpyableField(F);
        AggregatedStmts.push_back(S);
      } else {
        emitAggregatedStmts();
        CGF.EmitStmt(S);
      }
    }

    void emitAggregatedStmts() {
      if (AggregatedStmts.size() <= 1) {
        if (!AggregatedStmts.empty()) {
          CopyingValueRepresentation CVR(CGF);
          CGF.EmitStmt(AggregatedStmts[0]);
        }
        // After reset() the FirstField marker is null, so the emitMemcpy()
        // below degenerates to a no-op.
        reset();
      }

      emitMemcpy();
      AggregatedStmts.clear();
    }

    void finish() {
      emitAggregatedStmts();
    }
  };
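
  // Illustrative note (not from the original source): getMemcpyableField
  // recognises the three statement forms Sema emits in the body of an
  // implicit copy/move assignment operator. For a hypothetical class
  // \code
  //   struct Inner { int v; };   // trivial operator=
  //   struct S {
  //     int a;      // plain assignment:  this->a = other.a
  //     Inner b;    // member call to Inner's trivial operator=
  //     char c[8];  // __builtin_memcpy(&this->c, &other.c, 8)
  //   };
  // \endcode
  // all three fields form one memcpyable run, batched by emitAssignment and
  // flushed as a single memcpy; a field with a non-trivial assignment
  // operator would break the run and be emitted as an ordinary statement.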
} // end anonymous namespace

static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
          cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
  return BaseClassDecl->isDynamicClass();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ClassDecl->getNumVBases() &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // The ABIs that don't have constructor variants need to put a branch
    // before the virtual base initialization code.
    BaseCtorContinueBB =
      CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  llvm::Value *const OldThis = CXXThisValue;
  // Virtual base initializers first.
  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B, CtorType);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());

    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B, CtorType);
  }

  CXXThisValue = OldThis;

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}
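
// Illustrative note (not from the original source): for a hypothetical
// hierarchy such as
// \code
//   struct VB { int v; };
//   struct B  { int b; };
//   struct D : virtual VB, B { int d; D(); };
// \endcode
// the prologue of D's complete-object constructor runs in the order laid out
// above: VB's initializer first (virtual bases), then B's (non-virtual
// bases), then the vtable pointers are installed, and only then is the
// member d initialized through the ConstructorMemcpyizer.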

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return false;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}
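
// Illustrative note (not from the original source): the vptr stores in a
// destructor only matter if code that could observe the vptr runs inside it.
// For a hypothetical pair of classes
// \code
//   struct A { virtual void f(); ~A() {} };                // skippable
//   struct B { virtual void f(); std::string s; ~B() {} }; // not skippable
// \endcode
// A has an empty destructor body and only trivially destructible fields, so
// the vptr initialization can be skipped; B's std::string member has a
// non-trivial destructor body, so the stores cannot be proven dead and are
// still emitted.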

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  Stmt *Body = Dtor->getBody();
  if (Body)
    incrementProfileCounter(Body);

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          /*Delegating=*/false, LoadCXXThisAddress());
    PopCleanupBlock();
    return;
  }

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
  EmitAsanPrologueOrEpilogue(false);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.  In the Microsoft ABI, we
  // always delegate because we might not have a definition in this TU.
  switch (DtorType) {
  case Dtor_Comdat:
    llvm_unreachable("not expecting a COMDAT");

  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
           "can't emit a dtor without a body for non-Microsoft ABIs");

    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    assert(Body);

    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
      // Insert the llvm.invariant.group.barrier intrinsic before initializing
      // the vptrs to cancel any previous assumptions we might have made.
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
          CGM.getCodeGenOpts().OptimizationLevel > 0)
        CXXThisValue = Builder.CreateInvariantGroupBarrier(LoadCXXThis());
      InitializeVTablePointers(Dtor->getParent());
    }

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);

    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}
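
// Illustrative note (not from the original source): the destructor variants
// delegate in a chain, as the code above shows. The deleting variant pushes
// the operator delete cleanup and calls the complete variant; the complete
// variant pushes cleanups for virtual bases and calls the base variant; the
// base variant runs the user-written body, after which its cleanups destroy
// fields and non-virtual bases. So for a hypothetical
// \code
//   struct T { ~T(); };
//   void f(T *p) { delete p; }   // deleting -> complete -> base
// \endcode
// the user body executes exactly once, in the base-variant frame.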

void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
  const Stmt *RootS = AssignOp->getBody();
  assert(isa<CompoundStmt>(RootS) &&
         "Body of an implicit assignment operator should be compound stmt.");
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);

  LexicalScope Scope(*this, RootCS->getSourceRange());

  incrementProfileCounter(RootCS);
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
  for (auto *I : RootCS->body())
    AM.emitAssignment(I);
  AM.finish();
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete final : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
    llvm::Value *ShouldDeleteCondition;

  public:
    CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
        : ShouldDeleteCondition(ShouldDeleteCondition) {
      assert(ShouldDeleteCondition != nullptr);
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
      llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
      llvm::Value *ShouldCallDelete
        = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
      CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);

      CGF.EmitBlock(callDeleteBB);
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
      CGF.Builder.CreateBr(continueBB);

      CGF.EmitBlock(continueBB);
    }
  };

  class DestroyField final : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
        : field(field), destroyer(destroyer),
          useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // Find the address of the field.
      Address thisValue = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };

  static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
                                        CharUnits::QuantityType PoisonSize) {
    // Pass the void pointer and the size of the region as arguments to the
    // runtime function.
    llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
                           llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)};

    llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy};

    llvm::FunctionType *FnType =
        llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
    llvm::Value *Fn =
        CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback");
    CGF.EmitNounwindRuntimeCall(Fn, Args);
  }

  class SanitizeDtorMembers final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling object poisoning.
    // Disables tail call elimination, to prevent the current stack frame
    // from disappearing from the stack trace.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const ASTRecordLayout &Layout =
          CGF.getContext().getASTRecordLayout(Dtor->getParent());

      // Nothing to poison.
      if (Layout.getFieldCount() == 0)
        return;

      // Prevent the current stack frame from disappearing from the stack trace.
      CGF.CurFn->addFnAttr("disable-tail-calls", "true");

      // Construct pointer to region to begin poisoning, and calculate poison
      // size, so that only members declared in this class are poisoned.
      ASTContext &Context = CGF.getContext();
      unsigned fieldIndex = 0;
      int startIndex = -1;
      for (const FieldDecl *Field : Dtor->getParent()->fields()) {
        // Poison the field if it is trivially destructible.
        if (FieldHasTrivialDestructorBody(Context, Field)) {
          // Start sanitizing at this field.
          if (startIndex < 0)
            startIndex = fieldIndex;

          // Currently on the last field, and it must be poisoned with the
          // current block.
          if (fieldIndex == Layout.getFieldCount() - 1) {
            PoisonMembers(CGF, startIndex, Layout.getFieldCount());
          }
        } else if (startIndex >= 0) {
          // No longer within a block of memory to poison, so poison the block.
          PoisonMembers(CGF, startIndex, fieldIndex);
          // Reset the start index.
          startIndex = -1;
        }
        fieldIndex += 1;
      }
    }

  private:
    /// \param layoutStartOffset index of the ASTRecordLayout field to
    ///     start poisoning (inclusive)
    /// \param layoutEndOffset index of the ASTRecordLayout field to
    ///     end poisoning (exclusive)
    void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset,
                       unsigned layoutEndOffset) {
      ASTContext &Context = CGF.getContext();
      const ASTRecordLayout &Layout =
          Context.getASTRecordLayout(Dtor->getParent());

      llvm::ConstantInt *OffsetSizePtr = llvm::ConstantInt::get(
          CGF.SizeTy,
          Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset))
              .getQuantity());

      llvm::Value *OffsetPtr = CGF.Builder.CreateGEP(
          CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy),
          OffsetSizePtr);

      CharUnits::QuantityType PoisonSize;
      if (layoutEndOffset >= Layout.getFieldCount()) {
        PoisonSize = Layout.getNonVirtualSize().getQuantity() -
                     Context.toCharUnitsFromBits(
                                Layout.getFieldOffset(layoutStartOffset))
                         .getQuantity();
      } else {
        PoisonSize = Context.toCharUnitsFromBits(
                                Layout.getFieldOffset(layoutEndOffset) -
                                Layout.getFieldOffset(layoutStartOffset))
                         .getQuantity();
      }

      if (PoisonSize == 0)
        return;

      EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize);
    }
  };
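
  // Illustrative note (not from the original source): with
  // -fsanitize-memory-use-after-dtor, SanitizeDtorMembers poisons maximal
  // contiguous runs of trivially destructible fields. In a hypothetical
  // \code
  //   struct S {
  //     int a, b;       // run 1: poisoned together
  //     std::string s;  // non-trivial dtor body, skipped by this cleanup
  //     int c;          // run 2: poisoned separately
  //     ~S();
  //   };
  // \endcode
  // Emit issues two __sanitizer_dtor_callback calls, one covering a and b
  // and one covering c; s is handled by its own destructor.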

  class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling vtable pointer poisoning.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      assert(Dtor->getParent()->isDynamicClass());
      (void)Dtor;
      ASTContext &Context = CGF.getContext();
      // Poison the vtable and vtable pointer if they exist for this class.
      llvm::Value *VTablePtr = CGF.LoadCXXThis();

      CharUnits::QuantityType PoisonSize =
          Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity();
      // Pass the void pointer and the size of the region as arguments to the
      // runtime function.
      EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize);
    }
  };
} // end anonymous namespace

/// \brief Emit all code that comes at the end of a class's destructor.
/// This is to call destructors on members and base classes in reverse order
/// of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling whether we should call delete at the end of the dtor.
      EHStack.pushCleanup<CallDtorDeleteConditional>(
          NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that access after the base
    // and member destructors are invoked is invalid.
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if the class has no virtual bases but
  // inherits virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Poison fields such that access after their destructors are
  // invoked, and before the base class destructor runs, is invalid.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory))
    EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, Field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}
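
// Illustrative note (not from the original source): because cleanups pop in
// LIFO order, the base-variant pushes above run at the end of the destructor
// as: fields in reverse declaration order first (they were pushed last),
// then field poisoning, then non-virtual bases in reverse order. For a
// hypothetical
// \code
//   struct D : B1, B2 { M m1, m2; ~D(); };
// \endcode
// the epilogue after D's body runs ~m2, ~m1, ~B2, ~B1, with virtual bases
// handled separately by the complete variant.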

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors start out pointing at the loop; the "is zero" successor
    // is patched below, once the continuation block exists, to skip the loop.
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayBegin = arrayBase.getPointer();
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
    arrayBase.getAlignment()
             .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, curAddr, E);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}
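
// Illustrative note (not from the original source): the loop above is what a
// non-constant array-new lowers to. For a hypothetical
// \code
//   struct A { A(); ~A(); };
//   A *make(unsigned n) { return new A[n]; }
// \endcode
// codegen emits the isempty check, then a phi-driven loop constructing one
// element per iteration, with a partial-destroy EH cleanup so that elements
// already constructed are destroyed if a later A() throws.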

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, addr);
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating, Address This,
                                             const CXXConstructExpr *E) {
  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));

  // If this is a trivial constructor, emit a memcpy now before we lose
  // the alignment information on the argument.
  // FIXME: It would be better to preserve alignment information into CallArg.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    const Expr *Arg = E->getArg(0);
    QualType SrcTy = Arg->getType();
    Address Src = EmitLValue(Arg).getAddress();
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
    EmitAggregateCopyCtor(This, Src, DestTy, SrcTy);
    return;
  }

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor());

  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args);
}

static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
                                    const CXXConstructorDecl *Ctor,
                                    CXXCtorType Type, CallArgList &Args) {
  // We can't forward a variadic call.
  if (Ctor->isVariadic())
    return false;

  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
    // If the parameters are callee-cleanup, it's not safe to forward.
    for (auto *P : Ctor->parameters())
      if (P->getType().isDestructedType())
        return false;

    // Likewise if they're inalloca.
    const CGFunctionInfo &Info =
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0);
    if (Info.usesInAlloca())
      return false;
  }

  // Anything else should be OK.
  return true;
}
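
// Illustrative note (not from the original source): forwarding fails for a
// hypothetical variadic inherited constructor such as
// \code
//   struct Base { Base(int, ...); };
//   struct Derived : Base { using Base::Base; };
// \endcode
// Since the variadic arguments cannot be re-forwarded from call to call,
// codegen falls back to EmitInlinedInheritingCXXConstructorCall (below)
// instead of emitting a delegating call.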

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             Address This,
                                             CallArgList &Args) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object that
  //   is not of type X, or of a type derived from X, the behavior is undefined.
  // FIXME: Provide a source location here.
  EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, SourceLocation(),
                This.getPointer(), getContext().getRecordType(ClassDecl));

  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(Args.size() == 1 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed. If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");

    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
    Address Src(Args[1].RV.getScalarVal(), getNaturalTypeAlignment(SrcTy));
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    EmitAggregateCopyCtor(This, Src, DestTy, SrcTy);
    return;
  }

  // Check whether we can actually emit the constructor before trying to do so.
  if (auto Inherited = D->getInheritedConstructor()) {
    if (getTypes().inheritingCtorHasParams(Inherited, Type) &&
        !canEmitDelegateCallArgs(*this, D, Type, Args)) {
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
                                              Delegating, Args);
      return;
    }
  }

  // Insert any ABI-specific implicit constructor arguments.
  unsigned ExtraArgs = CGM.getCXXABI().addImplicitConstructorArgs(
      *this, D, Type, ForVirtualBase, Delegating, Args);

  // Emit the call.
  llvm::Value *Callee = CGM.getAddrOfCXXStructor(D, getFromCtorType(Type));
  const CGFunctionInfo &Info =
      CGM.getTypes().arrangeCXXConstructorCall(Args, D, Type, ExtraArgs);
  EmitCall(Info, Callee, ReturnValueSlot(), Args, D);

  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable.  We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure that we can refer to the vtable: here we can
  // refer to it if it's safe to speculatively emit.
  // FIXME: If the vtable is used by the ctor/dtor, or if the vtable is
  // external and we are sure that its definition is not hidden, then we are
  // always safe to refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

void CodeGenFunction::EmitInheritedCXXConstructorCall(
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
  CallArgList Args;
  CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType(getContext()),
                  /*NeedsCopy=*/false);

  // Forward the parameters.
  if (InheritedFromVBase &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // Nothing to do; this construction is not responsible for constructing
    // the base class containing the inherited constructor.
    // FIXME: Can we just pass undefs for the remaining arguments if we don't
    // have constructor variants?
    Args.push_back(ThisArg);
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
    // The inheriting constructor was inlined; just inject its arguments.
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
           "wrong number of parameters for inherited constructor call");
    Args = CXXInheritedCtorInitExprArgs;
    Args[0] = ThisArg;
  } else {
    // The inheriting constructor was not inlined. Emit delegating arguments.
    Args.push_back(ThisArg);
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
    assert(OuterCtor->getNumParams() == D->getNumParams());
    assert(!OuterCtor->isVariadic() && "should have been inlined");

    for (const auto *Param : OuterCtor->parameters()) {
      assert(getContext().hasSameUnqualifiedType(
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
          Param->getType()));
      EmitDelegateCallArg(Args, Param, E->getLocation());

      // Forward __attribute__((pass_object_size)).
      if (Param->hasAttr<PassObjectSizeAttr>()) {
        auto *POSParam = SizeArguments[Param];
        assert(POSParam && "missing pass_object_size value for forwarding");
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
      }
    }
  }

  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
                         This, Args);
}

void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
    bool Delegating, CallArgList &Args) {
  InlinedInheritingConstructorScope Scope(*this, GlobalDecl(Ctor, CtorType));

  // Save the arguments to be passed to the inherited constructor.
  CXXInheritedCtorInitExprArgs = Args;

  FunctionArgList Params;
  QualType RetType = BuildFunctionArgList(CurGD, Params);
  FnRetTy = RetType;

  // Insert any ABI-specific implicit constructor arguments.
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
                                             ForVirtualBase, Delegating, Args);

  // Emit a simplified prolog. We only need to emit the implicit params.
  assert(Args.size() >= Params.size() && "too few arguments for call");
  for (unsigned I = 0, N = Args.size(); I != N; ++I) {
    if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
      const RValue &RV = Args[I].RV;
      assert(!RV.isComplex() && "complex indirect params not supported");
      ParamValue Val = RV.isScalar()
                           ? ParamValue::forDirect(RV.getScalarVal())
                           : ParamValue::forIndirect(RV.getAggregateAddress());
      EmitParmDecl(*Params[I], Val, I + 1);
    }
  }

  // Create a return value slot if the ABI implementation wants one.
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
  // value instead.
  if (!RetType->isVoidType())
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Directly emit the constructor initializers.
  EmitCtorPrologue(Ctor, CtorType, Params);
}

void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}

void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src.getPointer()), QT);

  // Skip over the first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCXXConstructorCall(D, Ctor_Complete, false, false, This, Args);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  Address This = LoadCXXThisAddress();
  DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType());
  ++I;

  // FIXME: The location of the VTT parameter in the parameter list is
  // specific to the Itanium ABI and shouldn't be hardcoded here.
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    assert(I != E && "cannot skip vtt parameter, already done with args");
    assert((*I)->getType()->isPointerType() &&
           "skipping parameter not of vtt type");
    ++I;
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
                         /*Delegating=*/true, This, DelegateArgs);
}

namespace {
  struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    Address Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                /*Delegating=*/true, Addr);
    }
  };
} // end anonymous namespace

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}
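
// Illustrative note (not from the original source): for a hypothetical
// delegating constructor
// \code
//   struct W {
//     W(int n);
//     W() : W(42) {}
//   };
// \endcode
// the target constructor W(int) is evaluated as an aggregate initialization
// of *this, and, when exceptions are enabled and ~W is non-trivial, an
// EH-only cleanup is pushed so the fully constructed object is destroyed if
// the delegating constructor's remaining work throws.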
2390  
EmitCXXDestructorCall(const CXXDestructorDecl * DD,CXXDtorType Type,bool ForVirtualBase,bool Delegating,Address This)2391  void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
2392                                              CXXDtorType Type,
2393                                              bool ForVirtualBase,
2394                                              bool Delegating,
2395                                              Address This) {
2396    CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
2397                                       Delegating, This);
2398  }
2399  
2400  namespace {
2401    struct CallLocalDtor final : EHScopeStack::Cleanup {
2402      const CXXDestructorDecl *Dtor;
2403      Address Addr;
2404  
CallLocalDtor__anon6bf721160611::CallLocalDtor2405      CallLocalDtor(const CXXDestructorDecl *D, Address Addr)
2406        : Dtor(D), Addr(Addr) {}
2407  
Emit__anon6bf721160611::CallLocalDtor2408      void Emit(CodeGenFunction &CGF, Flags flags) override {
2409        CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
2410                                  /*ForVirtualBase=*/false,
2411                                  /*Delegating=*/false, Addr);
2412      }
2413    };
2414  } // end anonymous namespace
2415  
2416  void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
2417                                              Address Addr) {
2418    EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
2419  }
2420  
2421  void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
2422    CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
2423    if (!ClassDecl) return;
2424    if (ClassDecl->hasTrivialDestructor()) return;
2425  
2426    const CXXDestructorDecl *D = ClassDecl->getDestructor();
2427    assert(D && D->isUsed() && "destructor not marked as used!");
2428    PushDestructorCleanup(D, Addr);
2429  }
2430  
2431  void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
2432    // Compute the address point.
2433    llvm::Value *VTableAddressPoint =
2434        CGM.getCXXABI().getVTableAddressPointInStructor(
2435            *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);
2436  
2437    if (!VTableAddressPoint)
2438      return;
2439  
2440    // Compute where to store the address point.
2441    llvm::Value *VirtualOffset = nullptr;
2442    CharUnits NonVirtualOffset = CharUnits::Zero();
2443  
2444    if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
2445      // We need to use the virtual base offset offset because the virtual base
2446      // might have a different offset in the most derived class.
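          // (The "offset offset" is the position within the vtable of the entry
          // that holds the virtual base offset.) For example, in the diamond:
          //
          //   struct A { virtual void f(); };
          //   struct B : virtual A { int b; };
          //   struct C : virtual A { int c; };
          //   struct D : B, C { };
          //
          // A lands at a different offset in a standalone B than in a complete
          // D, so when B's constructor runs as a base of D it must load the
          // virtual base offset from the vtable instead of using a constant.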
2447  
2448      VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
2449          *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
2450      NonVirtualOffset = Vptr.OffsetFromNearestVBase;
2451    } else {
2452      // We can just use the base offset in the complete class.
2453      NonVirtualOffset = Vptr.Base.getBaseOffset();
2454    }
2455  
2456    // Apply the offsets.
2457    Address VTableField = LoadCXXThisAddress();
2458  
2459    if (!NonVirtualOffset.isZero() || VirtualOffset)
2460      VTableField = ApplyNonVirtualAndVirtualOffset(
2461          *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
2462          Vptr.NearestVBase);
2463  
2464    // Finally, store the address point. Use the same LLVM types as the field to
2465    // support optimization.
2466    llvm::Type *VTablePtrTy =
2467        llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true)
2468            ->getPointerTo()
2469            ->getPointerTo();
2470    VTableField = Builder.CreateBitCast(VTableField, VTablePtrTy->getPointerTo());
2471    VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);
2472  
2473    llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
2474    CGM.DecorateInstructionWithTBAA(Store, CGM.getTBAAInfoForVTablePtr());
2475    if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2476        CGM.getCodeGenOpts().StrictVTablePointers)
2477      CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
2478  }
2479  
2480  CodeGenFunction::VPtrsVector
2481  CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
2482    CodeGenFunction::VPtrsVector VPtrsResult;
2483    VisitedVirtualBasesSetTy VBases;
2484    getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
2485                      /*NearestVBase=*/nullptr,
2486                      /*OffsetFromNearestVBase=*/CharUnits::Zero(),
2487                      /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
2488                      VPtrsResult);
2489    return VPtrsResult;
2490  }
2491  
2492  void CodeGenFunction::getVTablePointers(BaseSubobject Base,
2493                                          const CXXRecordDecl *NearestVBase,
2494                                          CharUnits OffsetFromNearestVBase,
2495                                          bool BaseIsNonVirtualPrimaryBase,
2496                                          const CXXRecordDecl *VTableClass,
2497                                          VisitedVirtualBasesSetTy &VBases,
2498                                          VPtrsVector &Vptrs) {
2499    // If this base is a non-virtual primary base, the address point has already
2500    // been set.
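        // (A dynamic class shares its vptr with its non-virtual primary base,
        // so storing the derived class's address point also covers that base
        // subobject.)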
2501    if (!BaseIsNonVirtualPrimaryBase) {
2502      // Initialize the vtable pointer for this base.
2503      VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
2504      Vptrs.push_back(Vptr);
2505    }
2506  
2507    const CXXRecordDecl *RD = Base.getBase();
2508  
2509    // Traverse bases.
2510    for (const auto &I : RD->bases()) {
2511      CXXRecordDecl *BaseDecl
2512        = cast<CXXRecordDecl>(I.getType()->getAs<RecordType>()->getDecl());
2513  
2514      // Ignore classes without a vtable.
2515      if (!BaseDecl->isDynamicClass())
2516        continue;
2517  
2518      CharUnits BaseOffset;
2519      CharUnits BaseOffsetFromNearestVBase;
2520      bool BaseDeclIsNonVirtualPrimaryBase;
2521  
2522      if (I.isVirtual()) {
2523        // Check if we've visited this virtual base before.
2524        if (!VBases.insert(BaseDecl).second)
2525          continue;
2526  
2527        const ASTRecordLayout &Layout =
2528          getContext().getASTRecordLayout(VTableClass);
2529  
2530        BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
2531        BaseOffsetFromNearestVBase = CharUnits::Zero();
2532        BaseDeclIsNonVirtualPrimaryBase = false;
2533      } else {
2534        const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
2535  
2536        BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
2537        BaseOffsetFromNearestVBase =
2538          OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
2539        BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
2540      }
2541  
2542      getVTablePointers(
2543          BaseSubobject(BaseDecl, BaseOffset),
2544          I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
2545          BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
2546    }
2547  }
2548  
2549  void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
2550    // Ignore classes without a vtable.
2551    if (!RD->isDynamicClass())
2552      return;
2553  
2554    // Initialize the vtable pointers for this class and all of its bases.
2555    if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
2556      for (const VPtr &Vptr : getVTablePointers(RD))
2557        InitializeVTablePointer(Vptr);
2558  
2559    if (RD->getNumVBases())
2560      CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
2561  }
2562  
2563  llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
2564                                             llvm::Type *VTableTy,
2565                                             const CXXRecordDecl *RD) {
2566    Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
2567    llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
2568    CGM.DecorateInstructionWithTBAA(VTable, CGM.getTBAAInfoForVTablePtr());
2569  
2570    if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
2571        CGM.getCodeGenOpts().StrictVTablePointers)
2572      CGM.DecorateInstructionWithInvariantGroup(VTable, RD);
2573  
2574    return VTable;
2575  }
2576  
2577  // If a class has a single non-virtual base, adds no fields, and does not
2578  // introduce or override virtual member functions, it has the same layout as
2579  // its base. This function returns the least derived such class.
2580  //
2581  // Casting an instance of a base class to such a derived class is technically
2582  // undefined behavior, but it is a relatively common hack for introducing member
2583  // functions on class instances with specific properties (e.g. llvm::Operator)
2584  // that works under most compilers and should not have security implications, so
2585  // we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict.
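      // For example (hypothetical types):
      //
      //   struct Base { int X; virtual void f(); };
      //   struct Accessor : Base {   // adds no fields or virtual functions
      //     int getX() const { return X; }
      //   };
      //
      // casting a Base* to Accessor* just to call getX() relies on the two
      // classes having identical layout, which is what this predicate checks.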
2586  static const CXXRecordDecl *
2587  LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
2588    if (!RD->field_empty())
2589      return RD;
2590  
2591    if (RD->getNumVBases() != 0)
2592      return RD;
2593  
2594    if (RD->getNumBases() != 1)
2595      return RD;
2596  
2597    for (const CXXMethodDecl *MD : RD->methods()) {
2598      if (MD->isVirtual()) {
2599        // Virtual member functions are only ok if they are implicit destructors
2600        // because the implicit destructor will have the same semantics as the
2601        // base class's destructor if no fields are added.
2602        if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
2603          continue;
2604        return RD;
2605      }
2606    }
2607  
2608    return LeastDerivedClassWithSameLayout(
2609        RD->bases_begin()->getType()->getAsCXXRecordDecl());
2610  }
2611  
2612  void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
2613                                                     llvm::Value *VTable,
2614                                                     SourceLocation Loc) {
2615    if (CGM.getCodeGenOpts().WholeProgramVTables &&
2616        CGM.HasHiddenLTOVisibility(RD)) {
2617      llvm::Metadata *MD =
2618          CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2619      llvm::Value *TypeId =
2620          llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2621  
2622      llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2623      llvm::Value *TypeTest =
2624          Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
2625                             {CastedVTable, TypeId});
2626      Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
2627    }
2628  
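        // Conceptually, the block above emits
        //
        //   %ok = call i1 @llvm.type.test(i8* %vtable, metadata !"<type id>")
        //   call void @llvm.assume(i1 %ok)
        //
        // recording that the vtable belongs to RD or one of its derived
        // classes, which enables whole-program devirtualization without a
        // runtime check.
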
2629    if (SanOpts.has(SanitizerKind::CFIVCall))
2630      EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
2631  }
2632  
2633  void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
2634                                                  llvm::Value *VTable,
2635                                                  CFITypeCheckKind TCK,
2636                                                  SourceLocation Loc) {
2637    if (!SanOpts.has(SanitizerKind::CFICastStrict))
2638      RD = LeastDerivedClassWithSameLayout(RD);
2639  
2640    EmitVTablePtrCheck(RD, VTable, TCK, Loc);
2641  }
2642  
2643  void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
2644                                                  llvm::Value *Derived,
2645                                                  bool MayBeNull,
2646                                                  CFITypeCheckKind TCK,
2647                                                  SourceLocation Loc) {
2648    if (!getLangOpts().CPlusPlus)
2649      return;
2650  
2651    auto *ClassTy = T->getAs<RecordType>();
2652    if (!ClassTy)
2653      return;
2654  
2655    const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());
2656  
2657    if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
2658      return;
2659  
2660    if (!SanOpts.has(SanitizerKind::CFICastStrict))
2661      ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);
2662  
2663    llvm::BasicBlock *ContBlock = nullptr;
2664  
2665    if (MayBeNull) {
2666      llvm::Value *DerivedNotNull =
2667          Builder.CreateIsNotNull(Derived, "cast.nonnull");
2668  
2669      llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
2670      ContBlock = createBasicBlock("cast.cont");
2671  
2672      Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);
2673  
2674      EmitBlock(CheckBlock);
2675    }
2676  
2677    llvm::Value *VTable =
2678      GetVTablePtr(Address(Derived, getPointerAlign()), Int8PtrTy, ClassDecl);
2679  
2680    EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);
2681  
2682    if (MayBeNull) {
2683      Builder.CreateBr(ContBlock);
2684      EmitBlock(ContBlock);
2685    }
2686  }
2687  
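      // For example (hypothetical types), with -fsanitize=cfi-derived-cast the
      // cast in
      //
      //   Derived *d = static_cast<Derived *>(b);   // b is a Base *
      //
      // is routed through EmitVTablePtrCheckForCast above, and the check below
      // verifies that b's vtable pointer is valid for Derived.
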
2688  void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
2689                                           llvm::Value *VTable,
2690                                           CFITypeCheckKind TCK,
2691                                           SourceLocation Loc) {
2692    if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
2693        !CGM.HasHiddenLTOVisibility(RD))
2694      return;
2695  
2696    std::string TypeName = RD->getQualifiedNameAsString();
2697    if (getContext().getSanitizerBlacklist().isBlacklistedType(TypeName))
2698      return;
2699  
2700    SanitizerScope SanScope(this);
2701    llvm::SanitizerStatKind SSK;
2702    switch (TCK) {
2703    case CFITCK_VCall:
2704      SSK = llvm::SanStat_CFI_VCall;
2705      break;
2706    case CFITCK_NVCall:
2707      SSK = llvm::SanStat_CFI_NVCall;
2708      break;
2709    case CFITCK_DerivedCast:
2710      SSK = llvm::SanStat_CFI_DerivedCast;
2711      break;
2712    case CFITCK_UnrelatedCast:
2713      SSK = llvm::SanStat_CFI_UnrelatedCast;
2714      break;
2715    case CFITCK_ICall:
2716      llvm_unreachable("not expecting CFITCK_ICall");
2717    }
2718    EmitSanitizerStatReport(SSK);
2719  
2720    llvm::Metadata *MD =
2721        CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2722    llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);
2723  
2724    llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2725    llvm::Value *TypeTest = Builder.CreateCall(
2726        CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});
2727  
2728    SanitizerMask M;
2729    switch (TCK) {
2730    case CFITCK_VCall:
2731      M = SanitizerKind::CFIVCall;
2732      break;
2733    case CFITCK_NVCall:
2734      M = SanitizerKind::CFINVCall;
2735      break;
2736    case CFITCK_DerivedCast:
2737      M = SanitizerKind::CFIDerivedCast;
2738      break;
2739    case CFITCK_UnrelatedCast:
2740      M = SanitizerKind::CFIUnrelatedCast;
2741      break;
2742    case CFITCK_ICall:
2743      llvm_unreachable("not expecting CFITCK_ICall");
2744    }
2745  
2746    llvm::Constant *StaticData[] = {
2747        llvm::ConstantInt::get(Int8Ty, TCK),
2748        EmitCheckSourceLocation(Loc),
2749        EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
2750    };
2751  
2752    auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
2753    if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
2754      EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData);
2755      return;
2756    }
2757  
2758    if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
2759      EmitTrapCheck(TypeTest);
2760      return;
2761    }
2762  
2763    llvm::Value *AllVtables = llvm::MetadataAsValue::get(
2764        CGM.getLLVMContext(),
2765        llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
2766    llvm::Value *ValidVtable = Builder.CreateCall(
2767        CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables});
2768    EmitCheck(std::make_pair(TypeTest, M), "cfi_check_fail", StaticData,
2769              {CastedVTable, ValidVtable});
2770  }
2771  
2772  bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
2773    if (!CGM.getCodeGenOpts().WholeProgramVTables ||
2774        !SanOpts.has(SanitizerKind::CFIVCall) ||
2775        !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall) ||
2776        !CGM.HasHiddenLTOVisibility(RD))
2777      return false;
2778  
2779    std::string TypeName = RD->getQualifiedNameAsString();
2780    return !getContext().getSanitizerBlacklist().isBlacklistedType(TypeName);
2781  }
2782  
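      // Conceptually, the checked load below emits
      //
      //   %pair = call {i8*, i1} @llvm.type.checked.load(i8* %vtable,
      //                              i32 <byte offset>, metadata !"<type id>")
      //   %fn = extractvalue {i8*, i1} %pair, 0  ; virtual function pointer
      //   %ok = extractvalue {i8*, i1} %pair, 1  ; CFI check result
      //
      // fusing the vtable load and the CFI vcall check into one intrinsic that
      // the optimizer can devirtualize or lower as a unit.
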
2783  llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
2784      const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) {
2785    SanitizerScope SanScope(this);
2786  
2787    EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);
2788  
2789    llvm::Metadata *MD =
2790        CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
2791    llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);
2792  
2793    llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
2794    llvm::Value *CheckedLoad = Builder.CreateCall(
2795        CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
2796        {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset),
2797         TypeId});
2798    llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
2799  
2800    EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
2801              "cfi_check_fail", nullptr, nullptr);
2802  
2803    return Builder.CreateBitCast(
2804        Builder.CreateExtractValue(CheckedLoad, 0),
2805        cast<llvm::PointerType>(VTable->getType())->getElementType());
2806  }
2807  
2808  // FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
2809  // quite what we want.
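      // For example, '(__extension__ (e))' and a no-op cast wrapped around 'e'
      // are both reduced to 'e'.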
2810  static const Expr *skipNoOpCastsAndParens(const Expr *E) {
2811    while (true) {
2812      if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
2813        E = PE->getSubExpr();
2814        continue;
2815      }
2816  
2817      if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
2818        if (CE->getCastKind() == CK_NoOp) {
2819          E = CE->getSubExpr();
2820          continue;
2821        }
2822      }
2823      if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
2824        if (UO->getOpcode() == UO_Extension) {
2825          E = UO->getSubExpr();
2826          continue;
2827        }
2828      }
2829      return E;
2830    }
2831  }
2832  
2833  bool
2834  CodeGenFunction::CanDevirtualizeMemberFunctionCall(const Expr *Base,
2835                                                     const CXXMethodDecl *MD) {
2836    // When building with -fapple-kext, all calls must go through the vtable since
2837    // the kernel linker can do runtime patching of vtables.
2838    if (getLangOpts().AppleKext)
2839      return false;
2840  
2841    // If the most derived class is marked final, we know that no subclass can
2842    // override this member function and so we can devirtualize it. For example:
2843    //
2844    // struct A { virtual void f(); };
2845    // struct B final : A { };
2846    //
2847    // void f(B *b) {
2848    //   b->f();
2849    // }
2850    //
2851    const CXXRecordDecl *MostDerivedClassDecl = Base->getBestDynamicClassType();
2852    if (MostDerivedClassDecl->hasAttr<FinalAttr>())
2853      return true;
2854  
2855    // If the member function is marked 'final', we know that it can't be
2856    // overridden, so we can devirtualize the call.
2857    if (MD->hasAttr<FinalAttr>())
2858      return true;
2859  
2860    // Similarly, if the class itself is marked 'final', no class can derive
2861    // from it, so the member function can't be overridden and we can devirtualize.
2862    if (MD->getParent()->hasAttr<FinalAttr>())
2863      return true;
2864  
2865    Base = skipNoOpCastsAndParens(Base);
2866    if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
2867      if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
2868        // A variable of record type has exactly that dynamic type; devirtualize.
2869        return VD->getType()->isRecordType();
2870      }
2871  
2872      return false;
2873    }
2874  
2875    // We can devirtualize calls on an object accessed by a class member access
2876    // expression, since by C++11 [basic.life]p6 we know that it can't refer to
2877    // a derived class object constructed in the same location.
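        // For example (hypothetical types), in
        //
        //   struct Wrapper { A a; };
        //   void g(Wrapper *w) { w->a.vfn(); }
        //
        // 'w->a' must denote an actual A subobject rather than some derived
        // object, so the virtual call can be devirtualized.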
2878    if (const MemberExpr *ME = dyn_cast<MemberExpr>(Base))
2879      if (const ValueDecl *VD = dyn_cast<ValueDecl>(ME->getMemberDecl()))
2880        return VD->getType()->isRecordType();
2881  
2882    // We can always devirtualize calls on temporary object expressions.
2883    if (isa<CXXConstructExpr>(Base))
2884      return true;
2885  
2886    // And calls on bound temporaries.
2887    if (isa<CXXBindTemporaryExpr>(Base))
2888      return true;
2889  
2890    // Check if this is a call expr that returns a record type.
2891    if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
2892      return CE->getCallReturnType(getContext())->isRecordType();
2893  
2894    // We can't devirtualize the call.
2895    return false;
2896  }
2897  
2898  void CodeGenFunction::EmitForwardingCallToLambda(
2899                                        const CXXMethodDecl *callOperator,
2900                                        CallArgList &callArgs) {
2901    // Get the address of the call operator.
2902    const CGFunctionInfo &calleeFnInfo =
2903      CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
2904    llvm::Value *callee =
2905      CGM.GetAddrOfFunction(GlobalDecl(callOperator),
2906                            CGM.getTypes().GetFunctionType(calleeFnInfo));
2907  
2908    // Prepare the return slot.
2909    const FunctionProtoType *FPT =
2910      callOperator->getType()->castAs<FunctionProtoType>();
2911    QualType resultType = FPT->getReturnType();
2912    ReturnValueSlot returnSlot;
2913    if (!resultType->isVoidType() &&
2914        calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
2915        !hasScalarEvaluationKind(calleeFnInfo.getReturnType()))
2916      returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());
2917  
2918    // We don't need to separately arrange the call arguments because
2919    // the call can't be variadic anyway --- it's impossible to forward
2920    // variadic arguments.
2921  
2922    // Now emit our call.
2923    RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
2924                         callArgs, callOperator);
2925  
2926    // If necessary, copy the returned value into the slot.
2927    if (!resultType->isVoidType() && returnSlot.isNull())
2928      EmitReturnOfRValue(RV, resultType);
2929    else
2930      EmitBranchThroughCleanup(ReturnBlock);
2931  }
2932  
2933  void CodeGenFunction::EmitLambdaBlockInvokeBody() {
2934    const BlockDecl *BD = BlockInfo->getBlockDecl();
2935    const VarDecl *variable = BD->capture_begin()->getVariable();
2936    const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
2937  
2938    // Start building arguments for forwarding call
2939    CallArgList CallArgs;
2940  
2941    QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
2942    Address ThisPtr = GetAddrOfBlockDecl(variable, false);
2943    CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType);
2944  
2945    // Add the rest of the parameters.
2946    for (auto param : BD->parameters())
2947      EmitDelegateCallArg(CallArgs, param, param->getLocStart());
2948  
2949    assert(!Lambda->isGenericLambda() &&
2950              "generic lambda interconversion to block not implemented");
2951    EmitForwardingCallToLambda(Lambda->getLambdaCallOperator(), CallArgs);
2952  }
2953  
2954  void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
2955    if (cast<CXXMethodDecl>(CurCodeDecl)->isVariadic()) {
2956      // FIXME: Making this work correctly is nasty because it requires either
2957      // cloning the body of the call operator or making the call operator forward.
2958      CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
2959      return;
2960    }
2961  
2962    EmitFunctionBody(Args, cast<FunctionDecl>(CurGD.getDecl())->getBody());
2963  }
2964  
2965  void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
2966    const CXXRecordDecl *Lambda = MD->getParent();
2967  
2968    // Start building arguments for forwarding call
2969    CallArgList CallArgs;
2970  
2971    QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda));
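        // The static invoker has no lambda object, so pass an undef 'this'; the
        // call operator of a captureless lambda never dereferences it.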
2972    llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
2973    CallArgs.add(RValue::get(ThisPtr), ThisType);
2974  
2975    // Add the rest of the parameters.
2976    for (auto Param : MD->parameters())
2977      EmitDelegateCallArg(CallArgs, Param, Param->getLocStart());
2978  
2979    const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
2980    // For a generic lambda, find the corresponding call operator specialization
2981    // to which the call to the static-invoker shall be forwarded.
2982    if (Lambda->isGenericLambda()) {
2983      assert(MD->isFunctionTemplateSpecialization());
2984      const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
2985      FunctionTemplateDecl *CallOpTemplate = CallOp->getDescribedFunctionTemplate();
2986      void *InsertPos = nullptr;
2987      FunctionDecl *CorrespondingCallOpSpecialization =
2988          CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
2989      assert(CorrespondingCallOpSpecialization);
2990      CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
2991    }
2992    EmitForwardingCallToLambda(CallOp, CallArgs);
2993  }
2994  
2995  void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
2996    if (MD->isVariadic()) {
2997      // FIXME: Making this work correctly is nasty because it requires either
2998      // cloning the body of the call operator or making the call operator forward.
2999      CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
3000      return;
3001    }
3002  
3003    EmitLambdaDelegatingInvokeBody(MD);
3004  }
3005