//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}
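
// Editor's note: an illustrative sketch, not part of the original source.
// The types below are hypothetical; the layout assumes a typical
// Itanium-ABI ordering of non-virtual bases. A cast path through a base
// that is not at offset zero folds to a single constant adjustment:
//
//   struct A { int a; };      // A sits at offset 0 within C
//   struct B { int b; };      // B sits at offset sizeof(A) within C
//   struct C : A, B { };
//
//   B *toB(C *c) { return c; }  // adds the constant offset of B in C
//
// For the A path the accumulated offset is zero, so
// GetNonVirtualBaseClassOffset returns null and no adjustment is emitted.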

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}
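
// Editor's note: an illustrative sketch, not part of the original source;
// the names below are hypothetical. In a complete destructor the dynamic
// type is known, so even a virtual base can be reached through the constant
// getVBaseClassOffset() from the record layout, with no vtable lookup:
//
//   struct V { int v; };
//   struct D : virtual V { ~D(); };   // D's complete dtor knows V's offset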

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                CharUnits NonVirtual, llvm::Value *Virtual) {
  llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (!NonVirtual.isZero())
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy,
                                              NonVirtual.getQuantity());

  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, CGF.Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  if (NonVirtualOffset.isZero() && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  if (VBase) {
    if (Derived->hasAttr<FinalAttr>()) {
      VirtualOffset = 0;

      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);

      CharUnits VBaseOffset = Layout.getVBaseClassOffset(VBase);
      NonVirtualOffset += VBaseOffset;
    } else
      VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}
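
// Editor's note: an illustrative sketch, not part of the original source;
// the names are hypothetical. When the path begins at a virtual base of a
// non-final class, the offset must be loaded from the vtable at run time,
// and with NullCheckValue the adjustment is guarded so a null source
// pointer maps to a null result:
//
//   struct V { virtual ~V(); };
//   struct D : virtual V { };
//
//   V *up(D *d) { return d; }   // vbase offset load; a null check may be
//                               // emitted when d can legitimately be null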

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}
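
// Editor's note: an illustrative sketch, not part of the original source,
// assuming the Itanium C++ ABI; the names are hypothetical. A VTT ("vtable
// table") is needed when base-variant constructors must install vtables
// that differ from the complete object's:
//
//   struct V { virtual void f(); };
//   struct A : virtual V { A(); };
//   struct B : virtual V { B(); };
//   struct C : A, B { C(); };
//
// C's complete constructor (C1) constructs the shared V once, then calls
// the base variants (C2) of A and B, handing each a slice of C's VTT so
// they can set up the appropriate construction vtables.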

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

static bool hasTrivialCopyOrMoveConstructor(const CXXRecordDecl *Record,
                                            bool Moving) {
  return Moving ? Record->hasTrivialMoveConstructor() :
                  Record->hasTrivialCopyConstructor();
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->isIndirectMemberInitializer()) {
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr,
                                           MemberInit->getIndirectMember(), 0);
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    const CXXRecordDecl *Record = BaseElementTy->getAsCXXRecordDecl();
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (Record && hasTrivialCopyOrMoveConstructor(Record,
                       Constructor->isMoveConstructor()))) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in a copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOpts().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}
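
// Editor's note: an illustrative sketch, not part of the original source;
// the type is hypothetical and the mangled names assume the Itanium ABI.
// For a class with no virtual bases, the complete and base variants do the
// same work, so the complete one can simply forward:
//
//   struct S { S(int n); int x; };
//
// Here _ZN1SC1Ei (complete) may be emitted as a plain call to _ZN1SC2Ei
// (base) instead of duplicating the prologue and body.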

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}
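
// Editor's note: an illustrative sketch, not part of the original source;
// the names are hypothetical. The prologue above runs base initializers
// first, then installs the vtable pointers, then runs member initializers
// in declaration order, per C++ [class.base.init]:
//
//   struct Base { Base(); };
//   struct S : Base {
//     int a, b;
//     S() : b(2), a(1), Base() {}   // emitted as: Base(), vptrs, a, then b
//   };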

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}
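
// Editor's note: an illustrative sketch, not part of the original source;
// the type is hypothetical. If the destructor body is empty and no field
// needs a non-trivial destructor body, nothing in the destructor can
// observe the vtable pointer, so re-initializing it can be skipped:
//
//   struct S {
//     virtual void f();
//     int x;
//     ~S() {}   // trivial body, scalar member: vptr stores are elided
//   };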

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getContext().getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}
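
// Editor's note: an illustrative summary, not part of the original source,
// assuming the Itanium C++ ABI; the type is hypothetical. A destructor is
// emitted in up to three variants, and the switch above implements their
// delegation chain:
//
//   struct S { virtual ~S(); };
//
//   _ZN1SD0Ev  (deleting): calls the complete dtor, then operator delete
//   _ZN1SD1Ev  (complete): calls the base dtor, then destroys virtual bases
//   _ZN1SD2Ev  (base):     runs the body, members, and non-virtual bases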

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      LValue LV = CGF.EmitLValueForField(thisValue, field, /*CVRQualifiers=*/0);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Emit all code that comes at the end of a class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
        ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}
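
// Editor's note: an illustrative sketch, not part of the original source;
// the types are hypothetical. Because cleanups are pushed in forward order
// and popped in reverse, subobjects are destroyed in the reverse order of
// their construction:
//
//   struct M1 { ~M1(); };
//   struct M2 { ~M2(); };
//   struct S { M1 a; M2 b; ~S(); };  // ~S body, then b.~M2(), then a.~M1()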

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.
  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}
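
// Editor's note: an illustrative sketch, not part of the original source,
// of the control flow the function above emits for 'new A[n]' with a
// non-constant n (block names match the ones created above):
//
//   if (n == 0) goto cont;         // zeroCheckBranch, patched at the end
//   A *end = begin + n;            // arrayctor.end
//   A *cur = begin;                // phi in arrayctor.loop
//   do {
//     A::A(cur);                   // with a partial-destroy EH cleanup
//   } while (++cur != end);        // arrayctor.done
//   cont:;                         // arrayctor.cont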

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI && CGM.getCodeGenOpts().LimitDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeFunctionCall(Args, FPT), Callee,
           ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}
  1107. namespace {
  1108. struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
  1109. const CXXDestructorDecl *Dtor;
  1110. llvm::Value *Addr;
  1111. CXXDtorType Type;
  1112. CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
  1113. CXXDtorType Type)
  1114. : Dtor(D), Addr(Addr), Type(Type) {}
  1115. void Emit(CodeGenFunction &CGF, Flags flags) {
  1116. CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
  1117. Addr);
  1118. }
  1119. };
  1120. }
void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}
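
/// EmitCXXDestructorCall - Emit a call to the given variant of the given
/// destructor on 'This', passing a VTT parameter when one is required.
/// Under -fapple-kext the call is forced through the vtable so the kernel
/// linker can patch it.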
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getContext().getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}
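
// A cleanup that runs the complete-object destructor on the object at the
// given address.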
namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}
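
/// PushDestructorCleanup - Push a cleanup that calls the given destructor
/// on the given address, on both the normal and the exceptional paths.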
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}
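
/// PushDestructorCleanup - Convenience overload that looks up the
/// destructor from the type; a no-op for non-class types and for classes
/// with trivial destructors.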
void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}
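
/// GetVirtualBaseClassOffset - Load, from the vbase-offset slot in the
/// vtable of 'This', the offset of the given virtual base within the
/// class, returned as a ptrdiff_t-typed value.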
llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}
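
/// InitializeVTablePointer - Store the correct vtable address point into
/// the vptr of the given base subobject. Inside a constructor or
/// destructor that needs a VTT, the address point is loaded from the VTT
/// rather than taken from the global vtable.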
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}
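
/// InitializeVTablePointers - Initialize the vptr of this base subobject
/// (unless it was already set together with a non-virtual primary base)
/// and recurse into all dynamic bases, visiting each virtual base only
/// once.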
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}
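
/// InitializeVTablePointers - Initialize all of the vtable pointers of the
/// object currently being constructed.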
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}
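
/// GetVTablePtr - Load the vtable pointer stored at the start of the
/// object at 'This'.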
llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  return Builder.CreateLoad(VTablePtrSrc, "vtable");
}
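
/// getMostDerivedClassDecl - Strip parentheses, derived-to-base casts, and
/// no-op casts from the expression to find the static type of the most
/// derived object it refers to.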
static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;
  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }

    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden, and we can therefore devirtualize calls to it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class itself is marked 'final', no class can derive
  // from it, so the method can't be overridden and we can devirtualize the
  // member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // If the variable itself has record type (it is not a pointer or
      // reference), its dynamic type is known exactly and we can
      // devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}
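
/// UseVirtualCall - Decide whether a call to the given operator must be
/// emitted as a virtual call: non-virtual methods never are, -fapple-kext
/// always forces the vtable, and otherwise the call is virtual unless it
/// can be devirtualized.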
static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}
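
/// EmitCXXOperatorMemberCallee - Compute the callee for an overloaded
/// operator member call: a load through the vtable for a true virtual
/// call, or the direct function address otherwise.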
llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
                             CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}
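
/// EmitForwardingCallToLambda - Call the given lambda's call operator,
/// forwarding the prepared argument list; reuse the current function's
/// indirect return slot when there is one, and otherwise forward the
/// result as the current function's return value.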
void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *Lambda,
                                                 CallArgList &CallArgs) {
  // Look up the call operator.
  DeclarationName Name
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  DeclContext::lookup_const_result Calls = Lambda->lookup(Name);
  CXXMethodDecl *CallOperator = cast<CXXMethodDecl>(*Calls.first++);
  const FunctionProtoType *FPT =
    CallOperator->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();

  // Get the address of the call operator.
  GlobalDecl GD(CallOperator);
  const CGFunctionInfo &CalleeFnInfo =
    CGM.getTypes().arrangeFunctionCall(ResultType, CallArgs, FPT->getExtInfo(),
                                       RequiredArgs::forPrototypePlus(FPT, 1));
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(CalleeFnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty);

  // Determine whether we have a return value slot to use.
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  RValue RV = EmitCall(CalleeFnInfo, Callee, Slot, CallArgs, CallOperator);

  // Forward the returned value.
  if (!ResultType->isVoidType() && Slot.isNull())
    EmitReturnOfRValue(RV, ResultType);
}
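
/// EmitLambdaBlockInvokeBody - Emit the body of the block produced by a
/// lambda-to-block-pointer conversion: forward the block's parameters to
/// the call operator of the captured lambda.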
void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}
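
/// EmitLambdaToBlockPointerBody - Emit the body used for a
/// lambda-to-block-pointer conversion by emitting the function body in
/// place; variadic call operators are reported as unsupported.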
void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}
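
/// EmitLambdaDelegatingInvokeBody - Emit the body of a lambda's static
/// invoker as a call to the lambda's call operator. The 'this' argument is
/// undef: the invoker only exists for lambdas with no capture, whose call
/// operator never touches the object.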
void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}
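
/// EmitLambdaStaticInvokeFunction - Emit the static invoker backing a
/// lambda's conversion to function pointer, delegating to the call
/// operator; variadic call operators are reported as unsupported.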
void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}