CGClass.cpp 52 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400
  1. //===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code dealing with C++ code generation of classes
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CGDebugInfo.h"
  14. #include "CodeGenFunction.h"
  15. #include "clang/AST/CXXInheritance.h"
  16. #include "clang/AST/EvaluatedExprVisitor.h"
  17. #include "clang/AST/RecordLayout.h"
  18. #include "clang/AST/StmtCXX.h"
  19. using namespace clang;
  20. using namespace CodeGen;
  21. static uint64_t
  22. ComputeNonVirtualBaseClassOffset(ASTContext &Context,
  23. const CXXRecordDecl *DerivedClass,
  24. CastExpr::path_const_iterator Start,
  25. CastExpr::path_const_iterator End) {
  26. uint64_t Offset = 0;
  27. const CXXRecordDecl *RD = DerivedClass;
  28. for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
  29. const CXXBaseSpecifier *Base = *I;
  30. assert(!Base->isVirtual() && "Should not see virtual bases here!");
  31. // Get the layout.
  32. const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
  33. const CXXRecordDecl *BaseDecl =
  34. cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
  35. // Add the offset.
  36. Offset += Layout.getBaseClassOffsetInBits(BaseDecl);
  37. RD = BaseDecl;
  38. }
  39. // FIXME: We should not use / 8 here.
  40. return Offset / 8;
  41. }
  42. llvm::Constant *
  43. CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
  44. CastExpr::path_const_iterator PathBegin,
  45. CastExpr::path_const_iterator PathEnd) {
  46. assert(PathBegin != PathEnd && "Base path should not be empty!");
  47. uint64_t Offset =
  48. ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
  49. PathBegin, PathEnd);
  50. if (!Offset)
  51. return 0;
  52. const llvm::Type *PtrDiffTy =
  53. Types.ConvertType(getContext().getPointerDiffType());
  54. return llvm::ConstantInt::get(PtrDiffTy, Offset);
  55. }
  56. /// Gets the address of a direct base class within a complete object.
  57. /// This should only be used for (1) non-virtual bases or (2) virtual bases
  58. /// when the type is known to be complete (e.g. in complete destructors).
  59. ///
  60. /// The object pointed to by 'This' is assumed to be non-null.
  61. llvm::Value *
  62. CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
  63. const CXXRecordDecl *Derived,
  64. const CXXRecordDecl *Base,
  65. bool BaseIsVirtual) {
  66. // 'this' must be a pointer (in some address space) to Derived.
  67. assert(This->getType()->isPointerTy() &&
  68. cast<llvm::PointerType>(This->getType())->getElementType()
  69. == ConvertType(Derived));
  70. // Compute the offset of the virtual base.
  71. uint64_t Offset;
  72. const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  73. if (BaseIsVirtual)
  74. Offset = Layout.getVBaseClassOffsetInBits(Base);
  75. else
  76. Offset = Layout.getBaseClassOffsetInBits(Base);
  77. // Shift and cast down to the base type.
  78. // TODO: for complete types, this should be possible with a GEP.
  79. llvm::Value *V = This;
  80. if (Offset) {
  81. const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
  82. V = Builder.CreateBitCast(V, Int8PtrTy);
  83. V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
  84. }
  85. V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());
  86. return V;
  87. }
  88. static llvm::Value *
  89. ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
  90. uint64_t NonVirtual, llvm::Value *Virtual) {
  91. const llvm::Type *PtrDiffTy =
  92. CGF.ConvertType(CGF.getContext().getPointerDiffType());
  93. llvm::Value *NonVirtualOffset = 0;
  94. if (NonVirtual)
  95. NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);
  96. llvm::Value *BaseOffset;
  97. if (Virtual) {
  98. if (NonVirtualOffset)
  99. BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
  100. else
  101. BaseOffset = Virtual;
  102. } else
  103. BaseOffset = NonVirtualOffset;
  104. // Apply the base offset.
  105. const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  106. ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
  107. ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");
  108. return ThisPtr;
  109. }
/// GetAddressOfBaseClass - Convert a pointer to a Derived object into a
/// pointer to the base subobject named by the cast path
/// [PathBegin, PathEnd).  Handles an optional leading virtual base step
/// and, when NullCheckValue is set, preserves null pointers across the
/// adjustment.
llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.  Only the first step of the path can be virtual;
  // the rest of the path is then measured from that virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Static (non-virtual) part of the offset, measured from the virtual base
  // if there is one, otherwise from Derived itself.
  uint64_t NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // Get the base pointer type.
  const llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  if (!NonVirtualOffset && !VBase) {
    // No adjustment needed; just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  // When null checking, branch around the adjustment so a null pointer
  // stays null instead of being offset.
  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // The virtual part of the offset must be computed at runtime, since the
  // virtual base's placement depends on the object's dynamic type.
  llvm::Value *VirtualOffset = 0;
  if (VBase)
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset);

  // Cast back to the base pointer type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    // Merge the adjusted and null paths; a null input yields a null result.
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}
  170. llvm::Value *
  171. CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
  172. const CXXRecordDecl *Derived,
  173. CastExpr::path_const_iterator PathBegin,
  174. CastExpr::path_const_iterator PathEnd,
  175. bool NullCheckValue) {
  176. assert(PathBegin != PathEnd && "Base path should not be empty!");
  177. QualType DerivedTy =
  178. getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  179. const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
  180. llvm::Value *NonVirtualOffset =
  181. CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
  182. if (!NonVirtualOffset) {
  183. // No offset, we can just cast back.
  184. return Builder.CreateBitCast(Value, DerivedPtrTy);
  185. }
  186. llvm::BasicBlock *CastNull = 0;
  187. llvm::BasicBlock *CastNotNull = 0;
  188. llvm::BasicBlock *CastEnd = 0;
  189. if (NullCheckValue) {
  190. CastNull = createBasicBlock("cast.null");
  191. CastNotNull = createBasicBlock("cast.notnull");
  192. CastEnd = createBasicBlock("cast.end");
  193. llvm::Value *IsNull =
  194. Builder.CreateICmpEQ(Value,
  195. llvm::Constant::getNullValue(Value->getType()));
  196. Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
  197. EmitBlock(CastNotNull);
  198. }
  199. // Apply the offset.
  200. Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  201. Value = Builder.CreateSub(Value, NonVirtualOffset);
  202. Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);
  203. // Just cast.
  204. Value = Builder.CreateBitCast(Value, DerivedPtrTy);
  205. if (NullCheckValue) {
  206. Builder.CreateBr(CastEnd);
  207. EmitBlock(CastNull);
  208. Builder.CreateBr(CastEnd);
  209. EmitBlock(CastEnd);
  210. llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
  211. PHI->reserveOperandSpace(2);
  212. PHI->addIncoming(Value, CastNotNull);
  213. PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
  214. CastNull);
  215. Value = PHI;
  216. }
  217. return Value;
  218. }
/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases, or null when the callee
/// does not take a VTT parameter at all.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  // RD is the class whose ctor/dtor is currently being emitted; Base is the
  // class whose ctor/dtor is about to be called.
  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    // Index 0 of the VTT corresponds to the class itself.
    SubVTTIndex = 0;
  } else {
    // Locate the sub-VTT for the base within RD's VTT, keyed by the base's
    // offset (virtual or non-virtual) inside RD's layout.
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    uint64_t BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffsetInBits(Base) :
      Layout.getBaseClassOffsetInBits(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().getVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}
namespace {
  /// Call the destructor for a direct base class.  Pushed as an EH cleanup
  /// so a fully-constructed base subobject is destroyed if a later
  /// initializer throws.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass; // The base whose destructor to invoke.
    bool BaseIsVirtual;             // Whether BaseClass is a virtual base.
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis; // Set to true once any CXXThisExpr is visited.

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}
  291. static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  292. DynamicThisUseChecker Checker(C);
  293. Checker.Visit(const_cast<Expr*>(Init));
  294. return Checker.UsesThis;
  295. }
/// Emit the initialization of one base class from a constructor's
/// mem-initializer list.
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXBaseOrMemberInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);

  AggValueSlot AggSlot = AggValueSlot::forAddr(V, false, /*Lifetime*/ true);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  // Once the base is fully constructed, it must be destroyed if a later
  // initializer throws — push an EH-only cleanup for it.
  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}
/// Recursively emit the initialization of an aggregate (possibly
/// multi-dimensional array) member.  Index counts how many array dimensions
/// have already been looped over; once it equals the number of array index
/// variables on MemberInit, the element initializer itself is emitted.
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     llvm::Value *ArrayIndexVar,
                                     CXXBaseOrMemberInitializer *MemberInit,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    // Base case: emit the initializer for one element (or the whole member
    // when there are no array dimensions).
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);
    }

    AggValueSlot Slot = AggValueSlot::forAddr(Dest, LHS.isVolatileQualified(),
                                              /*Lifetime*/ true);

    CGF.EmitAggExpr(MemberInit->getInit(), Slot);

    return;
  }

  // Recursive case: emit a loop over this array dimension.
  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
                             Array->getElementType(), Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}
namespace {
  /// EH cleanup that runs the destructor of a fully-constructed member, so
  /// the member is destroyed if a later initializer throws.
  struct CallMemberDtor : EHScopeStack::Cleanup {
    FieldDecl *Field;        // The member field being destroyed.
    CXXDestructorDecl *Dtor; // The destructor to invoke on it.

    CallMemberDtor(FieldDecl *Field, CXXDestructorDecl *Dtor)
      : Field(Field), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // FIXME: Is this OK for C++0x delegating constructors?
      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);

      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  };
}
/// Emit the initialization of one non-static data member from a
/// constructor's mem-initializer list, dispatching on the member's type
/// (reference, scalar, complex, or aggregate/array).
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXBaseOrMemberInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isMemberInitializer() &&
         "Must have member initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getMember();
  QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->getAnonUnionMember()) {
    Field = MemberInit->getAnonUnionMember();
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr, Field, 0);
    FieldType = Field->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
  // was implicitly generated, we shouldn't be zeroing memory.
  RValue RHS;
  if (FieldType->isReferenceType()) {
    // Reference members: bind the initializer and store the resulting
    // reference through the lvalue.
    RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
    // Array member with no initializer expression: zero-initialize.
    CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
  } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
    // Scalar member: evaluate and store.
    RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
    // Complex member: emit directly into the member's address.
    CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
                                LHS.isVolatileQualified());
  } else {
    // Aggregate (class or array) member.
    llvm::Value *ArrayIndexVar = 0;
    const ConstantArrayType *Array
      = CGF.getContext().getAsConstantArrayType(FieldType);
    if (Array && Constructor->isImplicit() &&
        Constructor->isCopyConstructor()) {
      const llvm::Type *SizeTy
        = CGF.ConvertType(CGF.getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
                                                           BasePtr);
      LHS = CGF.MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      CGF.Builder.CreateStore(Zero, ArrayIndexVar);

      // If we are copying an array of scalars or classes with trivial copy
      // constructors, perform a single aggregate copy.
      const RecordType *Record = BaseElementTy->getAs<RecordType>();
      if (!Record ||
          cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer.  We know it's the last argument because
        // we know we're in a copy constructor.
        unsigned SrcArgIndex = Args.size() - 1;
        llvm::Value *SrcPtr
          = CGF.Builder.CreateLoad(
              CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
        LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

        // Copy the aggregate.
        CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                              LHS.isVolatileQualified());
        return;
      }

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
        CGF.EmitAutoVarDecl(*MemberInit->getArrayIndex(I));
    }

    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);

    if (!CGF.Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    // The member is now fully constructed; destroy it on the EH path if a
    // later initializer throws.
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
                                              RD->getDestructor());
  }
}
  501. /// Checks whether the given constructor is a valid subject for the
  502. /// complete-to-base constructor delegation optimization, i.e.
  503. /// emitting the complete constructor as a simple call to the base
  504. /// constructor.
  505. static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
  506. // Currently we disable the optimization for classes with virtual
  507. // bases because (1) the addresses of parameter variables need to be
  508. // consistent across all initializers but (2) the delegate function
  509. // call necessarily creates a second copy of the parameter variable.
  510. //
  511. // The limiting example (purely theoretical AFAIK):
  512. // struct A { A(int &c) { c++; } };
  513. // struct B : virtual A {
  514. // B(int count) : A(count) { printf("%d\n", count); }
  515. // };
  516. // ...although even this example could in principle be emitted as a
  517. // delegation since the address of the parameter doesn't escape.
  518. if (Ctor->getParent()->getNumVBases()) {
  519. // TODO: white-list trivial vbase initializers. This case wouldn't
  520. // be subject to the restrictions below.
  521. // TODO: white-list cases where:
  522. // - there are no non-reference parameters to the constructor
  523. // - the initializers don't access any non-reference parameters
  524. // - the initializers don't take the address of non-reference
  525. // parameters
  526. // - etc.
  527. // If we ever add any of the above cases, remember that:
  528. // - function-try-blocks will always blacklist this optimization
  529. // - we need to perform the constructor prologue and cleanup in
  530. // EmitConstructorBody.
  531. return false;
  532. }
  533. // We also disable the optimization for variadic functions because
  534. // it's impossible to "re-pass" varargs.
  535. if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
  536. return false;
  537. return true;
  538. }
/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitStopPoint(Builder);
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Remember the cleanup depth so every cleanup pushed by the prologue
  // (member/base destructors on the EH path) can be popped after the body.
  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}
  574. /// EmitCtorPrologue - This routine generates necessary code to initialize
  575. /// base classes and non-static data members belonging to this constructor.
  576. void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
  577. CXXCtorType CtorType,
  578. FunctionArgList &Args) {
  579. const CXXRecordDecl *ClassDecl = CD->getParent();
  580. llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;
  581. for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
  582. E = CD->init_end();
  583. B != E; ++B) {
  584. CXXBaseOrMemberInitializer *Member = (*B);
  585. if (Member->isBaseInitializer())
  586. EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
  587. else
  588. MemberInitializers.push_back(Member);
  589. }
  590. InitializeVTablePointers(ClassDecl);
  591. for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
  592. EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
  593. }
/// EmitDestructorBody - Emits the body of the current destructor.
///
/// Dispatches on the destructor variant being emitted (CurGD): the
/// deleting variant delegates to the complete variant and then calls
/// operator delete; the complete variant destroys virtual bases and
/// (when possible) delegates the rest to the base variant; the base
/// variant destroys members and non-virtual bases after running the
/// user-written body.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    // Push the operator delete cleanup, call the complete dtor, then pop
    // the cleanup so the delete is emitted on the way out.
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}
  653. namespace {
  654. /// Call the operator delete associated with the current destructor.
  655. struct CallDtorDelete : EHScopeStack::Cleanup {
  656. CallDtorDelete() {}
  657. void Emit(CodeGenFunction &CGF, bool IsForEH) {
  658. const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
  659. const CXXRecordDecl *ClassDecl = Dtor->getParent();
  660. CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
  661. CGF.getContext().getTagDeclType(ClassDecl));
  662. }
  663. };
  664. struct CallArrayFieldDtor : EHScopeStack::Cleanup {
  665. const FieldDecl *Field;
  666. CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
  667. void Emit(CodeGenFunction &CGF, bool IsForEH) {
  668. QualType FieldType = Field->getType();
  669. const ConstantArrayType *Array =
  670. CGF.getContext().getAsConstantArrayType(FieldType);
  671. QualType BaseType =
  672. CGF.getContext().getBaseElementType(Array->getElementType());
  673. const CXXRecordDecl *FieldClassDecl = BaseType->getAsCXXRecordDecl();
  674. llvm::Value *ThisPtr = CGF.LoadCXXThis();
  675. LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
  676. // FIXME: Qualifiers?
  677. /*CVRQualifiers=*/0);
  678. const llvm::Type *BasePtr = CGF.ConvertType(BaseType)->getPointerTo();
  679. llvm::Value *BaseAddrPtr =
  680. CGF.Builder.CreateBitCast(LHS.getAddress(), BasePtr);
  681. CGF.EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(),
  682. Array, BaseAddrPtr);
  683. }
  684. };
  685. struct CallFieldDtor : EHScopeStack::Cleanup {
  686. const FieldDecl *Field;
  687. CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
  688. void Emit(CodeGenFunction &CGF, bool IsForEH) {
  689. const CXXRecordDecl *FieldClassDecl =
  690. Field->getType()->getAsCXXRecordDecl();
  691. llvm::Value *ThisPtr = CGF.LoadCXXThis();
  692. LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
  693. // FIXME: Qualifiers?
  694. /*CVRQualifiers=*/0);
  695. CGF.EmitCXXDestructorCall(FieldClassDecl->getDestructor(),
  696. Dtor_Complete, /*ForVirtualBase=*/false,
  697. LHS.getAddress());
  698. }
  699. };
  700. }
  701. /// EmitDtorEpilogue - Emit all code that comes at the end of class's
  702. /// destructor. This is to call destructors on members and base classes
  703. /// in reverse order of their construction.
  704. void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
  705. CXXDtorType DtorType) {
  706. assert(!DD->isTrivial() &&
  707. "Should not emit dtor epilogue for trivial dtor!");
  708. // The deleting-destructor phase just needs to call the appropriate
  709. // operator delete that Sema picked up.
  710. if (DtorType == Dtor_Deleting) {
  711. assert(DD->getOperatorDelete() &&
  712. "operator delete missing - EmitDtorEpilogue");
  713. EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
  714. return;
  715. }
  716. const CXXRecordDecl *ClassDecl = DD->getParent();
  717. // The complete-destructor phase just destructs all the virtual bases.
  718. if (DtorType == Dtor_Complete) {
  719. // We push them in the forward order so that they'll be popped in
  720. // the reverse order.
  721. for (CXXRecordDecl::base_class_const_iterator I =
  722. ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
  723. I != E; ++I) {
  724. const CXXBaseSpecifier &Base = *I;
  725. CXXRecordDecl *BaseClassDecl
  726. = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
  727. // Ignore trivial destructors.
  728. if (BaseClassDecl->hasTrivialDestructor())
  729. continue;
  730. EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
  731. BaseClassDecl,
  732. /*BaseIsVirtual*/ true);
  733. }
  734. return;
  735. }
  736. assert(DtorType == Dtor_Base);
  737. // Destroy non-virtual bases.
  738. for (CXXRecordDecl::base_class_const_iterator I =
  739. ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
  740. const CXXBaseSpecifier &Base = *I;
  741. // Ignore virtual bases.
  742. if (Base.isVirtual())
  743. continue;
  744. CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
  745. // Ignore trivial destructors.
  746. if (BaseClassDecl->hasTrivialDestructor())
  747. continue;
  748. EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
  749. BaseClassDecl,
  750. /*BaseIsVirtual*/ false);
  751. }
  752. // Destroy direct fields.
  753. llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
  754. for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
  755. E = ClassDecl->field_end(); I != E; ++I) {
  756. const FieldDecl *Field = *I;
  757. QualType FieldType = getContext().getCanonicalType(Field->getType());
  758. const ConstantArrayType *Array =
  759. getContext().getAsConstantArrayType(FieldType);
  760. if (Array)
  761. FieldType = getContext().getBaseElementType(Array->getElementType());
  762. const RecordType *RT = FieldType->getAs<RecordType>();
  763. if (!RT)
  764. continue;
  765. CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  766. if (FieldClassDecl->hasTrivialDestructor())
  767. continue;
  768. if (Array)
  769. EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
  770. else
  771. EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
  772. }
  773. }
  774. /// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
  775. /// for-loop to call the default constructor on individual members of the
  776. /// array.
  777. /// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
  778. /// array type and 'ArrayPtr' points to the beginning fo the array.
  779. /// It is assumed that all relevant checks have been made by the caller.
  780. ///
  781. /// \param ZeroInitialization True if each element should be zero-initialized
  782. /// before it is constructed.
  783. void
  784. CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
  785. const ConstantArrayType *ArrayTy,
  786. llvm::Value *ArrayPtr,
  787. CallExpr::const_arg_iterator ArgBeg,
  788. CallExpr::const_arg_iterator ArgEnd,
  789. bool ZeroInitialization) {
  790. const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  791. llvm::Value * NumElements =
  792. llvm::ConstantInt::get(SizeTy,
  793. getContext().getConstantArrayElementCount(ArrayTy));
  794. EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd,
  795. ZeroInitialization);
  796. }
/// Emits an IR loop that constructs NumElements objects of D's class,
/// starting at ArrayPtr, calling the complete-object constructor D on each
/// element in ascending order.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd,
                                            bool ZeroInitialization) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");

  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");

  // Zero initialize the storage, if requested.
  if (ZeroInitialization)
    EmitNullInitialization(Address,
                           getContext().getTypeDeclType(D->getParent()));

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  // Run per-element cleanups (default-argument temporaries) before the
  // next iteration by scoping them to this block.
  {
    RunCleanupsScope Scope(*this);

    EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                           ArgBeg, ArgEnd);
  }

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}
  854. /// EmitCXXAggrDestructorCall - calls the default destructor on array
  855. /// elements in reverse order of construction.
  856. void
  857. CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
  858. const ArrayType *Array,
  859. llvm::Value *This) {
  860. const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  861. assert(CA && "Do we support VLA for destruction ?");
  862. uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
  863. const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  864. llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
  865. EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
  866. }
/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
///
/// Emits an IR loop that counts down from UpperCount to zero, calling the
/// complete-object destructor D on element [counter - 1] each iteration,
/// so elements are destroyed last-constructed-first.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           llvm::Value *UpperCount,
                                           llvm::Value *This) {
  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);

  // Create a temporary for the loop index and initialize it with count of
  // array elements.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");

  // Store the number of elements in the index pointer.
  Builder.CreateStore(UpperCount, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(SizeLTy);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");

  // Inside the loop body, emit the constructor call on the array element.
  // (The index is pre-decremented: element [counter - 1] is destroyed.)
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}
  911. void
  912. CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
  913. CXXCtorType Type, bool ForVirtualBase,
  914. llvm::Value *This,
  915. CallExpr::const_arg_iterator ArgBeg,
  916. CallExpr::const_arg_iterator ArgEnd) {
  917. if (D->isTrivial()) {
  918. if (ArgBeg == ArgEnd) {
  919. // Trivial default constructor, no codegen required.
  920. assert(D->isDefaultConstructor() &&
  921. "trivial 0-arg ctor not a default ctor");
  922. return;
  923. }
  924. assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
  925. assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
  926. const Expr *E = (*ArgBeg);
  927. QualType Ty = E->getType();
  928. llvm::Value *Src = EmitLValue(E).getAddress();
  929. EmitAggregateCopy(This, Src, Ty);
  930. return;
  931. }
  932. llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  933. llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);
  934. EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
  935. }
/// Emit a call from one variant of a constructor to another variant of the
/// same constructor (e.g. complete -> base), forwarding the current
/// function's own parameters straight through.
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
                                        I->second));
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));

    // If the current function itself takes a VTT parameter, skip it in the
    // forwarded list -- the VTT we just pushed replaces it.
    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert(I->second == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *Param = I->first;
    QualType ArgType = Param->getType(); // because we're passing it to itself
    RValue Arg = EmitDelegateCallArg(Param);

    DelegateArgs.push_back(std::make_pair(Arg, ArgType));
  }

  EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}
  969. void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
  970. CXXDtorType Type,
  971. bool ForVirtualBase,
  972. llvm::Value *This) {
  973. llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
  974. ForVirtualBase);
  975. llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);
  976. EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
  977. }
  978. namespace {
  979. struct CallLocalDtor : EHScopeStack::Cleanup {
  980. const CXXDestructorDecl *Dtor;
  981. llvm::Value *Addr;
  982. CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
  983. : Dtor(D), Addr(Addr) {}
  984. void Emit(CodeGenFunction &CGF, bool IsForEH) {
  985. CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
  986. /*ForVirtualBase=*/false, Addr);
  987. }
  988. };
  989. }
  990. void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
  991. llvm::Value *Addr) {
  992. EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
  993. }
  994. void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  995. CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  996. if (!ClassDecl) return;
  997. if (ClassDecl->hasTrivialDestructor()) return;
  998. const CXXDestructorDecl *D = ClassDecl->getDestructor();
  999. PushDestructorCleanup(D, Addr);
  1000. }
  1001. llvm::Value *
  1002. CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
  1003. const CXXRecordDecl *ClassDecl,
  1004. const CXXRecordDecl *BaseClassDecl) {
  1005. const llvm::Type *Int8PtrTy =
  1006. llvm::Type::getInt8Ty(VMContext)->getPointerTo();
  1007. llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  1008. int64_t VBaseOffsetOffset =
  1009. CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);
  1010. llvm::Value *VBaseOffsetPtr =
  1011. Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
  1012. const llvm::Type *PtrDiffTy =
  1013. ConvertType(getContext().getPointerDiffType());
  1014. VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
  1015. PtrDiffTy->getPointerTo());
  1016. llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");
  1017. return VBaseOffset;
  1018. }
/// Store the correct vtable address point into the vtable slot of the
/// given base subobject of the object under construction/destruction.
///
/// \param Base              the base subobject whose vptr is being set.
/// \param NearestVBase      the nearest enclosing virtual base on the path
///                          to Base, or null if there is none.
/// \param OffsetFromNearestVBase  Base's offset from that virtual base,
///                          in bits (divided by 8 below).
/// \param VTable            the vtable global for VTableClass.
/// \param VTableClass       the most derived class whose vtable is used.
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         uint64_t OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    /// Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  uint64_t NonVirtualOffset = 0;

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase / 8;  // bits -> bytes
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset() / 8;  // bits -> bytes
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (NonVirtualOffset || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  const llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}
/// Recursively initialize the vtable pointer of the given base subobject
/// and of all its dynamic bases, tracking (via VBases) which virtual bases
/// have already been visited so each is initialized exactly once.
/// Offsets are carried in bits (as produced by ASTRecordLayout).
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          uint64_t OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    uint64_t BaseOffset;
    uint64_t BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      // Virtual bases are laid out relative to the most derived class.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffsetInBits(BaseDecl);
      BaseOffsetFromNearestVBase = 0;
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      // Non-virtual bases: accumulate this class's layout offsets.
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset =
        Base.getBaseOffset() + Layout.getBaseClassOffsetInBits(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffsetInBits(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}
  1121. void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  1122. // Ignore classes without a vtable.
  1123. if (!RD->isDynamicClass())
  1124. return;
  1125. // Get the VTable.
  1126. llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);
  1127. // Initialize the vtable pointers for this class and all of its bases.
  1128. VisitedVirtualBasesSetTy VBases;
  1129. InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
  1130. /*OffsetFromNearestVBase=*/0,
  1131. /*BaseIsNonVirtualPrimaryBase=*/false,
  1132. VTable, RD, VBases);
  1133. }
  1134. llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
  1135. const llvm::Type *Ty) {
  1136. llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  1137. return Builder.CreateLoad(VTablePtrSrc, "vtable");
  1138. }