//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "clang/Frontend/CodeGenOptions.h"
#include "CodeGenFunction.h"
#include "CGCUDARuntime.h"
#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGDebugInfo.h"
#include "llvm/Intrinsics.h"
#include "llvm/Support/CallSite.h"

using namespace clang;
using namespace CodeGen;
RValue CodeGenFunction::EmitCXXMemberCall(const CXXMethodDecl *MD,
                                          llvm::Value *Callee,
                                          ReturnValueSlot ReturnValue,
                                          llvm::Value *This,
                                          llvm::Value *VTT,
                                          CallExpr::const_arg_iterator ArgBeg,
                                          CallExpr::const_arg_iterator ArgEnd) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), MD->getThisType(getContext()));

  // If there is a VTT parameter, emit it.
  if (VTT) {
    QualType T = getContext().getPointerType(getContext().VoidPtrTy);
    Args.add(RValue::get(VTT), T);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());

  // And the rest of the call args.
  EmitCallArgs(Args, FPT, ArgBeg, ArgEnd);

  return EmitCall(CGM.getTypes().arrangeFunctionCall(FPT->getResultType(), Args,
                                                     FPT->getExtInfo(),
                                                     required),
                  Callee, ReturnValue, Args, MD);
}
static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}
// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}
/// canDevirtualizeMemberFunctionCalls - Checks whether virtual calls on given
/// expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCalls(ASTContext &Context,
                                               const Expr *Base,
                                               const CXXMethodDecl *MD) {

  // When building with -fapple-kext, all calls must go through the vtable since
  // the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return false;

  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  // struct A { virtual void f(); }
  // struct B final : A { };
  //
  // void f(B *b) {
  //   b->f();
  // }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class itself is marked 'final' it can't be overridden
  // and we can therefore devirtualize the member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;
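
  // For example (illustrative):
  //
  //   struct B : A { void f() final; };  // B::f can't be overridden
  //   struct C final : A { };            // nothing can derive from C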

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // This is a record decl. We know the type and can devirtualize it.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}
// Note: This function also emits constructor calls to support the MSVC
// extension allowing explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  CGDebugInfo *DI = getDebugInfo();
  if (DI && CGM.getCodeGenOpts().LimitDebugInfo &&
      !isa<CallExpr>(ME->getBase())) {
    QualType PQTy = ME->getBase()->IgnoreParenImpCasts()->getType();
    if (const PointerType *PTy = dyn_cast<PointerType>(PQTy)) {
      DI->getOrCreateRecordType(PTy->getPointeeType(),
                                MD->getParent()->getLocation());
    }
  }

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    llvm::Value *Callee = CGM.GetAddrOfFunction(MD);
    return EmitCall(getContext().getPointerType(MD->getType()), Callee,
                    ReturnValue, CE->arg_begin(), CE->arg_end());
  }

  // Compute the object pointer.
  llvm::Value *This;
  if (ME->isArrow())
    This = EmitScalarExpr(ME->getBase());
  else
    This = EmitLValue(ME->getBase()).getAddress();

  if (MD->isTrivial()) {
    if (isa<CXXDestructorDecl>(MD)) return RValue::get(0);
    if (isa<CXXConstructorDecl>(MD) &&
        cast<CXXConstructorDecl>(MD)->isDefaultConstructor())
      return RValue::get(0);

    if (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) {
      // We don't like to generate the trivial copy/move assignment operator
      // when it isn't necessary; just produce the proper effect here.
      llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
      EmitAggregateCopy(This, RHS, CE->getType());
      return RValue::get(This);
    }

    if (isa<CXXConstructorDecl>(MD) &&
        cast<CXXConstructorDecl>(MD)->isCopyOrMoveConstructor()) {
      // Trivial move and copy ctor are the same.
      llvm::Value *RHS = EmitLValue(*CE->arg_begin()).getAddress();
      EmitSynthesizedCXXCopyCtorCall(cast<CXXConstructorDecl>(MD), This, RHS,
                                     CE->arg_begin(), CE->arg_end());
      return RValue::get(This);
    }
    llvm_unreachable("unknown trivial member function");
  }

  // Compute the function type we're calling.
  const CGFunctionInfo *FInfo = 0;
  if (isa<CXXDestructorDecl>(MD))
    FInfo = &CGM.getTypes().arrangeCXXDestructor(cast<CXXDestructorDecl>(MD),
                                                 Dtor_Complete);
  else if (isa<CXXConstructorDecl>(MD))
    FInfo = &CGM.getTypes().arrangeCXXConstructorDeclaration(
                                                 cast<CXXConstructorDecl>(MD),
                                                 Ctor_Complete);
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(MD);

  llvm::Type *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = MD->isVirtual() && !ME->hasQualifier() &&
                        !canDevirtualizeMemberFunctionCalls(getContext(),
                                                            ME->getBase(), MD);
  llvm::Value *Callee;
  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD)) {
    if (UseVirtualCall) {
      Callee = BuildVirtualCall(Dtor, Dtor_Complete, This, Ty);
    } else {
      if (getContext().getLangOpts().AppleKext &&
          MD->isVirtual() &&
          ME->hasQualifier())
        Callee = BuildAppleKextVirtualCall(MD, ME->getQualifier(), Ty);
      else
        Callee = CGM.GetAddrOfFunction(GlobalDecl(Dtor, Dtor_Complete), Ty);
    }
  } else if (const CXXConstructorDecl *Ctor =
               dyn_cast<CXXConstructorDecl>(MD)) {
    Callee = CGM.GetAddrOfFunction(GlobalDecl(Ctor, Ctor_Complete), Ty);
  } else if (UseVirtualCall) {
    Callee = BuildVirtualCall(MD, This, Ty);
  } else {
    if (getContext().getLangOpts().AppleKext &&
        MD->isVirtual() &&
        ME->hasQualifier())
      Callee = BuildAppleKextVirtualCall(MD, ME->getQualifier(), Ty);
    else
      Callee = CGM.GetAddrOfFunction(MD, Ty);
  }

  return EmitCXXMemberCall(MD, Callee, ReturnValue, This, /*VTT=*/0,
                           CE->arg_begin(), CE->arg_end());
}
RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();
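
  // For example (illustrative), in '(p->*fp)(x)' the opcode is BO_PtrMemI,
  // BaseExpr is 'p' and MemFnExpr is 'fp'; in '(o.*fp)(x)' it is BO_PtrMemD.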

  const MemberPointerType *MPT =
      MemFnExpr->getType()->castAs<MemberPointerType>();

  const FunctionProtoType *FPT =
      MPT->getPointeeType()->castAs<FunctionProtoType>();
  const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(MPT->getClass()->getAs<RecordType>()->getDecl());

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Emit the 'this' pointer.
  llvm::Value *This;

  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitScalarExpr(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  // Ask the ABI to load the callee.  Note that This is modified.
  llvm::Value *Callee =
    CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, This, MemFnPtr, MPT);

  CallArgList Args;

  QualType ThisType =
    getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(This), ThisType);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arg_begin(), E->arg_end());
  return EmitCall(CGM.getTypes().arrangeFunctionCall(Args, FPT), Callee,
                  ReturnValue, Args);
}
RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  LValue LV = EmitLValue(E->getArg(0));
  llvm::Value *This = LV.getAddress();

  if ((MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) &&
      MD->isTrivial()) {
    llvm::Value *Src = EmitLValue(E->getArg(1)).getAddress();
    QualType Ty = E->getType();
    EmitAggregateCopy(This, Src, Ty);
    return RValue::get(This);
  }

  llvm::Value *Callee = EmitCXXOperatorMemberCallee(E, MD, This);
  return EmitCXXMemberCall(MD, Callee, ReturnValue, This, /*VTT=*/0,
                           E->arg_begin() + 1, E->arg_end());
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}
static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            llvm::Value *DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.EmitCastToVoidPtr(DestPtr);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits Size = Layout.getNonVirtualSize();
  CharUnits Align = Layout.getNonVirtualAlign();

  llvm::Value *SizeVal = CGF.CGM.getSize(Size);

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
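  // (Illustrative: under the Itanium C++ ABI a null pointer-to-data-member is
  // represented as -1, not 0, so a plain memset to zero would leave such a
  // member looking non-null.)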
  if (!CGF.CGM.getTypes().isZeroInitializable(Base)) {
    llvm::Constant *NullConstant = CGF.CGM.EmitNullConstantForBase(Base);

    llvm::GlobalVariable *NullVariable =
      new llvm::GlobalVariable(CGF.CGM.getModule(), NullConstant->getType(),
                               /*isConstant=*/true,
                               llvm::GlobalVariable::PrivateLinkage,
                               NullConstant, Twine());
    NullVariable->setAlignment(Align.getQuantity());
    llvm::Value *SrcPtr = CGF.EmitCastToVoidPtr(NullVariable);

    // Get and call the appropriate llvm.memcpy overload.
    CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, Align.getQuantity());
    return;
  }

  // Otherwise, just memset the whole thing to zero.  This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  CGF.Builder.CreateMemSet(DestPtr, CGF.Builder.getInt8(0), SizeVal,
                           Align.getQuantity());
}
void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      assert(0 && "Delegating constructor should not need zeroing");
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddr(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddr(), CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  // The temporary check is required because Sema sets this on NRVO
  // returns.
  if (getContext().getLangOpts().ElideConstructors && E->isElidable()) {
    assert(getContext().hasSameUnqualifiedType(E->getType(),
                                               E->getArg(0)->getType()));
    if (E->getArg(0)->isTemporaryObject(getContext(), CD->getParent())) {
      EmitAggExpr(E->getArg(0), Dest);
      return;
    }
  }

  if (const ConstantArrayType *arrayType
        = getContext().getAsConstantArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddr(),
                               E->arg_begin(), E->arg_end());
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      // fall-through

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Dest.getAddr(),
                           E->arg_begin(), E->arg_end());
  }
}
void
CodeGenFunction::EmitSynthesizedCXXCopyCtor(llvm::Value *Dest,
                                            llvm::Value *Src,
                                            const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME. Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src,
                                 E->arg_begin(), E->arg_end());
}
static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}
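
// (Illustrative: under the Itanium C++ ABI, 'new T[n]' for a T with a
// non-trivial destructor typically prepends a size_t "cookie" recording 'n'
// so that 'delete[]' knows how many elements to destroy:
//
//   | cookie | element 0 | element 1 | ... |
//            ^-- pointer returned by the new-expression)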
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g. for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements = CGF.EmitScalarExpr(e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t.  That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
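  //
  // For example (illustrative), for 'new S[n]' with sizeof(S) == 8 and an
  // 8-byte cookie, the allocation size is 'n * 8 + 8' computed in size_t,
  // with both the multiply and the add checked for unsigned overflow.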
  bool isSigned
    = e->getArraySize()->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t.  If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that.  This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie.  This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that;
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = 0;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow.  Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary.  This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary.  Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Value *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
        CGF.Builder.CreateCall2(umul_with_overflow, size, tsmV);

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Value *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
        CGF.Builder.CreateCall2(uadd_with_overflow, size, cookieSizeV);

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, llvm::Value *NewPtr) {

  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(AllocType);
  if (!CGF.hasAggregateLLVMType(AllocType))
    CGF.EmitScalarInit(Init, 0, CGF.MakeAddrLValue(NewPtr, AllocType,
                                                   Alignment),
                       false);
  else if (AllocType->isAnyComplexType())
    CGF.EmitComplexExprIntoAddr(Init, NewPtr,
                                AllocType.isVolatileQualified());
  else {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, Alignment, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased);
    CGF.EmitAggExpr(Init, Slot);

    CGF.MaybeEmitStdInitializerListCleanup(NewPtr, Init);
  }
}
void
CodeGenFunction::EmitNewArrayInitializer(const CXXNewExpr *E,
                                         QualType elementType,
                                         llvm::Value *beginPtr,
                                         llvm::Value *numElements) {
  if (!E->hasInitializer())
    return; // We have a POD type.

  llvm::Value *explicitPtr = beginPtr;
  // Find the end of the array, hoisted out of the loop.
  llvm::Value *endPtr =
    Builder.CreateInBoundsGEP(beginPtr, numElements, "array.end");

  unsigned initializerElements = 0;

  const Expr *Init = E->getInitializer();
  llvm::AllocaInst *endOfInit = 0;
  QualType::DestructionKind dtorKind = elementType.isDestructedType();
  EHScopeStack::stable_iterator cleanup;
  llvm::Instruction *cleanupDominator = 0;

  // If the initializer is an initializer list, first do the explicit elements.
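  // For example (illustrative), for 'new int[n] {1, 2, 3}' the three explicit
  // initializers are stored first; the remaining 'n - 3' elements are then
  // initialized from the array filler in the loop below.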
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    initializerElements = ILE->getNumInits();

    // Enter a partial-destruction cleanup if necessary.
    if (needsEHCleanup(dtorKind)) {
      // In principle we could tell the cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex.  Therefore we go through an
      // alloca.
      endOfInit = CreateTempAlloca(beginPtr->getType(), "array.endOfInit");
      cleanupDominator = Builder.CreateStore(beginPtr, endOfInit);
      pushIrregularPartialArrayCleanup(beginPtr, endOfInit, elementType,
                                       getDestroyer(dtorKind));
      cleanup = EHStack.stable_begin();
    }

    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (endOfInit) Builder.CreateStore(explicitPtr, endOfInit);
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i), elementType, explicitPtr);
      explicitPtr = Builder.CreateConstGEP1_32(explicitPtr, 1,
                                               "array.exp.next");
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();
  }

  // Create the continuation block.
  llvm::BasicBlock *contBB = createBasicBlock("new.loop.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (llvm::ConstantInt *constNum = dyn_cast<llvm::ConstantInt>(numElements)) {
    // If all elements have already been initialized, skip the whole loop.
    if (constNum->getZExtValue() <= initializerElements) {
      // If there was a cleanup, deactivate it.
      if (cleanupDominator)
        DeactivateCleanupBlock(cleanup, cleanupDominator);
      return;
    }
  } else {
    llvm::BasicBlock *nonEmptyBB = createBasicBlock("new.loop.nonempty");
    llvm::Value *isEmpty = Builder.CreateICmpEQ(explicitPtr, endPtr,
                                                "array.isempty");
    Builder.CreateCondBr(isEmpty, contBB, nonEmptyBB);
    EmitBlock(nonEmptyBB);
  }

  // Enter the loop.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("new.loop");

  EmitBlock(loopBB);

  // Set up the current-element phi.
  llvm::PHINode *curPtr =
    Builder.CreatePHI(explicitPtr->getType(), 2, "array.cur");
  curPtr->addIncoming(explicitPtr, entryBB);

  // Store the new cleanup position for irregular cleanups.
  if (endOfInit) Builder.CreateStore(curPtr, endOfInit);

  // Enter a partial-destruction cleanup if necessary.
  if (!cleanupDominator && needsEHCleanup(dtorKind)) {
    pushRegularPartialArrayCleanup(beginPtr, curPtr, elementType,
                                   getDestroyer(dtorKind));
    cleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, E->getAllocatedType(), curPtr);

  // Leave the cleanup if we entered one.
  if (cleanupDominator) {
    DeactivateCleanupBlock(cleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  // Advance to the next element.
  llvm::Value *nextPtr = Builder.CreateConstGEP1_32(curPtr, 1, "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *isEnd = Builder.CreateICmpEQ(nextPtr, endPtr, "array.atend");
  Builder.CreateCondBr(isEnd, contBB, loopBB);
  curPtr->addIncoming(nextPtr, Builder.GetInsertBlock());

  EmitBlock(contBB);
}
static void EmitZeroMemSet(CodeGenFunction &CGF, QualType T,
                           llvm::Value *NewPtr, llvm::Value *Size) {
  CGF.EmitCastToVoidPtr(NewPtr);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(T);
  CGF.Builder.CreateMemSet(NewPtr, CGF.Builder.getInt8(0), Size,
                           Alignment.getQuantity(), false);
}

static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType,
                               llvm::Value *NewPtr,
                               llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  const Expr *Init = E->getInitializer();

  if (E->isArray()) {
    if (const CXXConstructExpr *CCE = dyn_cast_or_null<CXXConstructExpr>(Init)){
      CXXConstructorDecl *Ctor = CCE->getConstructor();
      bool RequiresZeroInitialization = false;
      if (Ctor->isTrivial()) {
        // If new expression did not specify value-initialization, then there
        // is no initialization.
        if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
          return;

        if (CGF.CGM.getTypes().isZeroInitializable(ElementType)) {
          // Optimization: since zero initialization will just set the memory
          // to all zeroes, generate a single memset to do it in one shot.
          EmitZeroMemSet(CGF, ElementType, NewPtr, AllocSizeWithoutCookie);
          return;
        }

        RequiresZeroInitialization = true;
      }

      CGF.EmitCXXAggrConstructorCall(Ctor, NumElements, NewPtr,
                                     CCE->arg_begin(), CCE->arg_end(),
                                     RequiresZeroInitialization);
      return;
    } else if (Init && isa<ImplicitValueInitExpr>(Init) &&
               CGF.CGM.getTypes().isZeroInitializable(ElementType)) {
      // Optimization: since zero initialization will just set the memory
      // to all zeroes, generate a single memset to do it in one shot.
      EmitZeroMemSet(CGF, ElementType, NewPtr, AllocSizeWithoutCookie);
      return;
    }

    CGF.EmitNewArrayInitializer(E, ElementType, NewPtr, NumElements);
    return;
  }

  if (!Init)
    return;

  StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr);
}
namespace {
  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression.
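  ///
  /// For example (illustrative), if 'new (a, b) T(...)' throws from T's
  /// constructor, the matching placement 'operator delete(void*, A, B)' must
  /// be called with the same placement arguments 'a' and 'b'.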
  class CallDeleteDuringNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    llvm::Value *Ptr;
    llvm::Value *AllocSize;

    RValue *getPlacementArgs() { return reinterpret_cast<RValue*>(this+1); }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(RValue);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete,
                        llvm::Value *Ptr,
                        llvm::Value *AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    void setPlacementArg(unsigned I, RValue Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumArgs() == NumPlacementArgs + 1 ||
             (FPT->getNumArgs() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::arg_type_iterator AI = FPT->arg_type_begin();
      DeleteArgs.add(RValue::get(Ptr), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumArgs() == NumPlacementArgs + 2)
        DeleteArgs.add(RValue::get(AllocSize), *AI++);

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I)
        DeleteArgs.add(getPlacementArgs()[I], *AI++);

      // Call 'operator delete'.
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFunctionCall(DeleteArgs, FPT),
                   CGF.CGM.GetAddrOfFunction(OperatorDelete),
                   ReturnValueSlot(), DeleteArgs, OperatorDelete);
    }
  };
  /// A cleanup to call the given 'operator delete' function upon
  /// abnormal exit from a new expression when the new expression is
  /// conditional.
  class CallDeleteDuringConditionalNew : public EHScopeStack::Cleanup {
    size_t NumPlacementArgs;
    const FunctionDecl *OperatorDelete;
    DominatingValue<RValue>::saved_type Ptr;
    DominatingValue<RValue>::saved_type AllocSize;

    DominatingValue<RValue>::saved_type *getPlacementArgs() {
      return reinterpret_cast<DominatingValue<RValue>::saved_type*>(this+1);
    }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(DominatingValue<RValue>::saved_type);
    }

    CallDeleteDuringConditionalNew(size_t NumPlacementArgs,
                                   const FunctionDecl *OperatorDelete,
                                   DominatingValue<RValue>::saved_type Ptr,
                              DominatingValue<RValue>::saved_type AllocSize)
      : NumPlacementArgs(NumPlacementArgs), OperatorDelete(OperatorDelete),
        Ptr(Ptr), AllocSize(AllocSize) {}

    void setPlacementArg(unsigned I, DominatingValue<RValue>::saved_type Arg) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = Arg;
    }

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const FunctionProtoType *FPT
        = OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(FPT->getNumArgs() == NumPlacementArgs + 1 ||
             (FPT->getNumArgs() == 2 && NumPlacementArgs == 0));

      CallArgList DeleteArgs;

      // The first argument is always a void*.
      FunctionProtoType::arg_type_iterator AI = FPT->arg_type_begin();
      DeleteArgs.add(Ptr.restore(CGF), *AI++);

      // A member 'operator delete' can take an extra 'size_t' argument.
      if (FPT->getNumArgs() == NumPlacementArgs + 2) {
        RValue RV = AllocSize.restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        RValue RV = getPlacementArgs()[I].restore(CGF);
        DeleteArgs.add(RV, *AI++);
      }

      // Call 'operator delete'.
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFunctionCall(DeleteArgs, FPT),
                   CGF.CGM.GetAddrOfFunction(OperatorDelete),
                   ReturnValueSlot(), DeleteArgs, OperatorDelete);
    }
  };
}
/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
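///
/// (Illustrative: the conditional case arises when the new-expression is
/// itself only conditionally evaluated, e.g. 'b ? new T(x) : p', so the
/// pointer and arguments must be saved in a form that dominates the cleanup.)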
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  llvm::Value *NewPtr,
                                  llvm::Value *AllocSize,
                                  const CallArgList &NewArgs) {
  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    CallDeleteDuringNew *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<CallDeleteDuringNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 NewPtr, AllocSize);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
      Cleanup->setPlacementArg(I, NewArgs[I+1].RV);

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  CallDeleteDuringConditionalNew *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<CallDeleteDuringConditionalNew>(EHCleanup,
                                                 E->getNumPlacementArgs(),
                                                 E->getOperatorDelete(),
                                                 SavedNewPtr,
                                                 SavedAllocSize);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I)
    Cleanup->setPlacementArg(I,
                     DominatingValue<RValue>::save(CGF, NewArgs[I+1].RV));

  CGF.initFullExprCleanup();
}
llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();
  const FunctionProtoType *allocatorType =
    allocator->getType()->castAs<FunctionProtoType>();

  CallArgList allocatorArgs;

  // The allocation size is the first argument.
  QualType sizeType = getContext().getSizeType();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // there are initializers.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    if (const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer()))
      minElements = ILE->getNumInits();
  }

  llvm::Value *numElements = 0;
  llvm::Value *allocSizeWithoutCookie = 0;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);

  allocatorArgs.add(RValue::get(allocSize), sizeType);

  // Emit the rest of the arguments.
  // FIXME: Ideally, this should just use EmitCallArgs.
  CXXNewExpr::const_arg_iterator placementArg = E->placement_arg_begin();

  // First, use the types from the function type.
  // We start at 1 here because the first argument (the allocation size)
  // has already been emitted.
  for (unsigned i = 1, e = allocatorType->getNumArgs(); i != e;
       ++i, ++placementArg) {
    QualType argType = allocatorType->getArgType(i);

    assert(getContext().hasSameUnqualifiedType(argType.getNonReferenceType(),
                                               placementArg->getType()) &&
           "type mismatch in call argument!");

    EmitCallArg(allocatorArgs, *placementArg, argType);
  }

  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((placementArg == E->placement_arg_end() ||
          allocatorType->isVariadic()) &&
         "Extra arguments to non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (CXXNewExpr::const_arg_iterator placementArgsEnd = E->placement_arg_end();
       placementArg != placementArgsEnd; ++placementArg) {
    EmitCallArg(allocatorArgs, *placementArg, placementArg->getType());
  }

  // Emit the allocation call.  If the allocator is a global placement
  // operator, just "inline" it directly.
  RValue RV;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(allocatorArgs.size() == 2);
    RV = allocatorArgs[1].RV;
    // TODO: kill any unnecessary computations done for the size
    // argument.
  } else {
    RV = EmitCall(CGM.getTypes().arrangeFunctionCall(allocatorArgs,
                                                     allocatorType),
                  CGM.GetAddrOfFunction(allocator), ReturnValueSlot(),
                  allocatorArgs, allocator);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec; for this part, we inline
  // CXXNewExpr::shouldNullCheckAllocation()) and we have an
  // interesting initializer.
  bool nullCheck = allocatorType->isNothrow(getContext()) &&
    (!allocType.isPODType(getContext()) || E->hasInitializer());
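
  // For example (illustrative), 'new (std::nothrow) T(...)' may yield null,
  // so T's constructor must only run on the not-null path emitted below.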
  llvm::BasicBlock *nullCheckBB = 0;
  llvm::BasicBlock *contBB = 0;

  llvm::Value *allocation = RV.getScalarVal();
  unsigned AS =
    cast<llvm::PointerType>(allocation->getType())->getAddressSpace();

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull = Builder.CreateIsNull(allocation, "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = 0;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }

  llvm::Type *elementPtrTy
    = ConvertTypeForMem(allocType)->getPointerTo(AS);
  llvm::Value *result = Builder.CreateBitCast(allocation, elementPtrTy);

  EmitNewInitializer(*this, E, allocType, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type.  If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result->getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(result->getType(), 2);
    PHI->addIncoming(result, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(result->getType()),
                     nullCheckBB);

    result = PHI;
  }

  return result;
}
void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr,
                                     QualType DeleteTy) {
  assert(DeleteFD->getOverloadedOperator() == OO_Delete);

  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  // Check if we need to pass the size to the delete operator.
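  // (Illustrative: a two-argument form such as
  // 'void operator delete(void *p, size_t size)' receives the object size.)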
  llvm::Value *Size = 0;
  QualType SizeTy;
  if (DeleteFTy->getNumArgs() == 2) {
    SizeTy = DeleteFTy->getArgType(1);
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    Size = llvm::ConstantInt::get(ConvertType(SizeTy),
                                  DeleteTypeSize.getQuantity());
  }

  QualType ArgTy = DeleteFTy->getArgType(0);
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  if (Size)
    DeleteArgs.add(RValue::get(Size), SizeTy);

  // Emit the call to delete.
  EmitCall(CGM.getTypes().arrangeFunctionCall(DeleteArgs, DeleteFTy),
           CGM.GetAddrOfFunction(DeleteFD), ReturnValueSlot(),
           DeleteArgs, DeleteFD);
}
namespace {
  /// Calls the given 'operator delete' on a single object.
  struct CallObjectDelete : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    QualType ElementType;

    CallObjectDelete(llvm::Value *Ptr,
                     const FunctionDecl *OperatorDelete,
                     QualType ElementType)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
    }
  };
}

/// Emit the code for deleting a single object.
static void EmitObjectDelete(CodeGenFunction &CGF,
                             const FunctionDecl *OperatorDelete,
                             llvm::Value *Ptr,
                             QualType ElementType,
                             bool UseGlobalDelete) {
  // Find the destructor for the type, if applicable.  If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = 0;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        if (UseGlobalDelete) {
          // If we're supposed to call the global delete, make sure we do so
          // even if the destructor throws.
          CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                                    Ptr, OperatorDelete,
                                                    ElementType);
        }

        llvm::Type *Ty =
          CGF.getTypes().GetFunctionType(
            CGF.getTypes().arrangeCXXDestructor(Dtor, Dtor_Complete));

        llvm::Value *Callee
          = CGF.BuildVirtualCall(Dtor,
                                 UseGlobalDelete? Dtor_Complete : Dtor_Deleting,
                                 Ptr, Ty);
        CGF.EmitCXXMemberCall(Dtor, Callee, ReturnValueSlot(), Ptr, /*VTT=*/0,
                              0, 0);

        if (UseGlobalDelete) {
          CGF.PopCleanupBlock();
        }

        return;
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr, OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false, Ptr);
  else if (CGF.getLangOpts().ObjCAutoRefCount &&
           ElementType->isObjCLifetimeType()) {
    switch (ElementType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong: {
      // Load the pointer value.
      llvm::Value *PtrValue = CGF.Builder.CreateLoad(Ptr,
                                          ElementType.isVolatileQualified());

      CGF.EmitARCRelease(PtrValue, /*precise*/ true);
      break;
    }

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  CGF.PopCleanupBlock();
}
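
// For illustration (a sketch): given a polymorphic class,
//
//   struct Base { virtual ~Base(); };
//   void f(Base *b) { delete b; }
//
// the virtual case above emits a single vcall to the Itanium-ABI "deleting
// destructor" (D0), which destroys the most derived object and frees it.
// With '::delete b', the complete-object destructor is called instead and
// the CallObjectDelete cleanup performs the global deallocation.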

namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;
    QualType ElementType;
    CharUnits CookieSize;

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const FunctionProtoType *DeleteFTy =
        OperatorDelete->getType()->getAs<FunctionProtoType>();
      assert(DeleteFTy->getNumArgs() == 1 || DeleteFTy->getNumArgs() == 2);

      CallArgList Args;

      // Pass the pointer as the first argument.
      QualType VoidPtrTy = DeleteFTy->getArgType(0);
      llvm::Value *DeletePtr
        = CGF.Builder.CreateBitCast(Ptr, CGF.ConvertType(VoidPtrTy));
      Args.add(RValue::get(DeletePtr), VoidPtrTy);

      // Pass the original requested size as the second argument.
      if (DeleteFTy->getNumArgs() == 2) {
        QualType size_t = DeleteFTy->getArgType(1);
        llvm::IntegerType *SizeTy
          = cast<llvm::IntegerType>(CGF.ConvertType(size_t));

        CharUnits ElementTypeSize =
          CGF.CGM.getContext().getTypeSizeInChars(ElementType);

        // The size of an element, multiplied by the number of elements.
        llvm::Value *Size
          = llvm::ConstantInt::get(SizeTy, ElementTypeSize.getQuantity());
        Size = CGF.Builder.CreateMul(Size, NumElements);

        // Plus the size of the cookie if applicable.
        if (!CookieSize.isZero()) {
          llvm::Value *CookieSizeV
            = llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity());
          Size = CGF.Builder.CreateAdd(Size, CookieSizeV);
        }

        Args.add(RValue::get(Size), size_t);
      }

      // Emit the call to delete.
      CGF.EmitCall(CGF.getTypes().arrangeFunctionCall(Args, DeleteFTy),
                   CGF.CGM.GetAddrOfFunction(OperatorDelete),
                   ReturnValueSlot(), Args, OperatorDelete);
    }
  };
}
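
// For illustration: for a sized 'operator delete[]', the cleanup above
// recomputes the size originally requested from 'operator new[]', i.e.
//
//   size = sizeof(element) * NumElements + CookieSize
//
// so e.g. 'delete [] p' for 'p = new T[n]' passes n * sizeof(T) plus the
// array cookie (a sketch of the arithmetic, not additional behavior).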

/// Emit the code for deleting an array of objects.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            llvm::Value *deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = 0;
  llvm::Value *allocatedPtr = 0;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    llvm::Value *arrayEnd =
      CGF.Builder.CreateInBoundsGEP(deletedPtr, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(deletedPtr, arrayEnd, elementType,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}
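
// For illustration (a sketch): the cleanup is pushed before the element
// destructors run, so for
//
//   T *p = new T[n];
//   delete [] p;
//
// the elements are destroyed in place first, and 'operator delete[]' is
// still reached on the exceptional path if some ~T() throws; popping the
// cleanup emits the deallocation on the normal path as well.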

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  // Get at the argument before we performed the implicit conversion
  // to void*.
  const Expr *Arg = E->getArgument();
  while (const ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg)) {
    if (ICE->getCastKind() != CK_UserDefinedConversion &&
        ICE->getType()->isVoidPointerType())
      Arg = ICE->getSubExpr();
    else
      break;
  }

  llvm::Value *Ptr = EmitScalarExpr(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr, "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  // We might be deleting a pointer to array.  If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  QualType DeleteTy = Arg->getType()->getAs<PointerType>()->getPointeeType();
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Builder.CreateInBoundsGEP(Ptr, GEP, "del.first");
  }

  assert(ConvertTypeForMem(DeleteTy) ==
         cast<llvm::PointerType>(Ptr->getType())->getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
  } else {
    EmitObjectDelete(*this, E->getOperatorDelete(), Ptr, DeleteTy,
                     E->isGlobalDelete());
  }

  EmitBlock(DeleteEnd);
}
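
// For illustration (a sketch): the array-unpeeling loop above handles
// deleting a pointer to an array type, e.g.
//
//   A (*p)[3][7] = new A[n][3][7];
//   delete [] p;
//
// where Ptr, of IR type [3 x [7 x %A]]*, is GEP'd with {0, 0, 0} down to
// an %A* before element destruction and deallocation are emitted.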

static llvm::Constant *getBadTypeidFn(CodeGenFunction &CGF) {
  // void __cxa_bad_typeid();
  llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
}

static void EmitBadTypeidCall(CodeGenFunction &CGF) {
  llvm::Value *Fn = getBadTypeidFn(CGF);
  CGF.EmitCallOrInvoke(Fn).setDoesNotReturn();
  CGF.Builder.CreateUnreachable();
}

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF,
                                         const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  llvm::Value *ThisPtr = CGF.EmitLValue(E).getAddress();

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator
  //   to a pointer and the pointer is a null pointer value, the typeid
  //   expression throws the std::bad_typeid exception.
  if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E->IgnoreParens())) {
    if (UO->getOpcode() == UO_Deref) {
      llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
      llvm::BasicBlock *EndBlock =
        CGF.createBasicBlock("typeid.end");

      llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr);
      CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

      CGF.EmitBlock(BadTypeidBlock);
      EmitBadTypeidCall(CGF);
      CGF.EmitBlock(EndBlock);
    }
  }

  llvm::Value *Value = CGF.GetVTablePtr(ThisPtr,
                                        StdTypeInfoPtrTy->getPointerTo());

  // Load the type info.
  Value = CGF.Builder.CreateConstInBoundsGEP1_64(Value, -1ULL);
  return CGF.Builder.CreateLoad(Value);
}
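
// For illustration: in the Itanium ABI the std::type_info pointer lives one
// slot before the vtable's address point, hence the GEP to index -1 through
// the vptr above. In IR terms, roughly (a sketch):
//
//   %vtable  = load %obj           ; the vptr
//   %ti.slot = gep %vtable, -1     ; slot holding the type_info pointer
//   %ti      = load %ti.slot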

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
      CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand());
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  if (E->getExprOperand()->isGLValue()) {
    if (const RecordType *RT =
          E->getExprOperand()->getType()->getAs<RecordType>()) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
      if (RD->isPolymorphic())
        return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                    StdTypeInfoPtrTy);
    }
  }

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}
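
// For illustration (a sketch): only the polymorphic-glvalue case needs a
// runtime vtable lookup; everything else folds to a constant descriptor:
//
//   typeid(int)       // type operand: static RTTI descriptor
//   typeid(2 + 2)     // non-polymorphic operand: static RTTI descriptor
//   typeid(*basePtr)  // polymorphic glvalue: loaded from the vtable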

static llvm::Constant *getDynamicCastFn(CodeGenFunction &CGF) {
  // void *__dynamic_cast(const void *sub,
  //                      const abi::__class_type_info *src,
  //                      const abi::__class_type_info *dst,
  //                      std::ptrdiff_t src2dst_offset);

  llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
  llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Type *Args[4] = { Int8PtrTy, Int8PtrTy, Int8PtrTy, PtrDiffTy };

  llvm::FunctionType *FTy =
    llvm::FunctionType::get(Int8PtrTy, Args, false);

  return CGF.CGM.CreateRuntimeFunction(FTy, "__dynamic_cast");
}

static llvm::Constant *getBadCastFn(CodeGenFunction &CGF) {
  // void __cxa_bad_cast();
  llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_cast");
}

static void EmitBadCastCall(CodeGenFunction &CGF) {
  llvm::Value *Fn = getBadCastFn(CGF);
  CGF.EmitCallOrInvoke(Fn).setDoesNotReturn();
  CGF.Builder.CreateUnreachable();
}

static llvm::Value *
EmitDynamicCastCall(CodeGenFunction &CGF, llvm::Value *Value,
                    QualType SrcTy, QualType DestTy,
                    llvm::BasicBlock *CastEnd) {
  llvm::Type *PtrDiffLTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);

  if (const PointerType *PTy = DestTy->getAs<PointerType>()) {
    if (PTy->getPointeeType()->isVoidType()) {
      // C++ [expr.dynamic.cast]p7:
      //   If T is "pointer to cv void," then the result is a pointer to the
      //   most derived object pointed to by v.

      // Get the vtable pointer.
      llvm::Value *VTable =
        CGF.GetVTablePtr(Value, PtrDiffLTy->getPointerTo());

      // Get the offset-to-top from the vtable.
      llvm::Value *OffsetToTop =
        CGF.Builder.CreateConstInBoundsGEP1_64(VTable, -2ULL);
      OffsetToTop = CGF.Builder.CreateLoad(OffsetToTop, "offset.to.top");

      // Finally, add the offset to the pointer.
      Value = CGF.EmitCastToVoidPtr(Value);
      Value = CGF.Builder.CreateInBoundsGEP(Value, OffsetToTop);

      return CGF.Builder.CreateBitCast(Value, DestLTy);
    }
  }

  QualType SrcRecordTy;
  QualType DestRecordTy;

  if (const PointerType *DestPTy = DestTy->getAs<PointerType>()) {
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");
  assert(DestRecordTy->isRecordType() && "dest type must be a record type!");

  llvm::Value *SrcRTTI =
    CGF.CGM.GetAddrOfRTTIDescriptor(SrcRecordTy.getUnqualifiedType());
  llvm::Value *DestRTTI =
    CGF.CGM.GetAddrOfRTTIDescriptor(DestRecordTy.getUnqualifiedType());

  // FIXME: Actually compute a hint here.
  llvm::Value *OffsetHint = llvm::ConstantInt::get(PtrDiffLTy, -1ULL);

  // Emit the call to __dynamic_cast.
  Value = CGF.EmitCastToVoidPtr(Value);
  Value = CGF.Builder.CreateCall4(getDynamicCastFn(CGF), Value,
                                  SrcRTTI, DestRTTI, OffsetHint);
  Value = CGF.Builder.CreateBitCast(Value, DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (DestTy->isReferenceType()) {
    llvm::BasicBlock *BadCastBlock =
      CGF.createBasicBlock("dynamic_cast.bad_cast");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(Value);
    CGF.Builder.CreateCondBr(IsNull, BadCastBlock, CastEnd);

    CGF.EmitBlock(BadCastBlock);
    EmitBadCastCall(CGF);
  }

  return Value;
}
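
// For illustration (a sketch): the paths above correspond to
//
//   dynamic_cast<void*>(p)     // no runtime call; walks offset-to-top,
//                              // stored at vtable slot -2 in the Itanium ABI
//   dynamic_cast<Derived*>(b)  // plain __dynamic_cast call; may yield null
//   dynamic_cast<Derived&>(b)  // null result branches to __cxa_bad_cast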

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  EmitBadCastCall(CGF);

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(llvm::Value *Value,
                                              const CXXDynamicCastExpr *DCE) {
  QualType DestTy = DCE->getTypeAsWritten();

  if (DCE->isAlwaysNull())
    return EmitDynamicCastToNull(*this, DestTy);

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the
  //   result is the null pointer value of type T.
  bool ShouldNullCheckSrcValue = SrcTy->isPointerType();

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  Value = EmitDynamicCastCall(*this, Value, SrcTy, DestTy, CastEnd);

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);

    Value = PHI;
  }

  return Value;
}
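
// For illustration (a sketch): the null short-circuit above implements
// [expr.dynamic.cast]p4 for the pointer form, e.g.
//
//   Base *b = 0;
//   Derived *d = dynamic_cast<Derived*>(b);  // no runtime call; d is null
//
// The reference form skips the check, since a reference must be bound to a
// valid object to begin with.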

void CodeGenFunction::EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Slot) {
  RunCleanupsScope Scope(*this);

  CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
  for (LambdaExpr::capture_init_iterator i = E->capture_init_begin(),
                                         e = E->capture_init_end();
       i != e; ++i, ++CurField) {
    // Emit initialization
    LValue LV = EmitLValueForFieldInitialization(Slot.getAddr(), *CurField, 0);
    ArrayRef<VarDecl *> ArrayIndexes;
    if (CurField->getType()->isArrayType())
      ArrayIndexes = E->getCaptureInitIndexVars(i);
    EmitInitializerForField(*CurField, LV, *i, ArrayIndexes);
  }
}
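
// For illustration (a sketch): each capture becomes a field of the closure
// class, initialized in order by the loop above, e.g.
//
//   int x = 0;
//   int a[4] = {1, 2, 3, 4};
//   auto l = [x, a] { return x + a[0]; };
//
// emits field initializations for 'x' and 'a'; array captures carry the
// per-dimension index variables used to emit the element-wise copy loop.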