//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CGCUDARuntime.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;

namespace {
struct MemberCallInfo {
  RequiredArgs ReqArgs;
  // Number of prefix arguments for the call. Ignores the `this` pointer.
  unsigned PrefixSize;
};
}

static MemberCallInfo
commonEmitCXXMemberOrOperatorCall(CodeGenFunction &CGF, const CXXMethodDecl *MD,
                                  llvm::Value *This, llvm::Value *ImplicitParam,
                                  QualType ImplicitParamTy, const CallExpr *CE,
                                  CallArgList &Args, CallArgList *RtlArgs) {
  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // Push the this ptr.
  const CXXRecordDecl *RD =
      CGF.CGM.getCXXABI().getThisArgumentTypeForMethod(MD);
  Args.add(RValue::get(This), CGF.getTypes().DeriveThisType(RD, MD));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());
  unsigned PrefixSize = Args.size() - 1;

  // And the rest of the call args.
  if (RtlArgs) {
    // Special case: if the caller emitted the arguments right-to-left already
    // (prior to emitting the *this argument), we're done. This happens for
    // assignment operators.
    Args.addFrom(*RtlArgs);
  } else if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT, drop_begin(CE->arguments(), ArgsToSkip),
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }

  return {required, PrefixSize};
}
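
// Emit a call to a non-static member function or overloaded operator: the
// argument list is arranged with 'this' as the first argument, the matching
// CGFunctionInfo is computed, and the call is lowered to an LLVM call. For
// example, 'obj.f(x)' is emitted as a call to 'f' with '&obj' prepended to
// the argument list.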
RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
    const CXXMethodDecl *MD, const CGCallee &Callee,
    ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE, CallArgList *RtlArgs) {
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  CallArgList Args;
  MemberCallInfo CallInfo = commonEmitCXXMemberOrOperatorCall(
      *this, MD, This, ImplicitParam, ImplicitParamTy, CE, Args, RtlArgs);
  auto &FnInfo = CGM.getTypes().arrangeCXXMethodCall(
      Args, FPT, CallInfo.ReqArgs, CallInfo.PrefixSize);
  return EmitCall(FnInfo, Callee, ReturnValue, Args, nullptr,
                  CE ? CE->getExprLoc() : SourceLocation());
}

RValue CodeGenFunction::EmitCXXDestructorCall(
    GlobalDecl Dtor, const CGCallee &Callee, llvm::Value *This, QualType ThisTy,
    llvm::Value *ImplicitParam, QualType ImplicitParamTy, const CallExpr *CE) {
  const CXXMethodDecl *DtorDecl = cast<CXXMethodDecl>(Dtor.getDecl());

  assert(!ThisTy.isNull());
  assert(ThisTy->getAsCXXRecordDecl() == DtorDecl->getParent() &&
         "Pointer/Object mixup");

  LangAS SrcAS = ThisTy.getAddressSpace();
  LangAS DstAS = DtorDecl->getMethodQualifiers().getAddressSpace();
  if (SrcAS != DstAS) {
    QualType DstTy = DtorDecl->getThisType();
    llvm::Type *NewType = CGM.getTypes().ConvertType(DstTy);
    This = getTargetHooks().performAddrSpaceCast(*this, This, SrcAS, DstAS,
                                                 NewType);
  }

  CallArgList Args;
  commonEmitCXXMemberOrOperatorCall(*this, DtorDecl, This, ImplicitParam,
                                    ImplicitParamTy, CE, Args, nullptr);
  return EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(Dtor), Callee,
                  ReturnValueSlot(), Args);
}

RValue CodeGenFunction::EmitCXXPseudoDestructorExpr(
    const CXXPseudoDestructorExpr *E) {
  QualType DestroyedType = E->getDestroyedType();
  if (DestroyedType.hasStrongOrWeakObjCLifetime()) {
    // Automatic Reference Counting:
    //   If the pseudo-expression names a retainable object with weak or
    //   strong lifetime, the object shall be released.
    Expr *BaseExpr = E->getBase();
    Address BaseValue = Address::invalid();
    Qualifiers BaseQuals;

    // If this is s.x, emit s as an lvalue. If it is s->x, emit s as a scalar.
    if (E->isArrow()) {
      BaseValue = EmitPointerWithAlignment(BaseExpr);
      const PointerType *PTy = BaseExpr->getType()->getAs<PointerType>();
      BaseQuals = PTy->getPointeeType().getQualifiers();
    } else {
      LValue BaseLV = EmitLValue(BaseExpr);
      BaseValue = BaseLV.getAddress();
      QualType BaseTy = BaseExpr->getType();
      BaseQuals = BaseTy.getQualifiers();
    }

    switch (DestroyedType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      EmitARCRelease(Builder.CreateLoad(BaseValue,
                                        DestroyedType.isVolatileQualified()),
                     ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      EmitARCDestroyWeak(BaseValue);
      break;
    }
  } else {
    // C++ [expr.pseudo]p1:
    //   The result shall only be used as the operand for the function call
    //   operator (), and the result of such a call has type void. The only
    //   effect is the evaluation of the postfix-expression before the dot or
    //   arrow.
    EmitIgnoredExpr(E->getBase());
  }

  return RValue::get(nullptr);
}
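
// Return the CXXRecordDecl of E's type, looking through a pointer type if E
// has pointer-to-record type.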
static CXXRecordDecl *getCXXRecord(const Expr *E) {
  QualType T = E->getType();
  if (const PointerType *PTy = T->getAs<PointerType>())
    T = PTy->getPointeeType();
  const RecordType *Ty = T->castAs<RecordType>();
  return cast<CXXRecordDecl>(Ty->getDecl());
}

// Note: This function also emits constructor calls, in support of the MSVC
// extension that allows explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    CGCallee callee =
        CGCallee::forDirect(CGM.GetAddrOfFunction(MD), GlobalDecl(MD));
    return EmitCall(getContext().getPointerType(MD->getType()), callee, CE,
                    ReturnValue);
  }

  bool HasQualifier = ME->hasQualifier();
  NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  bool IsArrow = ME->isArrow();
  const Expr *Base = ME->getBase();

  return EmitCXXMemberOrOperatorMemberCallExpr(
      CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
}

RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));

  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;

  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall &&
      MD->getDevirtualizedMethod(Base, getLangOpts().AppleKext)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->ignoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits from the return type of MD
      // and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }

  // C++17 demands that we evaluate the RHS of a (possibly-compound) assignment
  // operator before the LHS.
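  // For example, in 'a = f()' with an overloaded 'operator=', the call to
  // 'f()' must be evaluated before 'a' (C++17 [expr.ass]).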
  CallArgList RtlArgStorage;
  CallArgList *RtlArgs = nullptr;
  if (auto *OCE = dyn_cast<CXXOperatorCallExpr>(CE)) {
    if (OCE->isAssignmentOp()) {
      RtlArgs = &RtlArgStorage;
      EmitCallArgs(*RtlArgs, MD->getType()->castAs<FunctionProtoType>(),
                   drop_begin(CE->arguments(), 1), CE->getDirectCallee(),
                   /*ParamsToSkip*/0, EvaluationOrder::ForceRightToLeft);
    }
  }

  LValue This;
  if (IsArrow) {
    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address ThisValue = EmitPointerWithAlignment(Base, &BaseInfo, &TBAAInfo);
    This = MakeAddrLValue(ThisValue, Base->getType(), BaseInfo, TBAAInfo);
  } else {
    This = EmitLValue(Base);
  }

  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    // This is the MSVC p->Ctor::Ctor(...) extension. We assume that's
    // constructing a new complete object of type Ctor.
    assert(!RtlArgs);
    assert(ReturnValue.isNull() && "Constructor shouldn't have return value");
    CallArgList Args;
    commonEmitCXXMemberOrOperatorCall(
        *this, Ctor, This.getPointer(), /*ImplicitParam=*/nullptr,
        /*ImplicitParamTy=*/QualType(), CE, Args, nullptr);

    EmitCXXConstructorCall(Ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, This.getAddress(), Args,
                           AggValueSlot::DoesNotOverlap, CE->getExprLoc(),
                           /*NewPointerIsChecked=*/false);
    return RValue::get(nullptr);
  }

  if (MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion())) {
    if (isa<CXXDestructorDecl>(MD)) return RValue::get(nullptr);
    if (!MD->getParent()->mayInsertExtraPadding()) {
      if (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) {
        // We don't like to generate the trivial copy/move assignment operator
        // when it isn't necessary; just produce the proper effect here.
        LValue RHS = isa<CXXOperatorCallExpr>(CE)
                         ? MakeNaturalAlignAddrLValue(
                               (*RtlArgs)[0].getRValue(*this).getScalarVal(),
                               (*(CE->arg_begin() + 1))->getType())
                         : EmitLValue(*CE->arg_begin());
        EmitAggregateAssign(This, RHS, CE->getType());
        return RValue::get(This.getPointer());
      }
      llvm_unreachable("unknown trivial member function");
    }
  }

  // Compute the function type we're calling.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        GlobalDecl(Dtor, Dtor_Complete));
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);

  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object that
  //   is not of type X, or of a type derived from X, the behavior is undefined.
  SourceLocation CallLoc;
  ASTContext &C = getContext();
  if (CE)
    CallLoc = CE->getExprLoc();

  SanitizerSet SkippedChecks;
  if (const auto *CMCE = dyn_cast<CXXMemberCallExpr>(CE)) {
    auto *IOA = CMCE->getImplicitObjectArgument();
    bool IsImplicitObjectCXXThis = IsWrappedCXXThis(IOA);
    if (IsImplicitObjectCXXThis)
      SkippedChecks.set(SanitizerKind::Alignment, true);
    if (IsImplicitObjectCXXThis || isa<DeclRefExpr>(IOA))
      SkippedChecks.set(SanitizerKind::Null, true);
  }
  EmitTypeCheck(CodeGenFunction::TCK_MemberCall, CallLoc, This.getPointer(),
                C.getRecordType(CalleeDecl->getParent()),
                /*Alignment=*/CharUnits::Zero(), SkippedChecks);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;

  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(
          *this, Dtor, Dtor_Complete, This.getAddress(),
          cast<CXXMemberCallExpr>(CE));
    } else {
      GlobalDecl GD(Dtor, Dtor_Complete);
      CGCallee Callee;
      if (getLangOpts().AppleKext && Dtor->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(Dtor, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD, FInfo, Ty), GD);
      else {
        Callee = CGCallee::forDirect(CGM.GetAddrOfFunction(GD, Ty), GD);
      }

      QualType ThisTy =
          IsArrow ? Base->getType()->getPointeeType() : Base->getType();
      EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy,
                            /*ImplicitParam=*/nullptr,
                            /*ImplicitParamTy=*/QualType(), nullptr);
    }
    return RValue::get(nullptr);
  }

  // FIXME: Uses of 'MD' past this point need to be audited. We may need to use
  // 'CalleeDecl' instead.

  CGCallee Callee;
  if (UseVirtualCall) {
    Callee = CGCallee::forVirtual(CE, MD, This.getAddress(), Ty);
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      llvm::Value *VTable;
      const CXXRecordDecl *RD;
      std::tie(VTable, RD) =
          CGM.getCXXABI().LoadVTablePtr(*this, This.getAddress(),
                                        MD->getParent());
      EmitVTablePtrCheckForCall(RD, VTable, CFITCK_NVCall, CE->getBeginLoc());
    }

    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(MD, Ty), GlobalDecl(MD));
    else {
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(DevirtualizedMethod, Ty),
                              GlobalDecl(DevirtualizedMethod));
    }
  }

  if (MD->isVirtual()) {
    Address NewThisAddr =
        CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
            *this, CalleeDecl, This.getAddress(), UseVirtualCall);
    This.setAddress(NewThisAddr);
  }

  return EmitCXXMemberOrOperatorCall(
      CalleeDecl, Callee, ReturnValue, This.getPointer(),
      /*ImplicitParam=*/nullptr, QualType(), CE, RtlArgs);
}
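
// Emit a call through a pointer to member function, i.e. the built-in
// '.*' and '->*' operators, e.g. '(obj.*pmf)(args)' or '(ptr->*pmf)(args)'.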
RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const auto *MPT = MemFnExpr->getType()->castAs<MemberPointerType>();
  const auto *FPT = MPT->getPointeeType()->castAs<FunctionProtoType>();
  const auto *RD =
      cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());

  // Emit the 'this' pointer.
  Address This = Address::invalid();
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitPointerWithAlignment(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress();

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This.getPointer(),
                QualType(MPT->getClass(), 0));

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Ask the ABI to load the callee. Note that This is modified.
  llvm::Value *ThisPtrForCall = nullptr;
  CGCallee Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This,
                                                      ThisPtrForCall, MemFnPtr,
                                                      MPT);

  CallArgList Args;

  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(ThisPtrForCall), ThisType);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arguments());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required,
                                                      /*PrefixSize=*/0),
                  Callee, ReturnValue, Args, nullptr, E->getExprLoc());
}

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return EmitCXXMemberOrOperatorMemberCallExpr(
      E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
      /*IsArrow=*/false, E->getArg(0));
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}

static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            Address DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.Builder.CreateElementBitCast(DestPtr, CGF.Int8Ty);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits NVSize = Layout.getNonVirtualSize();

  // We cannot simply zero-initialize the entire base sub-object if vbptrs are
  // present; they are initialized by the most derived class before the
  // constructor is called.
  SmallVector<std::pair<CharUnits, CharUnits>, 1> Stores;
  Stores.emplace_back(CharUnits::Zero(), NVSize);

  // Each store is split by the existence of a vbptr.
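  // For example, with an 8-byte vbptr at offset 8 in a 24-byte non-virtual
  // region, the single store [0, 24) is split into [0, 8) and [16, 24).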
  CharUnits VBPtrWidth = CGF.getPointerSize();
  std::vector<CharUnits> VBPtrOffsets =
      CGF.CGM.getCXXABI().getVBPtrOffsets(Base);
  for (CharUnits VBPtrOffset : VBPtrOffsets) {
    // Stop before we hit any virtual base pointers located in virtual bases.
    if (VBPtrOffset >= NVSize)
      break;
    std::pair<CharUnits, CharUnits> LastStore = Stores.pop_back_val();
    CharUnits LastStoreOffset = LastStore.first;
    CharUnits LastStoreSize = LastStore.second;

    CharUnits SplitBeforeOffset = LastStoreOffset;
    CharUnits SplitBeforeSize = VBPtrOffset - SplitBeforeOffset;
    assert(!SplitBeforeSize.isNegative() && "negative store size!");
    if (!SplitBeforeSize.isZero())
      Stores.emplace_back(SplitBeforeOffset, SplitBeforeSize);

    CharUnits SplitAfterOffset = VBPtrOffset + VBPtrWidth;
    CharUnits SplitAfterSize = LastStoreSize - SplitAfterOffset;
    assert(!SplitAfterSize.isNegative() && "negative store size!");
    if (!SplitAfterSize.isZero())
      Stores.emplace_back(SplitAfterOffset, SplitAfterSize);
  }

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  llvm::Constant *NullConstantForBase = CGF.CGM.EmitNullConstantForBase(Base);
  if (!NullConstantForBase->isNullValue()) {
    llvm::GlobalVariable *NullVariable = new llvm::GlobalVariable(
        CGF.CGM.getModule(), NullConstantForBase->getType(),
        /*isConstant=*/true, llvm::GlobalVariable::PrivateLinkage,
        NullConstantForBase, Twine());

    CharUnits Align = std::max(Layout.getNonVirtualAlignment(),
                               DestPtr.getAlignment());
    NullVariable->setAlignment(Align.getQuantity());

    Address SrcPtr = Address(CGF.EmitCastToVoidPtr(NullVariable), Align);

    // Get and call the appropriate llvm.memcpy overload.
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemCpy(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.CreateConstInBoundsByteGEP(SrcPtr, StoreOffset),
          StoreSizeVal);
    }

  // Otherwise, just memset the whole thing to zero. This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  } else {
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemSet(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.getInt8(0), StoreSizeVal);
    }
  }
}

void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddress(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddress(),
                                      CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  // The temporary check is required because Sema sets this on NRVO
  // returns.
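  // For example, in 'X x = X(1);' (pre-C++17) the copy from the temporary can
  // be elided and the temporary constructed directly into 'x'.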
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    assert(getContext().hasSameUnqualifiedType(E->getType(),
                                               E->getArg(0)->getType()));
    if (E->getArg(0)->isTemporaryObject(getContext(), CD->getParent())) {
      EmitAggExpr(E->getArg(0), Dest);
      return;
    }
  }

  if (const ArrayType *arrayType
        = getContext().getAsArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddress(), E,
                               Dest.isSanitizerChecked());
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      LLVM_FALLTHROUGH;

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest, E);
  }
}

void CodeGenFunction::EmitSynthesizedCXXCopyCtor(Address Dest, Address Src,
                                                 const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME. Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}
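
// An array cookie is extra storage, allocated in front of the array elements,
// in which the element count is stashed so that the matching 'delete[]' knows
// how many destructors to run (see the Itanium C++ ABI's array-cookie rules).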
static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}

static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements =
    ConstantEmitter(CGF).tryEmitAbstract(*e->getArraySize(), e->getType());
  if (!numElements)
    numElements = CGF.EmitScalarExpr(*e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));
  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t. That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
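  // For example, 'new int[n]' on a typical 64-bit target computes n * 4
  // (plus sizeof(size_t) if a cookie is needed); if n is negative or the
  // arithmetic overflows, the final size is forced to all-ones so that
  // operator new fails.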
  bool isSigned
    = (*e->getArraySize())->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t. If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that. This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie. This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that.
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow. Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                          llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                          llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary. This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary. Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
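    // Note: llvm.umul.with.overflow returns a {size_t, i1} pair; the i1 is
    // folded into the running overflow flag and the product becomes the new
    // size.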
    if (typeSizeMultiplier != 1) {
      llvm::Function *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Function *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
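
// Emit the initializer for a single element of a new'd object, dispatching on
// whether the allocated type is evaluated as a scalar, a complex value, or an
// aggregate.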
static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, Address NewPtr,
                                    AggValueSlot::Overlap_t MayOverlap) {
  // FIXME: Refactor with EmitExprAsInit.
  switch (CGF.getEvaluationKind(AllocType)) {
  case TEK_Scalar:
    CGF.EmitScalarInit(Init, nullptr,
                       CGF.MakeAddrLValue(NewPtr, AllocType), false);
    return;
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType),
                                  /*isInit*/ true);
    return;
  case TEK_Aggregate: {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased,
                              MayOverlap, AggValueSlot::IsNotZeroed,
                              AggValueSlot::IsSanitizerChecked);
    CGF.EmitAggExpr(Init, Slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}

void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    Address BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  Address CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  Address EndOfInit = Address::invalid();
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  CharUnits ElementSize = getContext().getTypeSizeInChars(ElementType);
  CharUnits ElementAlign =
    BeginPtr.getAlignment().alignmentOfArrayElement(ElementSize);

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize, false);
    return true;
  };
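
  // For example, 'new int[n]()' value-initializes every element; since int is
  // zero-initializable, the whole allocation can be filled with one memset.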

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    // Initializing from a (braced) string literal is a special case; the init
    // list element does not initialize a (single) array element.
    if (ILE->isStringLiteralInit()) {
      // Initialize the initial portion of length equal to that of the string
      // literal. The allocation must be for at least this much; we emitted a
      // check for that earlier.
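      // For example, 'new char[8]{"abc"}' copies the four bytes of "abc"
      // (including the terminating NUL) and zeroes the remaining four.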
      AggValueSlot Slot =
          AggValueSlot::forAddr(CurPtr, ElementType.getQualifiers(),
                                AggValueSlot::IsDestructed,
                                AggValueSlot::DoesNotNeedGCBarriers,
                                AggValueSlot::IsNotAliased,
                                AggValueSlot::DoesNotOverlap,
                                AggValueSlot::IsNotZeroed,
                                AggValueSlot::IsSanitizerChecked);
      EmitAggExpr(ILE->getInit(0), Slot);

      // Move past these elements.
      InitListElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
      CurPtr =
          Address(Builder.CreateInBoundsGEP(CurPtr.getPointer(),
                                            Builder.getSize(InitListElements),
                                            "string.init.end"),
                  CurPtr.getAlignment().alignmentAtOffset(InitListElements *
                                                          ElementSize));

      // Zero out the rest, if any remain.
      llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
      if (!ConstNum || !ConstNum->equalsInt(InitListElements)) {
        bool OK = TryMemsetInitialization();
        (void)OK;
        assert(OK && "couldn't memset character type?");
      }
      return;
    }

    InitListElements = ILE->getNumInits();

    // If this is a multi-dimensional array new, we will initialize multiple
    // elements with each init list element.
    QualType AllocType = E->getAllocatedType();
    if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
            AllocType->getAsArrayTypeUnsafe())) {
      ElementTy = ConvertTypeForMem(AllocType);
      CurPtr = Builder.CreateElementBitCast(CurPtr, ElementTy);
      InitListElements *= getContext().getConstantArrayElementCount(CAT);
    }

    // Enter a partial-destruction Cleanup if necessary.
    if (needsEHCleanup(DtorKind)) {
      // In principle we could tell the Cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex. Therefore we go through an
      // alloca.
      EndOfInit = CreateTempAlloca(BeginPtr.getType(), getPointerAlign(),
                                   "array.init.end");
      CleanupDominator = Builder.CreateStore(BeginPtr.getPointer(), EndOfInit);
      pushIrregularPartialArrayCleanup(BeginPtr.getPointer(), EndOfInit,
                                       ElementType, ElementAlign,
                                       getDestroyer(DtorKind));
      Cleanup = EHStack.stable_begin();
    }

    CharUnits StartAlign = CurPtr.getAlignment();
    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element. TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (EndOfInit.isValid()) {
        auto FinishedPtr =
          Builder.CreateBitCast(CurPtr.getPointer(), BeginPtr.getType());
        Builder.CreateStore(FinishedPtr, EndOfInit);
      }
      // FIXME: If the last initializer is an incomplete initializer list for
      // an array, and we have an array filler, we can fold together the two
      // initialization loops.
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
                              ILE->getInit(i)->getType(), CurPtr,
                              AggValueSlot::DoesNotOverlap);
      CurPtr = Address(Builder.CreateInBoundsGEP(CurPtr.getPointer(),
                                                 Builder.getSize(1),
                                                 "array.exp.next"),
                       StartAlign.alignmentAtOffset((i + 1) * ElementSize));
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();

    // Extract the initializer for the individual array elements by pulling
    // out the array filler from all the nested initializer lists. This avoids
    // generating a nested loop for the initialization.
    while (Init && Init->getType()->isConstantArrayType()) {
      auto *SubILE = dyn_cast<InitListExpr>(Init);
      if (!SubILE)
        break;
      assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
      Init = SubILE->getArrayFiller();
    }

    // Switch back to initializing one base element at a time.
    CurPtr = Builder.CreateBitCast(CurPtr, BeginPtr.getType());
  }

  // If all elements have already been initialized, skip any further
  // initialization.
  llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
    // If there was a Cleanup, deactivate it.
    if (CleanupDominator)
      DeactivateCleanupBlock(Cleanup, CleanupDominator);
    return;
  }

  assert(Init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
    CXXConstructorDecl *Ctor = CCE->getConstructor();
    if (Ctor->isTrivial()) {
      // If the new-expression did not specify value-initialization, then there
      // is no initialization.
  982. if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
  983. return;
  984. if (TryMemsetInitialization())
  985. return;
  986. }
  987. // Store the new Cleanup position for irregular Cleanups.
  988. //
  989. // FIXME: Share this cleanup with the constructor call emission rather than
  990. // having it create a cleanup of its own.
  991. if (EndOfInit.isValid())
  992. Builder.CreateStore(CurPtr.getPointer(), EndOfInit);
  993. // Emit a constructor call loop to initialize the remaining elements.
  994. if (InitListElements)
  995. NumElements = Builder.CreateSub(
  996. NumElements,
  997. llvm::ConstantInt::get(NumElements->getType(), InitListElements));
  998. EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
  999. /*NewPointerIsChecked*/true,
  1000. CCE->requiresZeroInitialization());
  1001. return;
  1002. }
  1003. // If this is value-initialization, we can usually use memset.
  1004. ImplicitValueInitExpr IVIE(ElementType);
  1005. if (isa<ImplicitValueInitExpr>(Init)) {
  1006. if (TryMemsetInitialization())
  1007. return;
  1008. // Switch to an ImplicitValueInitExpr for the element type. This handles
  1009. // only one case: multidimensional array new of pointers to members. In
  1010. // all other cases, we already have an initializer for the array element.
  1011. Init = &IVIE;
  1012. }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        unsigned NumElements = 0;
        if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RType->getDecl()))
          NumElements = CXXRD->getNumBases();
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumElements;
        // FIXME: Recurse into nested InitListExprs.
        if (ILE->getNumInits() == NumElements)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumElements;
        if (ILE->getNumInits() == NumElements && TryMemsetInitialization())
          return;
      }
    }
  }
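
  // For illustration: in
  //   struct S { int a, b; };
  //   new S[n]{};
  // the trailing elements are zero-filled, which the checks above try to
  // lower to a single memset.
  //
  // Failing all of the shortcuts, emit an explicit per-element loop of the
  // following shape (a sketch; the block and value names match those created
  // below):
  //
  //             %array.isempty = icmp eq %cur, %array.end ; non-constant count
  //             br i1 %array.isempty, label %new.loop.end, label %new.loop
  //   new.loop:
  //             %array.cur = phi [ %cur, ... ], [ %array.next, %new.loop ]
  //             ... initialize the element at %array.cur ...
  //             %array.next = gep %array.cur, 1
  //             %array.atend = icmp eq %array.next, %array.end
  //             br i1 %array.atend, label %new.loop.end, label %new.loop
  //   new.loop.end: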

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
    Builder.CreateInBoundsGEP(BeginPtr.getPointer(), NumElements, "array.end");

  // If the number of elements isn't constant, we now have to check whether
  // there is anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty =
      Builder.CreateICmpEQ(CurPtr.getPointer(), EndPtr, "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
    Builder.CreatePHI(CurPtr.getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr.getPointer(), EntryBB);

  CurPtr = Address(CurPtrPhi, ElementAlign);

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit.isValid())
    Builder.CreateStore(CurPtr.getPointer(), EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr.getPointer(), CurPtr.getPointer(),
                                   ElementType, ElementAlign,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr,
                          AggValueSlot::DoesNotOverlap);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  }

  // Advance to the next element by adjusting the pointer type as necessary.
  llvm::Value *NextPtr =
      Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr.getPointer(), 1,
                                         "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());

  EmitBlock(ContBB);
}

static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType, llvm::Type *ElementTy,
                               Address NewPtr, llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  ApplyDebugLocation DL(CGF, E);
  if (E->isArray())
    CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
                                AllocSizeWithoutCookie);
  else if (const Expr *Init = E->getInitializer())
    StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr,
                            AggValueSlot::DoesNotOverlap);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *CalleeDecl,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::CallBase *CallOrInvoke;
  llvm::Constant *CalleePtr = CGF.CGM.GetAddrOfFunction(CalleeDecl);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(CalleeDecl));
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*ChainCall=*/false),
                   Callee, ReturnValueSlot(), Args, &CallOrInvoke);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleePtr);
  if (CalleeDecl->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    CallOrInvoke->addAttribute(llvm::AttributeList::FunctionIndex,
                               llvm::Attribute::Builtin);
  }

  return RV;
}
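
/// For reference, this lowers Clang's allocation builtins, e.g.:
///   void *p = __builtin_operator_new(n);  // calls the usual ::operator new
///   __builtin_operator_delete(p);         // calls the usual ::operator delete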
RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const CallExpr *TheCall,
                                                 bool IsDelete) {
  CallArgList Args;
  EmitCallArgs(Args, Type->getParamTypes(), TheCall->arguments());
  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);

  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, FD, Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}

namespace {
/// The parameters to pass to a usual operator delete.
struct UsualDeleteParams {
  bool DestroyingDelete = false;
  bool Size = false;
  bool Alignment = false;
};
}

static UsualDeleteParams getUsualDeleteParams(const FunctionDecl *FD) {
  UsualDeleteParams Params;

  const FunctionProtoType *FPT = FD->getType()->castAs<FunctionProtoType>();
  auto AI = FPT->param_type_begin(), AE = FPT->param_type_end();

  // The first argument is always a void*.
  ++AI;

  // The next parameter may be a std::destroying_delete_t.
  if (FD->isDestroyingOperatorDelete()) {
    Params.DestroyingDelete = true;
    assert(AI != AE);
    ++AI;
  }

  // Figure out what other parameters we should be implicitly passing.
  if (AI != AE && (*AI)->isIntegerType()) {
    Params.Size = true;
    ++AI;
  }

  if (AI != AE && (*AI)->isAlignValT()) {
    Params.Alignment = true;
    ++AI;
  }

  assert(AI == AE && "unexpected usual deallocation function parameter");
  return Params;
}
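
// For reference, the usual deallocation signatures recognized above include:
//   void operator delete(void *);
//   void operator delete(void *, std::size_t);
//   void operator delete(void *, std::align_val_t);
//   void operator delete(void *, std::size_t, std::align_val_t);
// and, for a class C with a C++20 destroying delete (optionally also taking
// the size and/or alignment):
//   void C::operator delete(C *, std::destroying_delete_t);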

namespace {
  /// A cleanup to call the given 'operator delete' function upon abnormal
  /// exit from a new expression. Templated on a traits type that deals with
  /// ensuring that the arguments dominate the cleanup if necessary.
  template<typename Traits>
  class CallDeleteDuringNew final : public EHScopeStack::Cleanup {
    /// Type used to hold llvm::Value*s.
    typedef typename Traits::ValueTy ValueTy;
    /// Type used to hold RValues.
    typedef typename Traits::RValueTy RValueTy;
    struct PlacementArg {
      RValueTy ArgValue;
      QualType ArgType;
    };

    unsigned NumPlacementArgs : 31;
    unsigned PassAlignmentToPlacementDelete : 1;
    const FunctionDecl *OperatorDelete;
    ValueTy Ptr;
    ValueTy AllocSize;
    CharUnits AllocAlign;

    PlacementArg *getPlacementArgs() {
      return reinterpret_cast<PlacementArg *>(this + 1);
    }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(PlacementArg);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete, ValueTy Ptr,
                        ValueTy AllocSize, bool PassAlignmentToPlacementDelete,
                        CharUnits AllocAlign)
      : NumPlacementArgs(NumPlacementArgs),
        PassAlignmentToPlacementDelete(PassAlignmentToPlacementDelete),
        OperatorDelete(OperatorDelete), Ptr(Ptr), AllocSize(AllocSize),
        AllocAlign(AllocAlign) {}

    void setPlacementArg(unsigned I, RValueTy Arg, QualType Type) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = {Arg, Type};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const FunctionProtoType *FPT =
          OperatorDelete->getType()->getAs<FunctionProtoType>();
      CallArgList DeleteArgs;

      // The first argument is always a void* (or C* for a destroying operator
      // delete for class type C).
      DeleteArgs.add(Traits::get(CGF, Ptr), FPT->getParamType(0));

      // Figure out what other parameters we should be implicitly passing.
      UsualDeleteParams Params;
      if (NumPlacementArgs) {
        // A placement deallocation function is implicitly passed an alignment
        // if the placement allocation function was, but is never passed a
        // size.
        Params.Alignment = PassAlignmentToPlacementDelete;
      } else {
        // For a non-placement new-expression, 'operator delete' can take a
        // size and/or an alignment if it has the right parameters.
        Params = getUsualDeleteParams(OperatorDelete);
      }

      assert(!Params.DestroyingDelete &&
             "should not call destroying delete in a new-expression");

      // The second argument can be a std::size_t (for non-placement delete).
      if (Params.Size)
        DeleteArgs.add(Traits::get(CGF, AllocSize),
                       CGF.getContext().getSizeType());

      // The next (second or third) argument can be a std::align_val_t, which
      // is an enum whose underlying type is std::size_t.
      // FIXME: Use the right type as the parameter type. Note that in a call
      // to operator delete(size_t, ...), we may not have it available.
      if (Params.Alignment)
        DeleteArgs.add(RValue::get(llvm::ConstantInt::get(
                           CGF.SizeTy, AllocAlign.getQuantity())),
                       CGF.getContext().getSizeType());

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        auto Arg = getPlacementArgs()[I];
        DeleteArgs.add(Traits::get(CGF, Arg.ArgValue), Arg.ArgType);
      }

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };
}

/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
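///
/// For illustration (with a hypothetical 'Arena' placement form):
///   void *operator new(std::size_t, Arena &);
///   void operator delete(void *, Arena &);
///   new (arena) S(...);
/// If S's constructor throws, the matching placement delete is invoked with
/// the same placement arguments, which are saved here so that they dominate
/// the cleanup.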
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  Address NewPtr,
                                  llvm::Value *AllocSize,
                                  CharUnits AllocAlign,
                                  const CallArgList &NewArgs) {
  unsigned NumNonPlacementArgs = E->passAlignment() ? 2 : 1;

  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    struct DirectCleanupTraits {
      typedef llvm::Value *ValueTy;
      typedef RValue RValueTy;
      static RValue get(CodeGenFunction &, ValueTy V) { return RValue::get(V); }
      static RValue get(CodeGenFunction &, RValueTy V) { return V; }
    };

    typedef CallDeleteDuringNew<DirectCleanupTraits> DirectCleanup;

    DirectCleanup *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<DirectCleanup>(EHCleanup,
                                           E->getNumPlacementArgs(),
                                           E->getOperatorDelete(),
                                           NewPtr.getPointer(),
                                           AllocSize,
                                           E->passAlignment(),
                                           AllocAlign);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
      auto &Arg = NewArgs[I + NumNonPlacementArgs];
      Cleanup->setPlacementArg(I, Arg.getRValue(CGF), Arg.Ty);
    }

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr.getPointer()));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  struct ConditionalCleanupTraits {
    typedef DominatingValue<RValue>::saved_type ValueTy;
    typedef DominatingValue<RValue>::saved_type RValueTy;
    static RValue get(CodeGenFunction &CGF, ValueTy V) {
      return V.restore(CGF);
    }
  };
  typedef CallDeleteDuringNew<ConditionalCleanupTraits> ConditionalCleanup;

  ConditionalCleanup *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<ConditionalCleanup>(EHCleanup,
                                              E->getNumPlacementArgs(),
                                              E->getOperatorDelete(),
                                              SavedNewPtr,
                                              SavedAllocSize,
                                              E->passAlignment(),
                                              AllocAlign);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
    auto &Arg = NewArgs[I + NumNonPlacementArgs];
    Cleanup->setPlacementArg(
        I, DominatingValue<RValue>::save(CGF, Arg.getRValue(CGF)), Arg.Ty);
  }

  CGF.initFullExprCleanup();
}

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // there are initializers.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer());
    if (ILE && ILE->isStringLiteralInit())
      minElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
    else if (ILE)
      minElements = ILE->getNumInits();
  }
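
  // E.g. 'new int[n]{1, 2, 3}' must allocate at least three elements;
  // minElements feeds the size computation (and its overflow check) below.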

  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);
  CharUnits allocAlign = getContext().getTypeAlignInChars(allocType);

  // Emit the allocation call. If the allocator is a global placement
  // operator, just "inline" it directly.
  Address allocation = Address::invalid();
  CallArgList allocatorArgs;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(E->getNumPlacementArgs() == 1);
    const Expr *arg = *E->placement_arguments().begin();

    LValueBaseInfo BaseInfo;
    allocation = EmitPointerWithAlignment(arg, &BaseInfo);

    // The pointer expression will, in many cases, be an opaque void*.
    // In these cases, discard the computed alignment and use the
    // formal alignment of the allocated type.
    if (BaseInfo.getAlignmentSource() != AlignmentSource::Decl)
      allocation = Address(allocation.getPointer(), allocAlign);

    // Set up allocatorArgs for the call to operator delete if it's not
    // the reserved global operator.
    if (E->getOperatorDelete() &&
        !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
      allocatorArgs.add(RValue::get(allocSize), getContext().getSizeType());
      allocatorArgs.add(RValue::get(allocation.getPointer()), arg->getType());
    }
  } else {
    const FunctionProtoType *allocatorType =
      allocator->getType()->castAs<FunctionProtoType>();
    unsigned ParamsToSkip = 0;

    // The allocation size is the first argument.
    QualType sizeType = getContext().getSizeType();
    allocatorArgs.add(RValue::get(allocSize), sizeType);
    ++ParamsToSkip;

    if (allocSize != allocSizeWithoutCookie) {
      CharUnits cookieAlign = getSizeAlign(); // FIXME: Ask the ABI.
      allocAlign = std::max(allocAlign, cookieAlign);
    }

    // The allocation alignment may be passed as the second argument.
    if (E->passAlignment()) {
      QualType AlignValT = sizeType;
      if (allocatorType->getNumParams() > 1) {
        AlignValT = allocatorType->getParamType(1);
        assert(getContext().hasSameUnqualifiedType(
                   AlignValT->castAs<EnumType>()->getDecl()->getIntegerType(),
                   sizeType) &&
               "wrong type for alignment parameter");
        ++ParamsToSkip;
      } else {
        // Corner case, passing alignment to 'operator new(size_t, ...)'.
        assert(allocator->isVariadic() && "can't pass alignment to allocator");
      }
      allocatorArgs.add(
          RValue::get(llvm::ConstantInt::get(SizeTy, allocAlign.getQuantity())),
          AlignValT);
    }
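
    // For illustration:
    //   struct alignas(32) V { ... };
    //   new V;
    // On typical targets, where 32 exceeds the default new alignment, Sema
    // selects operator new(std::size_t, std::align_val_t), and
    // std::align_val_t(32) is passed here as the second argument.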

    // FIXME: Why do we not pass a CalleeDecl here?
    EmitCallArgs(allocatorArgs, allocatorType, E->placement_arguments(),
                 /*AC*/AbstractCallee(), /*ParamsToSkip*/ParamsToSkip);

    RValue RV =
        EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);

    // If this was a call to a global replaceable allocation function that does
    // not take an alignment argument, the allocator is known to produce
    // storage that's suitably aligned for any object that fits, up to a known
    // threshold. Otherwise assume it's suitably aligned for the allocated type.
    CharUnits allocationAlign = allocAlign;
    if (!E->passAlignment() &&
        allocator->isReplaceableGlobalAllocationFunction()) {
      unsigned AllocatorAlign = llvm::PowerOf2Floor(std::min<uint64_t>(
          Target.getNewAlign(), getContext().getTypeSize(allocType)));
      allocationAlign = std::max(
          allocationAlign, getContext().toCharUnitsFromBits(AllocatorAlign));
    }

    allocation = Address(RV.getScalarVal(), allocationAlign);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer or will be running sanitizers on the
  // initialization.
  bool nullCheck = E->shouldNullCheckAllocation() &&
                   (!allocType.isPODType(getContext()) || E->hasInitializer() ||
                    sanitizePerformTypeCheck());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull =
      Builder.CreateIsNull(allocation.getPointer(), "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }
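
  // E.g. 'new (std::nothrow) T(...)' may return null, so the initializer is
  // emitted only on the new.notnull path and the result is merged with a phi
  // at new.cont below.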

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocAlign,
                          allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }
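
  // E.g. for 'new S[n]' where S has a non-trivial destructor, the Itanium
  // ABI stores n in a cookie ahead of the array so that delete[] can recover
  // the element count; InitializeArrayCookie writes it and returns the
  // pointer advanced past the cookie.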

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  Address result = Builder.CreateElementBitCast(allocation, elementTy);

  // Pass the pointer through launder.invariant.group to avoid propagating
  // vptr information that may be embedded in the previous type. To avoid
  // breaking LTO under mixed optimization levels, this is done regardless of
  // the optimization level.
  if (CGM.getCodeGenOpts().StrictVTablePointers &&
      allocator->isReservedGlobalPlacementOperator())
    result = Address(Builder.CreateLaunderInvariantGroup(result.getPointer()),
                     result.getAlignment());

  // Emit sanitizer checks for the pointer value now, so that in the case of
  // an array it is checked only once and not at each constructor call. We may
  // have already checked that the pointer is non-null.
  // FIXME: If we have an array cookie and a potentially-throwing allocator,
  // we'll null check the wrong pointer here.
  SanitizerSet SkippedChecks;
  SkippedChecks.set(SanitizerKind::Null, nullCheck);
  EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall,
                E->getAllocatedTypeSourceInfo()->getTypeLoc().getBeginLoc(),
                result.getPointer(), allocType, result.getAlignment(),
                SkippedChecks, numElements);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type. If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result.getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  llvm::Value *resultPtr = result.getPointer();
  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(resultPtr->getType(), 2);
    PHI->addIncoming(resultPtr, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(resultPtr->getType()),
                     nullCheckBB);

    resultPtr = PHI;
  }

  return resultPtr;
}

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr, QualType DeleteTy,
                                     llvm::Value *NumElements,
                                     CharUnits CookieSize) {
  assert((!NumElements && CookieSize.isZero()) ||
         DeleteFD->getOverloadedOperator() == OO_Array_Delete);

  const FunctionProtoType *DeleteFTy =
    DeleteFD->getType()->getAs<FunctionProtoType>();

  CallArgList DeleteArgs;

  auto Params = getUsualDeleteParams(DeleteFD);
  auto ParamTypeIt = DeleteFTy->param_type_begin();

  // Pass the pointer itself.
  QualType ArgTy = *ParamTypeIt++;
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  // Pass the std::destroying_delete tag if present.
  if (Params.DestroyingDelete) {
    QualType DDTag = *ParamTypeIt++;
    // Just pass an 'undef'. We expect the tag type to be an empty struct.
    auto *V = llvm::UndefValue::get(getTypes().ConvertType(DDTag));
    DeleteArgs.add(RValue::get(V), DDTag);
  }

  // Pass the size if the delete function has a size_t parameter.
  if (Params.Size) {
    QualType SizeType = *ParamTypeIt++;
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    llvm::Value *Size = llvm::ConstantInt::get(ConvertType(SizeType),
                                               DeleteTypeSize.getQuantity());

    // For array new, multiply by the number of elements.
    if (NumElements)
      Size = Builder.CreateMul(Size, NumElements);

    // If there is a cookie, add the cookie size.
    if (!CookieSize.isZero())
      Size = Builder.CreateAdd(
          Size, llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity()));

    DeleteArgs.add(RValue::get(Size), SizeType);
  }
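
  // E.g. a sized delete[] of n elements of a 12-byte type with an 8-byte
  // array cookie passes 12 * n + 8 as the size argument.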

  // Pass the alignment if the delete function has an align_val_t parameter.
  if (Params.Alignment) {
    QualType AlignValType = *ParamTypeIt++;
    CharUnits DeleteTypeAlign = getContext().toCharUnitsFromBits(
        getContext().getTypeAlignIfKnown(DeleteTy));
    llvm::Value *Align = llvm::ConstantInt::get(ConvertType(AlignValType),
                                                DeleteTypeAlign.getQuantity());
    DeleteArgs.add(RValue::get(Align), AlignValType);
  }

  assert(ParamTypeIt == DeleteFTy->param_type_end() &&
         "unknown parameter to usual delete function");

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);
}

namespace {
  /// Calls the given 'operator delete' on a single object.
  struct CallObjectDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    QualType ElementType;

    CallObjectDelete(llvm::Value *Ptr,
                     const FunctionDecl *OperatorDelete,
                     QualType ElementType)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
    }
  };
}

void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}

/// Emit the code for deleting a single object with a destroying operator
/// delete. If the element type has a non-virtual destructor, Ptr has already
/// been converted to the type of the parameter of 'operator delete'. Otherwise
/// Ptr points to an object of the static type.
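///
/// For illustration (C++20 destroying delete):
///   struct S { void operator delete(S *, std::destroying_delete_t); };
///   delete p;  // calls S::operator delete; no separate destructor call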
static void EmitDestroyingObjectDelete(CodeGenFunction &CGF,
                                       const CXXDeleteExpr *DE, Address Ptr,
                                       QualType ElementType) {
  auto *Dtor = ElementType->getAsCXXRecordDecl()->getDestructor();
  if (Dtor && Dtor->isVirtual())
    CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                Dtor);
  else
    CGF.EmitDeleteCall(DE->getOperatorDelete(), Ptr.getPointer(), ElementType);
}

/// Emit the code for deleting a single object.
static void EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             Address Ptr,
                             QualType ElementType) {
  // C++11 [expr.delete]p3:
  //   If the static type of the object to be deleted is different from its
  //   dynamic type, the static type shall be a base class of the dynamic type
  //   of the object to be deleted and the static type shall have a virtual
  //   destructor or the behavior is undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_MemberCall,
                    DE->getExprLoc(), Ptr.getPointer(),
                    ElementType);

  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  assert(!OperatorDelete->isDestroyingOperatorDelete());

  // Find the destructor for the type, if applicable. If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        bool UseVirtualCall = true;
        const Expr *Base = DE->getArgument();
        if (auto *DevirtualizedDtor =
                dyn_cast_or_null<const CXXDestructorDecl>(
                    Dtor->getDevirtualizedMethod(
                        Base, CGF.CGM.getLangOpts().AppleKext))) {
          UseVirtualCall = false;
          const CXXRecordDecl *DevirtualizedClass =
              DevirtualizedDtor->getParent();
          if (declaresSameEntity(getCXXRecord(Base), DevirtualizedClass)) {
            // Devirtualized to the class of the base type (the type of the
            // whole expression).
            Dtor = DevirtualizedDtor;
          } else {
            // Devirtualized to some other type. Would need to cast the this
            // pointer to that type but we don't have support for that yet, so
            // do a virtual call. FIXME: handle the case where it is
            // devirtualized to the derived type (the type of the inner
            // expression) as in EmitCXXMemberOrOperatorMemberCallExpr.
            UseVirtualCall = true;
          }
        }
        if (UseVirtualCall) {
          CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                      Dtor);
          return;
        }
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr.getPointer(),
                                            OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr, ElementType);
  else if (auto Lifetime = ElementType.getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      CGF.EmitARCDestroyStrong(Ptr, ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  CGF.PopCleanupBlock();
}

namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;
    QualType ElementType;
    CharUnits CookieSize;

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType, NumElements,
                         CookieSize);
    }
  };
}

/// Emit the code for deleting an array of objects.
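///
/// E.g. for 'delete[] p' where the array was created by 'new S[n]' and S has
/// a non-trivial destructor: the element count is read back from the array
/// cookie, the elements are destroyed in reverse order of construction, and
/// the storage, including the cookie, is freed even if a destructor throws.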
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            Address deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
    CharUnits elementAlign =
      deletedPtr.getAlignment().alignmentOfArrayElement(elementSize);

    llvm::Value *arrayBegin = deletedPtr.getPointer();
    llvm::Value *arrayEnd =
      CGF.Builder.CreateInBoundsGEP(arrayBegin, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(arrayBegin, arrayEnd, elementType, elementAlign,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  Address Ptr = EmitPointerWithAlignment(Arg);

  // Null check the pointer.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr.getPointer(), "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  QualType DeleteTy = E->getDestroyedType();

  // A destroying operator delete overrides the entire operation of the
  // delete expression.
  if (E->getOperatorDelete()->isDestroyingOperatorDelete()) {
    EmitDestroyingObjectDelete(*this, E, Ptr, DeleteTy);
    EmitBlock(DeleteEnd);
    return;
  }

  // We might be deleting a pointer to array. If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Address(Builder.CreateInBoundsGEP(Ptr.getPointer(), GEP, "del.first"),
                  Ptr.getAlignment());
  }
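
  // E.g. 'delete[] p' where p came from 'new int[n][3][7]' (and so has type
  // 'int (*)[3][7]'): DeleteTy unpeels from int[3][7] down to int, and the
  // GEP above repositions Ptr at the first scalar element.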

  assert(ConvertTypeForMem(DeleteTy) == Ptr.getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
  } else {
    EmitObjectDelete(*this, E, Ptr, DeleteTy);
  }

  EmitBlock(DeleteEnd);
}

static bool isGLValueFromPointerDeref(const Expr *E) {
  E = E->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(E)) {
    if (!CE->getSubExpr()->isGLValue())
      return false;
    return isGLValueFromPointerDeref(CE->getSubExpr());
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
    return isGLValueFromPointerDeref(OVE->getSourceExpr());

  if (const auto *BO = dyn_cast<BinaryOperator>(E))
    if (BO->getOpcode() == BO_Comma)
      return isGLValueFromPointerDeref(BO->getRHS());

  if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
    return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
           isGLValueFromPointerDeref(ACO->getFalseExpr());

  // C++11 [expr.sub]p1:
  //   The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  if (isa<ArraySubscriptExpr>(E))
    return true;

  if (const auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Deref)
      return true;

  return false;
}
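
// E.g. 'typeid(*p)', 'typeid((*p))', 'typeid(p[0])', 'typeid((*q, *p))' and
// 'typeid(cond ? *p : *q)' all count as derived from a pointer dereference,
// so EmitTypeidFromVTable below emits the null check mandated for them.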

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  Address ThisPtr = CGF.EmitLValue(E).getAddress();

  QualType SrcRecordTy = E->getType();

  // C++ [class.cdtor]p4:
  //   If the operand of typeid refers to the object under construction or
  //   destruction and the static type of the operand is neither the
  //   constructor or destructor's class nor one of its bases, the behavior is
  //   undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_DynamicOperation, E->getExprLoc(),
                    ThisPtr.getPointer(), SrcRecordTy);

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator
  //   to a pointer and the pointer is a null pointer value, the typeid
  //   expression throws the std::bad_typeid exception.
  //
  // However, this paragraph's intent is not clear. We choose a very generous
  // interpretation which implores us to consider comma operators, conditional
  // operators, parentheses and other such constructs.
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr.getPointer());
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  if (E->isPotentiallyEvaluated())
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
    return nullptr;

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(Address ThisAddr,
                                              const CXXDynamicCastExpr *DCE) {
  CGM.EmitExplicitCastExprType(DCE, this);
  QualType DestTy = DCE->getTypeAsWritten();
  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  // C++ [class.cdtor]p5:
  //   If the operand of the dynamic_cast refers to the object under
  //   construction or destruction and the static type of the operand is not a
  //   pointer to or object of the constructor or destructor's own class or one
  //   of its bases, the dynamic_cast results in undefined behavior.
  EmitTypeCheck(TCK_DynamicOperation, DCE->getExprLoc(), ThisAddr.getPointer(),
                SrcRecordTy);

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the
  //   result is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(ThisAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *Value;
  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, ThisAddr, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, ThisAddr, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
    CastNotNull = Builder.GetInsertBlock();
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }
  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Value;
}
  1932. }