CGBlocks.cpp

  1. //===--- CGBlocks.cpp - Emit LLVM Code for declarations ---------*- C++ -*-===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code to emit blocks.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CGBlocks.h"
  14. #include "CGCXXABI.h"
  15. #include "CGDebugInfo.h"
  16. #include "CGObjCRuntime.h"
  17. #include "CGOpenCLRuntime.h"
  18. #include "CodeGenFunction.h"
  19. #include "CodeGenModule.h"
  20. #include "ConstantEmitter.h"
  21. #include "TargetInfo.h"
  22. #include "clang/AST/DeclObjC.h"
  23. #include "clang/CodeGen/ConstantInitBuilder.h"
  24. #include "llvm/ADT/SmallSet.h"
  25. #include "llvm/IR/CallSite.h"
  26. #include "llvm/IR/DataLayout.h"
  27. #include "llvm/IR/Module.h"
  28. #include "llvm/Support/ScopedPrinter.h"
  29. #include <algorithm>
  30. #include <cstdio>
  31. using namespace clang;
  32. using namespace CodeGen;
  33. CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
  34. : Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
  35. HasCXXObject(false), UsesStret(false), HasCapturedVariableLayout(false),
  36. CapturesNonExternalType(false), LocalAddress(Address::invalid()),
  37. StructureType(nullptr), Block(block), DominatingIP(nullptr) {
  38. // Skip asm prefix, if any. 'name' is usually taken directly from
  39. // the mangled name of the enclosing function.
  40. if (!name.empty() && name[0] == '\01')
  41. name = name.substr(1);
  42. }
  43. // Anchor the vtable to this translation unit.
  44. BlockByrefHelpers::~BlockByrefHelpers() {}
  45. /// Build the given block as a global block.
  46. static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
  47. const CGBlockInfo &blockInfo,
  48. llvm::Constant *blockFn);
  49. /// Build the helper function to copy a block.
  50. static llvm::Constant *buildCopyHelper(CodeGenModule &CGM,
  51. const CGBlockInfo &blockInfo) {
  52. return CodeGenFunction(CGM).GenerateCopyHelperFunction(blockInfo);
  53. }
  54. /// Build the helper function to dispose of a block.
  55. static llvm::Constant *buildDisposeHelper(CodeGenModule &CGM,
  56. const CGBlockInfo &blockInfo) {
  57. return CodeGenFunction(CGM).GenerateDestroyHelperFunction(blockInfo);
  58. }
  59. namespace {
  60. /// Represents a type of copy/destroy operation that should be performed for an
  61. /// entity that's captured by a block.
  62. enum class BlockCaptureEntityKind {
  63. CXXRecord, // Copy or destroy
  64. ARCWeak,
  65. ARCStrong,
  66. NonTrivialCStruct,
  67. BlockObject, // Assign or release
  68. None
  69. };
  70. /// Represents a captured entity that requires extra operations in order for
  71. /// this entity to be copied or destroyed correctly.
  72. struct BlockCaptureManagedEntity {
  73. BlockCaptureEntityKind CopyKind, DisposeKind;
  74. BlockFieldFlags CopyFlags, DisposeFlags;
  75. const BlockDecl::Capture *CI;
  76. const CGBlockInfo::Capture *Capture;
  77. BlockCaptureManagedEntity(BlockCaptureEntityKind CopyType,
  78. BlockCaptureEntityKind DisposeType,
  79. BlockFieldFlags CopyFlags,
  80. BlockFieldFlags DisposeFlags,
  81. const BlockDecl::Capture &CI,
  82. const CGBlockInfo::Capture &Capture)
  83. : CopyKind(CopyType), DisposeKind(DisposeType), CopyFlags(CopyFlags),
  84. DisposeFlags(DisposeFlags), CI(&CI), Capture(&Capture) {}
  85. bool operator<(const BlockCaptureManagedEntity &Other) const {
  86. return Capture->getOffset() < Other.Capture->getOffset();
  87. }
  88. };
  89. enum class CaptureStrKind {
  90. // String for the copy helper.
  91. CopyHelper,
  92. // String for the dispose helper.
  93. DisposeHelper,
  94. // Merge the strings for the copy helper and dispose helper.
  95. Merged
  96. };
  97. } // end anonymous namespace
  98. static void findBlockCapturedManagedEntities(
  99. const CGBlockInfo &BlockInfo, const LangOptions &LangOpts,
  100. SmallVectorImpl<BlockCaptureManagedEntity> &ManagedCaptures);
  101. static std::string getBlockCaptureStr(const BlockCaptureManagedEntity &E,
  102. CaptureStrKind StrKind,
  103. CharUnits BlockAlignment,
  104. CodeGenModule &CGM);
  105. static std::string getBlockDescriptorName(const CGBlockInfo &BlockInfo,
  106. CodeGenModule &CGM) {
  107. std::string Name = "__block_descriptor_";
  108. Name += llvm::to_string(BlockInfo.BlockSize.getQuantity()) + "_";
  109. if (BlockInfo.needsCopyDisposeHelpers()) {
  110. if (CGM.getLangOpts().Exceptions)
  111. Name += "e";
  112. if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
  113. Name += "a";
  114. Name += llvm::to_string(BlockInfo.BlockAlign.getQuantity()) + "_";
  115. SmallVector<BlockCaptureManagedEntity, 4> ManagedCaptures;
  116. findBlockCapturedManagedEntities(BlockInfo, CGM.getContext().getLangOpts(),
  117. ManagedCaptures);
  118. for (const BlockCaptureManagedEntity &E : ManagedCaptures) {
  119. Name += llvm::to_string(E.Capture->getOffset().getQuantity());
  120. if (E.CopyKind == E.DisposeKind) {
  121. // If CopyKind and DisposeKind are the same, merge the capture
  122. // information.
  123. assert(E.CopyKind != BlockCaptureEntityKind::None &&
  124. "shouldn't see BlockCaptureManagedEntity that is None");
  125. Name += getBlockCaptureStr(E, CaptureStrKind::Merged,
  126. BlockInfo.BlockAlign, CGM);
  127. } else {
  128. // If CopyKind and DisposeKind are not the same, which can happen when
  129. // either Kind is None or the captured object is a __strong block,
  130. // concatenate the copy and dispose strings.
  131. Name += getBlockCaptureStr(E, CaptureStrKind::CopyHelper,
  132. BlockInfo.BlockAlign, CGM);
  133. Name += getBlockCaptureStr(E, CaptureStrKind::DisposeHelper,
  134. BlockInfo.BlockAlign, CGM);
  135. }
  136. }
  137. Name += "_";
  138. }
  139. std::string TypeAtEncoding =
  140. CGM.getContext().getObjCEncodingForBlock(BlockInfo.getBlockExpr());
  141. Name += "e" + llvm::to_string(TypeAtEncoding.size()) + "_" + TypeAtEncoding;
  142. Name += "l" + CGM.getObjCRuntime().getRCBlockLayoutStr(CGM, BlockInfo);
  143. return Name;
  144. }
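// For illustration only (a sketch of the concatenations above; the exact
// per-capture strings come from getBlockCaptureStr and the trailing layout
// string from CGObjCRuntime::getRCBlockLayoutStr), the generated name has
// roughly this shape:
//
//   __block_descriptor_<size>_[e][a]<align>_(<offset><capture-str>)*_
//       e<encoding-length>_<@encode-string>l<layout-string>
//
// where the bracketed/starred middle section is present only when the block
// needs copy/dispose helpers.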
  145. /// buildBlockDescriptor - Build the block descriptor meta-data for a block.
  146. /// buildBlockDescriptor is accessed from the 5th field of the Block_literal
  147. /// meta-data and contains stationary information about the block literal.
  148. /// Its definition will have 4 (or optionally 6) words.
  149. /// \code
  150. /// struct Block_descriptor {
  151. /// unsigned long reserved;
  152. /// unsigned long size; // size of Block_literal metadata in bytes.
  153. /// void *copy_func_helper_decl; // optional copy helper.
  154. /// void *destroy_func_decl; // optional destructor helper.
  155. /// void *block_method_encoding_address; // @encode for block literal signature.
  156. /// void *block_layout_info; // encoding of captured block variables.
  157. /// };
  158. /// \endcode
  159. static llvm::Constant *buildBlockDescriptor(CodeGenModule &CGM,
  160. const CGBlockInfo &blockInfo) {
  161. ASTContext &C = CGM.getContext();
  162. llvm::IntegerType *ulong =
  163. cast<llvm::IntegerType>(CGM.getTypes().ConvertType(C.UnsignedLongTy));
  164. llvm::PointerType *i8p = nullptr;
  165. if (CGM.getLangOpts().OpenCL)
  166. i8p =
  167. llvm::Type::getInt8PtrTy(
  168. CGM.getLLVMContext(), C.getTargetAddressSpace(LangAS::opencl_constant));
  169. else
  170. i8p = CGM.VoidPtrTy;
  171. std::string descName;
  172. // If an equivalent block descriptor global variable exists, return it.
  173. if (C.getLangOpts().ObjC1 &&
  174. CGM.getLangOpts().getGC() == LangOptions::NonGC) {
  175. descName = getBlockDescriptorName(blockInfo, CGM);
  176. if (llvm::GlobalValue *desc = CGM.getModule().getNamedValue(descName))
  177. return llvm::ConstantExpr::getBitCast(desc,
  178. CGM.getBlockDescriptorType());
  179. }
  180. // If there isn't an equivalent block descriptor global variable, create a new
  181. // one.
  182. ConstantInitBuilder builder(CGM);
  183. auto elements = builder.beginStruct();
  184. // reserved
  185. elements.addInt(ulong, 0);
  186. // Size
  187. // FIXME: What is the right way to say this doesn't fit? We should give
  188. // a user diagnostic in that case. Better fix would be to change the
  189. // API to size_t.
  190. elements.addInt(ulong, blockInfo.BlockSize.getQuantity());
  191. // Optional copy/dispose helpers.
  192. bool hasInternalHelper = false;
  193. if (blockInfo.needsCopyDisposeHelpers()) {
  194. // copy_func_helper_decl
  195. llvm::Constant *copyHelper = buildCopyHelper(CGM, blockInfo);
  196. elements.add(copyHelper);
  197. // destroy_func_decl
  198. llvm::Constant *disposeHelper = buildDisposeHelper(CGM, blockInfo);
  199. elements.add(disposeHelper);
  200. if (cast<llvm::Function>(copyHelper->getOperand(0))->hasInternalLinkage() ||
  201. cast<llvm::Function>(disposeHelper->getOperand(0))
  202. ->hasInternalLinkage())
  203. hasInternalHelper = true;
  204. }
  205. // Signature. Mandatory ObjC-style method descriptor @encode sequence.
  206. std::string typeAtEncoding =
  207. CGM.getContext().getObjCEncodingForBlock(blockInfo.getBlockExpr());
  208. elements.add(llvm::ConstantExpr::getBitCast(
  209. CGM.GetAddrOfConstantCString(typeAtEncoding).getPointer(), i8p));
  210. // GC layout.
  211. if (C.getLangOpts().ObjC1) {
  212. if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
  213. elements.add(CGM.getObjCRuntime().BuildGCBlockLayout(CGM, blockInfo));
  214. else
  215. elements.add(CGM.getObjCRuntime().BuildRCBlockLayout(CGM, blockInfo));
  216. }
  217. else
  218. elements.addNullPointer(i8p);
  219. unsigned AddrSpace = 0;
  220. if (C.getLangOpts().OpenCL)
  221. AddrSpace = C.getTargetAddressSpace(LangAS::opencl_constant);
  222. llvm::GlobalValue::LinkageTypes linkage;
  223. if (descName.empty()) {
  224. linkage = llvm::GlobalValue::InternalLinkage;
  225. descName = "__block_descriptor_tmp";
  226. } else if (hasInternalHelper) {
  227. // If either the copy helper or the dispose helper has internal linkage,
  228. // the block descriptor must have internal linkage too.
  229. linkage = llvm::GlobalValue::InternalLinkage;
  230. } else {
  231. linkage = llvm::GlobalValue::LinkOnceODRLinkage;
  232. }
  233. llvm::GlobalVariable *global =
  234. elements.finishAndCreateGlobal(descName, CGM.getPointerAlign(),
  235. /*constant*/ true, linkage, AddrSpace);
  236. if (linkage == llvm::GlobalValue::LinkOnceODRLinkage) {
  237. global->setVisibility(llvm::GlobalValue::HiddenVisibility);
  238. global->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  239. }
  240. return llvm::ConstantExpr::getBitCast(global, CGM.getBlockDescriptorType());
  241. }
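// A sketch of the descriptor emitted above for a simple block that needs no
// copy/dispose helpers, written in the spirit of the notional Block_descriptor
// struct documented before buildBlockDescriptor; the two helper pointers are
// omitted because needsCopyDisposeHelpers() is false, and the concrete values
// are illustrative assumptions:
//
//   struct Block_descriptor desc = {
//     /* reserved */ 0,
//     /* size     */ sizeof(Block_literal),  // blockInfo.BlockSize
//     /* encoding */ "v8@?0",                // @encode of the block signature
//     /* layout   */ layout_string_or_null,  // RC/GC layout, or null i8* outside ObjC
//   };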
  242. /*
  243. Purely notional variadic template describing the layout of a block.
  244. template <class _ResultType, class... _ParamTypes, class... _CaptureTypes>
  245. struct Block_literal {
  246. /// Initialized to one of:
  247. /// extern void *_NSConcreteStackBlock[];
  248. /// extern void *_NSConcreteGlobalBlock[];
  249. ///
  250. /// In theory, we could start one off malloc'ed by setting
  251. /// BLOCK_NEEDS_FREE, giving it a refcount of 1, and using
  252. /// this isa:
  253. /// extern void *_NSConcreteMallocBlock[];
  254. struct objc_class *isa;
  255. /// These are the flags (with corresponding bit number) that the
  256. /// compiler is actually supposed to know about.
  257. /// 23. BLOCK_IS_NOESCAPE - indicates that the block is non-escaping
  258. /// 25. BLOCK_HAS_COPY_DISPOSE - indicates that the block
  259. /// descriptor provides copy and dispose helper functions
  260. /// 26. BLOCK_HAS_CXX_OBJ - indicates that there's a captured
  261. /// object with a nontrivial destructor or copy constructor
  262. /// 28. BLOCK_IS_GLOBAL - indicates that the block is allocated
  263. /// as global memory
  264. /// 29. BLOCK_USE_STRET - indicates that the block function
  265. /// uses stret, which objc_msgSend needs to know about
  266. /// 30. BLOCK_HAS_SIGNATURE - indicates that the block has an
  267. /// @encoded signature string
  268. /// And we're not supposed to manipulate these:
  269. /// 24. BLOCK_NEEDS_FREE - indicates that the block has been moved
  270. /// to malloc'ed memory
  271. /// 27. BLOCK_IS_GC - indicates that the block has been moved to
  272. /// GC-allocated memory
  273. /// Additionally, the bottom 16 bits are a reference count which
  274. /// should be zero on the stack.
  275. int flags;
  276. /// Reserved; should be zero-initialized.
  277. int reserved;
  278. /// Function pointer generated from block literal.
  279. _ResultType (*invoke)(Block_literal *, _ParamTypes...);
  280. /// Block description metadata generated from block literal.
  281. struct Block_descriptor *block_descriptor;
  282. /// Captured values follow.
  283. _CaptureTypes captures...;
  284. };
  285. */
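/*
  A purely illustrative instantiation of the template above (an assumption
  about one specific block, not generated code): for

    int x = 1;
    void (^b)(void) = ^{ use(x); };

  the on-stack Block_literal is laid out roughly as

    struct Block_literal_b {
      struct objc_class *isa;    // &_NSConcreteStackBlock
      int flags;                 // BLOCK_HAS_SIGNATURE (plus other bits as needed)
      int reserved;              // 0
      void (*invoke)(struct Block_literal_b *);
      struct Block_descriptor *block_descriptor;
      int x;                     // captured by value, copied in at block creation
    };
*/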
  286. namespace {
  287. /// A chunk of data that we actually have to capture in the block.
  288. struct BlockLayoutChunk {
  289. CharUnits Alignment;
  290. CharUnits Size;
  291. Qualifiers::ObjCLifetime Lifetime;
  292. const BlockDecl::Capture *Capture; // null for 'this'
  293. llvm::Type *Type;
  294. QualType FieldType;
  295. BlockLayoutChunk(CharUnits align, CharUnits size,
  296. Qualifiers::ObjCLifetime lifetime,
  297. const BlockDecl::Capture *capture,
  298. llvm::Type *type, QualType fieldType)
  299. : Alignment(align), Size(size), Lifetime(lifetime),
  300. Capture(capture), Type(type), FieldType(fieldType) {}
  301. /// Tell the block info that this chunk has the given field index.
  302. void setIndex(CGBlockInfo &info, unsigned index, CharUnits offset) {
  303. if (!Capture) {
  304. info.CXXThisIndex = index;
  305. info.CXXThisOffset = offset;
  306. } else {
  307. auto C = CGBlockInfo::Capture::makeIndex(index, offset, FieldType);
  308. info.Captures.insert({Capture->getVariable(), C});
  309. }
  310. }
  311. };
  312. /// Order by 1) all __strong together 2) next, all byref together 3) next,
  313. /// all __weak together. Preserve descending alignment in all situations.
  314. bool operator<(const BlockLayoutChunk &left, const BlockLayoutChunk &right) {
  315. if (left.Alignment != right.Alignment)
  316. return left.Alignment > right.Alignment;
  317. auto getPrefOrder = [](const BlockLayoutChunk &chunk) {
  318. if (chunk.Capture && chunk.Capture->isByRef())
  319. return 1;
  320. if (chunk.Lifetime == Qualifiers::OCL_Strong)
  321. return 0;
  322. if (chunk.Lifetime == Qualifiers::OCL_Weak)
  323. return 2;
  324. return 3;
  325. };
  326. return getPrefOrder(left) < getPrefOrder(right);
  327. }
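// Example ordering (a sketch, assuming an LP64 target where all three chunks
// have pointer alignment): capturing a __strong id, a __block int (stored as
// a void* to its byref struct), and a __weak id gives preference orders 0, 1
// and 2, so the stable sort lays them out as: __strong id, byref pointer,
// __weak id. A capture with a larger alignment would still sort ahead of all
// of them, since alignment is compared first.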
  328. } // end anonymous namespace
  329. /// Determines if the given type is safe for constant capture in C++.
  330. static bool isSafeForCXXConstantCapture(QualType type) {
  331. const RecordType *recordType =
  332. type->getBaseElementTypeUnsafe()->getAs<RecordType>();
  333. // Only records can be unsafe.
  334. if (!recordType) return true;
  335. const auto *record = cast<CXXRecordDecl>(recordType->getDecl());
  336. // Maintain semantics for classes with non-trivial dtors or copy ctors.
  337. if (!record->hasTrivialDestructor()) return false;
  338. if (record->hasNonTrivialCopyConstructor()) return false;
  339. // Otherwise, we just have to make sure there aren't any mutable
  340. // fields that might have changed since initialization.
  341. return !record->hasMutableFields();
  342. }
  343. /// It is illegal to modify a const object after initialization.
  344. /// Therefore, if a const object has a constant initializer, we don't
  345. /// actually need to keep storage for it in the block; we'll just
  346. /// rematerialize it at the start of the block function. This is
  347. /// acceptable because we make no promises about address stability of
  348. /// captured variables.
  349. static llvm::Constant *tryCaptureAsConstant(CodeGenModule &CGM,
  350. CodeGenFunction *CGF,
  351. const VarDecl *var) {
  352. // Return if this is a function parameter. We shouldn't try to
  353. // rematerialize default arguments of function parameters.
  354. if (isa<ParmVarDecl>(var))
  355. return nullptr;
  356. QualType type = var->getType();
  357. // We can only do this if the variable is const.
  358. if (!type.isConstQualified()) return nullptr;
  359. // Furthermore, in C++ we have to worry about mutable fields:
  360. // C++ [dcl.type.cv]p4:
  361. // Except that any class member declared mutable can be
  362. // modified, any attempt to modify a const object during its
  363. // lifetime results in undefined behavior.
  364. if (CGM.getLangOpts().CPlusPlus && !isSafeForCXXConstantCapture(type))
  365. return nullptr;
  366. // If the variable doesn't have any initializer (shouldn't this be
  367. // invalid?), it's not clear what we should do. Maybe capture as
  368. // zero?
  369. const Expr *init = var->getInit();
  370. if (!init) return nullptr;
  371. return ConstantEmitter(CGM, CGF).tryEmitAbstractForInitializer(*var);
  372. }
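// Illustrative example of the constant-capture path: given
//
//   const int x = 42;
//   ^{ return x + 1; };
//
// 'x' is const, has an initializer, and is not a ParmVarDecl, so it is
// captured as the constant 42; no field is allocated for it in the block and
// the block function simply rematerializes the value. A non-const variable
// (or, in C++, one whose type fails isSafeForCXXConstantCapture) returns
// nullptr here and becomes an ordinary capture.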
  373. /// Get the low bit of a nonzero character count. This is the
  374. /// alignment of the nth byte if the 0th byte is universally aligned.
  375. static CharUnits getLowBit(CharUnits v) {
  376. return CharUnits::fromQuantity(v.getQuantity() & (~v.getQuantity() + 1));
  377. }
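// Worked examples: getLowBit(12) == 4, getLowBit(8) == 8, getLowBit(5) == 1
// (v & -v isolates the lowest set bit). So when the 0th byte of the block is
// maximally aligned, getLowBit(blockSize) is the guaranteed alignment of the
// next byte to be appended to the layout.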
  378. static void initializeForBlockHeader(CodeGenModule &CGM, CGBlockInfo &info,
  379. SmallVectorImpl<llvm::Type*> &elementTypes) {
  380. assert(elementTypes.empty());
  381. if (CGM.getLangOpts().OpenCL) {
  382. // The header is basically 'struct { int; int;
  383. // custom_fields; }'. Assert that the struct is packed.
  384. elementTypes.push_back(CGM.IntTy); /* total size */
  385. elementTypes.push_back(CGM.IntTy); /* align */
  386. unsigned Offset = 2 * CGM.getIntSize().getQuantity();
  387. unsigned BlockAlign = CGM.getIntAlign().getQuantity();
  388. if (auto *Helper =
  389. CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
  390. for (auto I : Helper->getCustomFieldTypes()) /* custom fields */ {
  391. // TargetOpenCLBlockHelper needs to make sure the struct is packed.
  392. // If necessary, add padding fields to the custom fields.
  393. unsigned Align = CGM.getDataLayout().getABITypeAlignment(I);
  394. if (BlockAlign < Align)
  395. BlockAlign = Align;
  396. assert(Offset % Align == 0);
  397. Offset += CGM.getDataLayout().getTypeAllocSize(I);
  398. elementTypes.push_back(I);
  399. }
  400. }
  401. info.BlockAlign = CharUnits::fromQuantity(BlockAlign);
  402. info.BlockSize = CharUnits::fromQuantity(Offset);
  403. } else {
  404. // The header is basically 'struct { void *; int; int; void *; void *; }'.
  405. // Assert that the struct is packed.
  406. assert(CGM.getIntSize() <= CGM.getPointerSize());
  407. assert(CGM.getIntAlign() <= CGM.getPointerAlign());
  408. assert((2 * CGM.getIntSize()).isMultipleOf(CGM.getPointerAlign()));
  409. info.BlockAlign = CGM.getPointerAlign();
  410. info.BlockSize = 3 * CGM.getPointerSize() + 2 * CGM.getIntSize();
  411. elementTypes.push_back(CGM.VoidPtrTy);
  412. elementTypes.push_back(CGM.IntTy);
  413. elementTypes.push_back(CGM.IntTy);
  414. elementTypes.push_back(CGM.VoidPtrTy);
  415. elementTypes.push_back(CGM.getBlockDescriptorType());
  416. }
  417. }
  418. static QualType getCaptureFieldType(const CodeGenFunction &CGF,
  419. const BlockDecl::Capture &CI) {
  420. const VarDecl *VD = CI.getVariable();
  421. // If the variable is captured by an enclosing block or lambda expression,
  422. // use the type of the capture field.
  423. if (CGF.BlockInfo && CI.isNested())
  424. return CGF.BlockInfo->getCapture(VD).fieldType();
  425. if (auto *FD = CGF.LambdaCaptureFields.lookup(VD))
  426. return FD->getType();
  427. return VD->getType();
  428. }
  429. /// Compute the layout of the given block. Attempts to lay the block
  430. /// out with minimal space requirements.
  431. static void computeBlockInfo(CodeGenModule &CGM, CodeGenFunction *CGF,
  432. CGBlockInfo &info) {
  433. ASTContext &C = CGM.getContext();
  434. const BlockDecl *block = info.getBlockDecl();
  435. SmallVector<llvm::Type*, 8> elementTypes;
  436. initializeForBlockHeader(CGM, info, elementTypes);
  437. bool hasNonConstantCustomFields = false;
  438. if (auto *OpenCLHelper =
  439. CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper())
  440. hasNonConstantCustomFields =
  441. !OpenCLHelper->areAllCustomFieldValuesConstant(info);
  442. if (!block->hasCaptures() && !hasNonConstantCustomFields) {
  443. info.StructureType =
  444. llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
  445. info.CanBeGlobal = true;
  446. return;
  447. }
  448. else if (C.getLangOpts().ObjC1 &&
  449. CGM.getLangOpts().getGC() == LangOptions::NonGC)
  450. info.HasCapturedVariableLayout = true;
  451. // Collect the layout chunks.
  452. SmallVector<BlockLayoutChunk, 16> layout;
  453. layout.reserve(block->capturesCXXThis() +
  454. (block->capture_end() - block->capture_begin()));
  455. CharUnits maxFieldAlign;
  456. // First, 'this'.
  457. if (block->capturesCXXThis()) {
  458. assert(CGF && CGF->CurFuncDecl && isa<CXXMethodDecl>(CGF->CurFuncDecl) &&
  459. "Can't capture 'this' outside a method");
  460. QualType thisType = cast<CXXMethodDecl>(CGF->CurFuncDecl)->getThisType(C);
  461. // Theoretically, this could be in a different address space, so
  462. // don't assume standard pointer size/align.
  463. llvm::Type *llvmType = CGM.getTypes().ConvertType(thisType);
  464. std::pair<CharUnits,CharUnits> tinfo
  465. = CGM.getContext().getTypeInfoInChars(thisType);
  466. maxFieldAlign = std::max(maxFieldAlign, tinfo.second);
  467. layout.push_back(BlockLayoutChunk(tinfo.second, tinfo.first,
  468. Qualifiers::OCL_None,
  469. nullptr, llvmType, thisType));
  470. }
  471. // Next, all the block captures.
  472. for (const auto &CI : block->captures()) {
  473. const VarDecl *variable = CI.getVariable();
  474. if (CI.isByRef()) {
  475. // We have to copy/dispose of the __block reference.
  476. info.NeedsCopyDispose = true;
  477. // Just use void* instead of a pointer to the byref type.
  478. CharUnits align = CGM.getPointerAlign();
  479. maxFieldAlign = std::max(maxFieldAlign, align);
  480. layout.push_back(BlockLayoutChunk(align, CGM.getPointerSize(),
  481. Qualifiers::OCL_None, &CI,
  482. CGM.VoidPtrTy, variable->getType()));
  483. continue;
  484. }
  485. // Otherwise, build a layout chunk with the size and alignment of
  486. // the declaration.
  487. if (llvm::Constant *constant = tryCaptureAsConstant(CGM, CGF, variable)) {
  488. info.Captures[variable] = CGBlockInfo::Capture::makeConstant(constant);
  489. continue;
  490. }
  491. // If we have a lifetime qualifier, honor it for capture purposes.
  492. // That includes *not* copying it if it's __unsafe_unretained.
  493. Qualifiers::ObjCLifetime lifetime =
  494. variable->getType().getObjCLifetime();
  495. if (lifetime) {
  496. switch (lifetime) {
  497. case Qualifiers::OCL_None: llvm_unreachable("impossible");
  498. case Qualifiers::OCL_ExplicitNone:
  499. case Qualifiers::OCL_Autoreleasing:
  500. break;
  501. case Qualifiers::OCL_Strong:
  502. case Qualifiers::OCL_Weak:
  503. info.NeedsCopyDispose = true;
  504. }
  505. // Block pointers require copy/dispose. So do Objective-C pointers.
  506. } else if (variable->getType()->isObjCRetainableType()) {
  507. // But honor the inert __unsafe_unretained qualifier, which doesn't
  508. // actually make it into the type system.
  509. if (variable->getType()->isObjCInertUnsafeUnretainedType()) {
  510. lifetime = Qualifiers::OCL_ExplicitNone;
  511. } else {
  512. info.NeedsCopyDispose = true;
  513. // used for mrr below.
  514. lifetime = Qualifiers::OCL_Strong;
  515. }
  516. // So do types that require non-trivial copy construction.
  517. } else if (CI.hasCopyExpr()) {
  518. info.NeedsCopyDispose = true;
  519. info.HasCXXObject = true;
  520. if (!variable->getType()->getAsCXXRecordDecl()->isExternallyVisible())
  521. info.CapturesNonExternalType = true;
  522. // So do C structs that require non-trivial copy construction or
  523. // destruction.
  524. } else if (variable->getType().isNonTrivialToPrimitiveCopy() ==
  525. QualType::PCK_Struct ||
  526. variable->getType().isDestructedType() ==
  527. QualType::DK_nontrivial_c_struct) {
  528. info.NeedsCopyDispose = true;
  529. // And so do types with destructors.
  530. } else if (CGM.getLangOpts().CPlusPlus) {
  531. if (const CXXRecordDecl *record =
  532. variable->getType()->getAsCXXRecordDecl()) {
  533. if (!record->hasTrivialDestructor()) {
  534. info.HasCXXObject = true;
  535. info.NeedsCopyDispose = true;
  536. if (!record->isExternallyVisible())
  537. info.CapturesNonExternalType = true;
  538. }
  539. }
  540. }
  541. QualType VT = getCaptureFieldType(*CGF, CI);
  542. CharUnits size = C.getTypeSizeInChars(VT);
  543. CharUnits align = C.getDeclAlign(variable);
  544. maxFieldAlign = std::max(maxFieldAlign, align);
  545. llvm::Type *llvmType =
  546. CGM.getTypes().ConvertTypeForMem(VT);
  547. layout.push_back(
  548. BlockLayoutChunk(align, size, lifetime, &CI, llvmType, VT));
  549. }
  550. // If that was everything, we're done here.
  551. if (layout.empty()) {
  552. info.StructureType =
  553. llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
  554. info.CanBeGlobal = true;
  555. return;
  556. }
  557. // Sort the layout by alignment. We have to use a stable sort here
  558. // to get reproducible results. There should probably be an
  559. // llvm::array_pod_stable_sort.
  560. std::stable_sort(layout.begin(), layout.end());
  561. // Needed for blocks layout info.
  562. info.BlockHeaderForcedGapOffset = info.BlockSize;
  563. info.BlockHeaderForcedGapSize = CharUnits::Zero();
  564. CharUnits &blockSize = info.BlockSize;
  565. info.BlockAlign = std::max(maxFieldAlign, info.BlockAlign);
  566. // Assuming that the first byte in the header is maximally aligned,
  567. // get the alignment of the first byte following the header.
  568. CharUnits endAlign = getLowBit(blockSize);
  569. // If the end of the header isn't satisfactorily aligned for the
  570. // maximum thing, look for things that are okay with the header-end
  571. // alignment, and keep appending them until we get something that's
  572. // aligned right. This algorithm is only guaranteed optimal if
  573. // that condition is satisfied at some point; otherwise we can get
  574. // things like:
  575. // header // next byte has alignment 4
  576. // something_with_size_5; // next byte has alignment 1
  577. // something_with_alignment_8;
  578. // which has 7 bytes of padding, as opposed to the naive solution
  579. // which might have less (?).
  580. if (endAlign < maxFieldAlign) {
  581. SmallVectorImpl<BlockLayoutChunk>::iterator
  582. li = layout.begin() + 1, le = layout.end();
  583. // Look for something that the header end is already
  584. // satisfactorily aligned for.
  585. for (; li != le && endAlign < li->Alignment; ++li)
  586. ;
  587. // If we found something that's naturally aligned for the end of
  588. // the header, keep adding things...
  589. if (li != le) {
  590. SmallVectorImpl<BlockLayoutChunk>::iterator first = li;
  591. for (; li != le; ++li) {
  592. assert(endAlign >= li->Alignment);
  593. li->setIndex(info, elementTypes.size(), blockSize);
  594. elementTypes.push_back(li->Type);
  595. blockSize += li->Size;
  596. endAlign = getLowBit(blockSize);
  597. // ...until we get to the alignment of the maximum field.
  598. if (endAlign >= maxFieldAlign) {
  599. break;
  600. }
  601. }
  602. // Don't re-append everything we just appended.
  603. layout.erase(first, li);
  604. }
  605. }
  606. assert(endAlign == getLowBit(blockSize));
  607. // At this point, we just have to add padding if the end align still
  608. // isn't aligned right.
  609. if (endAlign < maxFieldAlign) {
  610. CharUnits newBlockSize = blockSize.alignTo(maxFieldAlign);
  611. CharUnits padding = newBlockSize - blockSize;
  612. // If we haven't yet added any fields, remember that there was an
  613. // initial gap; this needs to go into the block layout bit map.
  614. if (blockSize == info.BlockHeaderForcedGapOffset) {
  615. info.BlockHeaderForcedGapSize = padding;
  616. }
  617. elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
  618. padding.getQuantity()));
  619. blockSize = newBlockSize;
  620. endAlign = getLowBit(blockSize); // might be > maxFieldAlign
  621. }
  622. assert(endAlign >= maxFieldAlign);
  623. assert(endAlign == getLowBit(blockSize));
  624. // Slam everything else on now. This works because they have
  625. // strictly decreasing alignment and we expect that size is always a
  626. // multiple of alignment.
  627. for (SmallVectorImpl<BlockLayoutChunk>::iterator
  628. li = layout.begin(), le = layout.end(); li != le; ++li) {
  629. if (endAlign < li->Alignment) {
  630. // size may not be a multiple of alignment. This can only happen with
  631. // an over-aligned variable. We will be adding a padding field to
  632. // make the size a multiple of the alignment.
  633. CharUnits padding = li->Alignment - endAlign;
  634. elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
  635. padding.getQuantity()));
  636. blockSize += padding;
  637. endAlign = getLowBit(blockSize);
  638. }
  639. assert(endAlign >= li->Alignment);
  640. li->setIndex(info, elementTypes.size(), blockSize);
  641. elementTypes.push_back(li->Type);
  642. blockSize += li->Size;
  643. endAlign = getLowBit(blockSize);
  644. }
  645. info.StructureType =
  646. llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
  647. }
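// A worked example of the algorithm above (a sketch, assuming an LP64
// Objective-C target, i.e. a 32-byte header and pointer alignment 8):
// capturing a __strong id, a double, and a short gives maxFieldAlign = 8 and
// endAlign = getLowBit(32) = 32, so nothing needs to be pulled forward and
// the chunks are simply appended in sorted order:
//
//   offset 32: __strong id  (size 8, align 8)
//   offset 40: double       (size 8, align 8)
//   offset 48: short        (size 2, align 2)
//
// yielding BlockSize = 50; the __strong capture sorts ahead of the double at
// equal alignment because of its preference order.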
  648. /// Enter the scope of a block. This should be run at the entrance to
  649. /// a full-expression so that the block's cleanups are pushed at the
  650. /// right place in the stack.
  651. static void enterBlockScope(CodeGenFunction &CGF, BlockDecl *block) {
  652. assert(CGF.HaveInsertPoint());
  653. // Allocate the block info and place it at the head of the list.
  654. CGBlockInfo &blockInfo =
  655. *new CGBlockInfo(block, CGF.CurFn->getName());
  656. blockInfo.NextBlockInfo = CGF.FirstBlockInfo;
  657. CGF.FirstBlockInfo = &blockInfo;
  658. // Compute information about the layout, etc., of this block,
  659. // pushing cleanups as necessary.
  660. computeBlockInfo(CGF.CGM, &CGF, blockInfo);
  661. // Nothing else to do if it can be global.
  662. if (blockInfo.CanBeGlobal) return;
  663. // Make the allocation for the block.
  664. blockInfo.LocalAddress = CGF.CreateTempAlloca(blockInfo.StructureType,
  665. blockInfo.BlockAlign, "block");
  666. // If there are cleanups to emit, enter them (but inactive).
  667. if (!blockInfo.NeedsCopyDispose) return;
  668. // Walk through the captures (in order) and find the ones not
  669. // captured by constant.
  670. for (const auto &CI : block->captures()) {
  671. // Ignore __block captures; there's nothing special in the
  672. // on-stack block that we need to do for them.
  673. if (CI.isByRef()) continue;
  674. // Ignore variables that are constant-captured.
  675. const VarDecl *variable = CI.getVariable();
  676. CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
  677. if (capture.isConstant()) continue;
  678. // Ignore objects that aren't destructed.
  679. QualType VT = getCaptureFieldType(CGF, CI);
  680. QualType::DestructionKind dtorKind = VT.isDestructedType();
  681. if (dtorKind == QualType::DK_none) continue;
  682. CodeGenFunction::Destroyer *destroyer;
  683. // Block captures count as local values and have imprecise semantics.
  684. // They also can't be arrays, so no need to worry about that.
  685. //
  686. // For const-qualified captures, emit clang.arc.use to ensure the captured
  687. // object doesn't get released while we are still depending on its validity
  688. // within the block.
  689. if (VT.isConstQualified() &&
  690. VT.getObjCLifetime() == Qualifiers::OCL_Strong &&
  691. CGF.CGM.getCodeGenOpts().OptimizationLevel != 0) {
  692. assert(CGF.CGM.getLangOpts().ObjCAutoRefCount &&
  693. "expected ObjC ARC to be enabled");
  694. destroyer = CodeGenFunction::emitARCIntrinsicUse;
  695. } else if (dtorKind == QualType::DK_objc_strong_lifetime) {
  696. destroyer = CodeGenFunction::destroyARCStrongImprecise;
  697. } else {
  698. destroyer = CGF.getDestroyer(dtorKind);
  699. }
  700. // GEP down to the address.
  701. Address addr = CGF.Builder.CreateStructGEP(blockInfo.LocalAddress,
  702. capture.getIndex(),
  703. capture.getOffset());
  704. // We can use that GEP as the dominating IP.
  705. if (!blockInfo.DominatingIP)
  706. blockInfo.DominatingIP = cast<llvm::Instruction>(addr.getPointer());
  707. CleanupKind cleanupKind = InactiveNormalCleanup;
  708. bool useArrayEHCleanup = CGF.needsEHCleanup(dtorKind);
  709. if (useArrayEHCleanup)
  710. cleanupKind = InactiveNormalAndEHCleanup;
  711. CGF.pushDestroy(cleanupKind, addr, VT,
  712. destroyer, useArrayEHCleanup);
  713. // Remember where that cleanup was.
  714. capture.setCleanup(CGF.EHStack.stable_begin());
  715. }
  716. }
  717. /// Enter a full-expression with a non-trivial number of objects to
  718. /// clean up. This is in this file because, at the moment, the only
  719. /// kind of cleanup object is a BlockDecl*.
  720. void CodeGenFunction::enterNonTrivialFullExpression(const ExprWithCleanups *E) {
  721. assert(E->getNumObjects() != 0);
  722. for (const ExprWithCleanups::CleanupObject &C : E->getObjects())
  723. enterBlockScope(*this, C);
  724. }
  725. /// Find the layout for the given block in a linked list and remove it.
  726. static CGBlockInfo *findAndRemoveBlockInfo(CGBlockInfo **head,
  727. const BlockDecl *block) {
  728. while (true) {
  729. assert(head && *head);
  730. CGBlockInfo *cur = *head;
  731. // If this is the block we're looking for, splice it out of the list.
  732. if (cur->getBlockDecl() == block) {
  733. *head = cur->NextBlockInfo;
  734. return cur;
  735. }
  736. head = &cur->NextBlockInfo;
  737. }
  738. }
  739. /// Destroy a chain of block layouts.
  740. void CodeGenFunction::destroyBlockInfos(CGBlockInfo *head) {
  741. assert(head && "destroying an empty chain");
  742. do {
  743. CGBlockInfo *cur = head;
  744. head = cur->NextBlockInfo;
  745. delete cur;
  746. } while (head != nullptr);
  747. }
  748. /// Emit a block literal expression in the current function.
  749. llvm::Value *CodeGenFunction::EmitBlockLiteral(const BlockExpr *blockExpr) {
  750. // If the block has no captures, we won't have a pre-computed
  751. // layout for it.
  752. if (!blockExpr->getBlockDecl()->hasCaptures()) {
  753. // The block literal is emitted as a global variable, and the block invoke
  754. // function has to be extracted from its initializer.
  755. if (llvm::Constant *Block = CGM.getAddrOfGlobalBlockIfEmitted(blockExpr)) {
  756. return Block;
  757. }
  758. CGBlockInfo blockInfo(blockExpr->getBlockDecl(), CurFn->getName());
  759. computeBlockInfo(CGM, this, blockInfo);
  760. blockInfo.BlockExpression = blockExpr;
  761. return EmitBlockLiteral(blockInfo);
  762. }
  763. // Find the block info for this block and take ownership of it.
  764. std::unique_ptr<CGBlockInfo> blockInfo;
  765. blockInfo.reset(findAndRemoveBlockInfo(&FirstBlockInfo,
  766. blockExpr->getBlockDecl()));
  767. blockInfo->BlockExpression = blockExpr;
  768. return EmitBlockLiteral(*blockInfo);
  769. }
  770. llvm::Value *CodeGenFunction::EmitBlockLiteral(const CGBlockInfo &blockInfo) {
  771. bool IsOpenCL = CGM.getContext().getLangOpts().OpenCL;
  772. // Using the computed layout, generate the actual block function.
  773. bool isLambdaConv = blockInfo.getBlockDecl()->isConversionFromLambda();
  774. CodeGenFunction BlockCGF{CGM, true};
  775. BlockCGF.SanOpts = SanOpts;
  776. auto *InvokeFn = BlockCGF.GenerateBlockFunction(
  777. CurGD, blockInfo, LocalDeclMap, isLambdaConv, blockInfo.CanBeGlobal);
  778. // If there is nothing to capture, we can emit this as a global block.
  779. if (blockInfo.CanBeGlobal)
  780. return CGM.getAddrOfGlobalBlockIfEmitted(blockInfo.BlockExpression);
  781. // Otherwise, we have to emit this as a local block.
  782. Address blockAddr = blockInfo.LocalAddress;
  783. assert(blockAddr.isValid() && "block has no address!");
  784. llvm::Constant *isa;
  785. llvm::Constant *descriptor;
  786. BlockFlags flags;
  787. if (!IsOpenCL) {
  788. // If the block is non-escaping, set field 'isa' to NSConcreteGlobalBlock
  789. // and set the BLOCK_IS_GLOBAL bit of field 'flags'. Copying a non-escaping
  790. // block just returns the original block and releasing it is a no-op.
  791. llvm::Constant *blockISA = blockInfo.getBlockDecl()->doesNotEscape()
  792. ? CGM.getNSConcreteGlobalBlock()
  793. : CGM.getNSConcreteStackBlock();
  794. isa = llvm::ConstantExpr::getBitCast(blockISA, VoidPtrTy);
  795. // Build the block descriptor.
  796. descriptor = buildBlockDescriptor(CGM, blockInfo);
  797. // Compute the initial on-stack block flags.
  798. flags = BLOCK_HAS_SIGNATURE;
  799. if (blockInfo.HasCapturedVariableLayout)
  800. flags |= BLOCK_HAS_EXTENDED_LAYOUT;
  801. if (blockInfo.needsCopyDisposeHelpers())
  802. flags |= BLOCK_HAS_COPY_DISPOSE;
  803. if (blockInfo.HasCXXObject)
  804. flags |= BLOCK_HAS_CXX_OBJ;
  805. if (blockInfo.UsesStret)
  806. flags |= BLOCK_USE_STRET;
  807. if (blockInfo.getBlockDecl()->doesNotEscape())
  808. flags |= BLOCK_IS_NOESCAPE | BLOCK_IS_GLOBAL;
  809. }
  810. auto projectField =
  811. [&](unsigned index, CharUnits offset, const Twine &name) -> Address {
  812. return Builder.CreateStructGEP(blockAddr, index, offset, name);
  813. };
  814. auto storeField =
  815. [&](llvm::Value *value, unsigned index, CharUnits offset,
  816. const Twine &name) {
  817. Builder.CreateStore(value, projectField(index, offset, name));
  818. };
  819. // Initialize the block header.
  820. {
  821. // We assume all the header fields are densely packed.
  822. unsigned index = 0;
  823. CharUnits offset;
  824. auto addHeaderField =
  825. [&](llvm::Value *value, CharUnits size, const Twine &name) {
  826. storeField(value, index, offset, name);
  827. offset += size;
  828. index++;
  829. };
  830. if (!IsOpenCL) {
  831. addHeaderField(isa, getPointerSize(), "block.isa");
  832. addHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
  833. getIntSize(), "block.flags");
  834. addHeaderField(llvm::ConstantInt::get(IntTy, 0), getIntSize(),
  835. "block.reserved");
  836. } else {
  837. addHeaderField(
  838. llvm::ConstantInt::get(IntTy, blockInfo.BlockSize.getQuantity()),
  839. getIntSize(), "block.size");
  840. addHeaderField(
  841. llvm::ConstantInt::get(IntTy, blockInfo.BlockAlign.getQuantity()),
  842. getIntSize(), "block.align");
  843. }
  844. if (!IsOpenCL) {
  845. addHeaderField(llvm::ConstantExpr::getBitCast(InvokeFn, VoidPtrTy),
  846. getPointerSize(), "block.invoke");
  847. addHeaderField(descriptor, getPointerSize(), "block.descriptor");
  848. } else if (auto *Helper =
  849. CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
  850. for (auto I : Helper->getCustomFieldValues(*this, blockInfo)) {
  851. addHeaderField(
  852. I.first,
  853. CharUnits::fromQuantity(
  854. CGM.getDataLayout().getTypeAllocSize(I.first->getType())),
  855. I.second);
  856. }
  857. }
  858. }
  859. // Finally, capture all the values into the block.
  860. const BlockDecl *blockDecl = blockInfo.getBlockDecl();
  861. // First, 'this'.
  862. if (blockDecl->capturesCXXThis()) {
  863. Address addr = projectField(blockInfo.CXXThisIndex, blockInfo.CXXThisOffset,
  864. "block.captured-this.addr");
  865. Builder.CreateStore(LoadCXXThis(), addr);
  866. }
  867. // Next, captured variables.
  868. for (const auto &CI : blockDecl->captures()) {
  869. const VarDecl *variable = CI.getVariable();
  870. const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
  871. // Ignore constant captures.
  872. if (capture.isConstant()) continue;
  873. QualType type = capture.fieldType();
  874. // This will be a [[type]]*, except that a byref entry will just be
  875. // an i8**.
  876. Address blockField =
  877. projectField(capture.getIndex(), capture.getOffset(), "block.captured");
  878. // Compute the address of the thing we're going to move into the
  879. // block literal.
  880. Address src = Address::invalid();
  881. if (blockDecl->isConversionFromLambda()) {
  882. // The lambda capture in a lambda's conversion-to-block-pointer is
  883. // special; we'll simply emit it directly.
  884. src = Address::invalid();
  885. } else if (CI.isByRef()) {
  886. if (BlockInfo && CI.isNested()) {
  887. // We need to use the capture from the enclosing block.
  888. const CGBlockInfo::Capture &enclosingCapture =
  889. BlockInfo->getCapture(variable);
  890. // This is a [[type]]*, except that a byref entry will just be an i8**.
  891. src = Builder.CreateStructGEP(LoadBlockStruct(),
  892. enclosingCapture.getIndex(),
  893. enclosingCapture.getOffset(),
  894. "block.capture.addr");
  895. } else {
  896. auto I = LocalDeclMap.find(variable);
  897. assert(I != LocalDeclMap.end());
  898. src = I->second;
  899. }
  900. } else {
  901. DeclRefExpr declRef(const_cast<VarDecl *>(variable),
  902. /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
  903. type.getNonReferenceType(), VK_LValue,
  904. SourceLocation());
  905. src = EmitDeclRefLValue(&declRef).getAddress();
  906. };
  907. // For byrefs, we just write the pointer to the byref struct into
  908. // the block field. There's no need to chase the forwarding
  909. // pointer at this point, since we're building something that will
  910. // live a shorter life than the stack byref anyway.
  911. if (CI.isByRef()) {
  912. // Get a void* that points to the byref struct.
  913. llvm::Value *byrefPointer;
  914. if (CI.isNested())
  915. byrefPointer = Builder.CreateLoad(src, "byref.capture");
  916. else
  917. byrefPointer = Builder.CreateBitCast(src.getPointer(), VoidPtrTy);
  918. // Write that void* into the capture field.
  919. Builder.CreateStore(byrefPointer, blockField);
  920. // If we have a copy constructor, evaluate that into the block field.
  921. } else if (const Expr *copyExpr = CI.getCopyExpr()) {
  922. if (blockDecl->isConversionFromLambda()) {
  923. // If we have a lambda conversion, emit the expression
  924. // directly into the block instead.
  925. AggValueSlot Slot =
  926. AggValueSlot::forAddr(blockField, Qualifiers(),
  927. AggValueSlot::IsDestructed,
  928. AggValueSlot::DoesNotNeedGCBarriers,
  929. AggValueSlot::IsNotAliased,
  930. AggValueSlot::DoesNotOverlap);
  931. EmitAggExpr(copyExpr, Slot);
  932. } else {
  933. EmitSynthesizedCXXCopyCtor(blockField, src, copyExpr);
  934. }
  935. // If it's a reference variable, copy the reference into the block field.
  936. } else if (type->isReferenceType()) {
  937. Builder.CreateStore(src.getPointer(), blockField);
  938. // If type is const-qualified, copy the value into the block field.
  939. } else if (type.isConstQualified() &&
  940. type.getObjCLifetime() == Qualifiers::OCL_Strong &&
  941. CGM.getCodeGenOpts().OptimizationLevel != 0) {
  942. llvm::Value *value = Builder.CreateLoad(src, "captured");
  943. Builder.CreateStore(value, blockField);
  944. // If this is an ARC __strong block-pointer variable, don't do a
  945. // block copy.
  946. //
  947. // TODO: this can be generalized into the normal initialization logic:
  948. // we should never need to do a block-copy when initializing a local
  949. // variable, because the local variable's lifetime should be strictly
  950. // contained within the stack block's.
  951. } else if (type.getObjCLifetime() == Qualifiers::OCL_Strong &&
  952. type->isBlockPointerType()) {
  953. // Load the block and do a simple retain.
  954. llvm::Value *value = Builder.CreateLoad(src, "block.captured_block");
  955. value = EmitARCRetainNonBlock(value);
  956. // Do a primitive store to the block field.
  957. Builder.CreateStore(value, blockField);
  958. // Otherwise, fake up a POD copy into the block field.
  959. } else {
  960. // Fake up a new variable so that EmitScalarInit doesn't think
  961. // we're referring to the variable in its own initializer.
  962. ImplicitParamDecl BlockFieldPseudoVar(getContext(), type,
  963. ImplicitParamDecl::Other);
  964. // We use one of these or the other depending on whether the
  965. // reference is nested.
  966. DeclRefExpr declRef(const_cast<VarDecl *>(variable),
  967. /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
  968. type, VK_LValue, SourceLocation());
  969. ImplicitCastExpr l2r(ImplicitCastExpr::OnStack, type, CK_LValueToRValue,
  970. &declRef, VK_RValue);
  971. // FIXME: Pass a specific location for the expr init so that the store is
  972. // attributed to a reasonable location - otherwise it may be attributed to
  973. // locations of subexpressions in the initialization.
  974. EmitExprAsInit(&l2r, &BlockFieldPseudoVar,
  975. MakeAddrLValue(blockField, type, AlignmentSource::Decl),
  976. /*captured by init*/ false);
  977. }
  978. // Activate the cleanup if layout pushed one.
  979. if (!CI.isByRef()) {
  980. EHScopeStack::stable_iterator cleanup = capture.getCleanup();
  981. if (cleanup.isValid())
  982. ActivateCleanupBlock(cleanup, blockInfo.DominatingIP);
  983. }
  984. }
  985. // Cast to the converted block-pointer type, which happens (somewhat
  986. // unfortunately) to be a pointer to function type.
  987. llvm::Value *result = Builder.CreatePointerCast(
  988. blockAddr.getPointer(), ConvertType(blockInfo.getBlockExpr()->getType()));
  989. if (IsOpenCL) {
  990. CGM.getOpenCLRuntime().recordBlockInfo(blockInfo.BlockExpression, InvokeFn,
  991. result);
  992. }
  993. return result;
  994. }
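/// Lazily build the type used for pointers to block descriptors: a pointer to
/// "struct.__block_descriptor" (for now just the two 'unsigned long'
/// reserved/size fields), placed in the constant address space under OpenCL.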
  995. llvm::Type *CodeGenModule::getBlockDescriptorType() {
  996. if (BlockDescriptorType)
  997. return BlockDescriptorType;
  998. llvm::Type *UnsignedLongTy =
  999. getTypes().ConvertType(getContext().UnsignedLongTy);
// struct __block_descriptor {
//   unsigned long reserved;
//   unsigned long block_size;
//
//   // later, the following will be added
//
//   struct {
//     void (*copyHelper)();
//     void (*disposeHelper)();
//   } helpers;                // !!! optional
//
//   const char *signature;   // the block signature
//   const char *layout;      // reserved
// };
  1014. BlockDescriptorType = llvm::StructType::create(
  1015. "struct.__block_descriptor", UnsignedLongTy, UnsignedLongTy);
  1016. // Now form a pointer to that.
  1017. unsigned AddrSpace = 0;
  1018. if (getLangOpts().OpenCL)
  1019. AddrSpace = getContext().getTargetAddressSpace(LangAS::opencl_constant);
  1020. BlockDescriptorType = llvm::PointerType::get(BlockDescriptorType, AddrSpace);
  1021. return BlockDescriptorType;
  1022. }
  1023. llvm::Type *CodeGenModule::getGenericBlockLiteralType() {
  1024. assert(!getLangOpts().OpenCL && "OpenCL does not need this");
  1025. if (GenericBlockLiteralType)
  1026. return GenericBlockLiteralType;
  1027. llvm::Type *BlockDescPtrTy = getBlockDescriptorType();
// struct __block_literal_generic {
//   void *__isa;
//   int __flags;
//   int __reserved;
//   void (*__invoke)(void *);
//   struct __block_descriptor *__descriptor;
// };
  1035. GenericBlockLiteralType =
  1036. llvm::StructType::create("struct.__block_literal_generic", VoidPtrTy,
  1037. IntTy, IntTy, VoidPtrTy, BlockDescPtrTy);
  1038. return GenericBlockLiteralType;
  1039. }
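/// Emit a call through a block pointer.  Outside OpenCL, the callee is
/// bitcast to the generic block literal type, the __invoke field is loaded
/// out of it, and the block literal itself is passed as the first argument;
/// under OpenCL the invoke function comes from the OpenCL runtime instead.
/// Roughly, for 'b(x)' this emits:
///   fn = ((struct __block_literal_generic *)b)->__invoke;
///   fn(b, x);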
  1040. RValue CodeGenFunction::EmitBlockCallExpr(const CallExpr *E,
  1041. ReturnValueSlot ReturnValue) {
  1042. const BlockPointerType *BPT =
  1043. E->getCallee()->getType()->getAs<BlockPointerType>();
  1044. llvm::Value *BlockPtr = EmitScalarExpr(E->getCallee());
  1045. llvm::Value *FuncPtr = nullptr;
  1046. if (!CGM.getLangOpts().OpenCL) {
  1047. // Get a pointer to the generic block literal.
  1048. llvm::Type *BlockLiteralTy =
  1049. llvm::PointerType::get(CGM.getGenericBlockLiteralType(), 0);
  1050. // Bitcast the callee to a block literal.
  1051. BlockPtr =
  1052. Builder.CreatePointerCast(BlockPtr, BlockLiteralTy, "block.literal");
  1053. // Get the function pointer from the literal.
  1054. FuncPtr =
  1055. Builder.CreateStructGEP(CGM.getGenericBlockLiteralType(), BlockPtr, 3);
  1056. }
  1057. // Add the block literal.
  1058. CallArgList Args;
  1059. QualType VoidPtrQualTy = getContext().VoidPtrTy;
  1060. llvm::Type *GenericVoidPtrTy = VoidPtrTy;
  1061. if (getLangOpts().OpenCL) {
  1062. GenericVoidPtrTy = CGM.getOpenCLRuntime().getGenericVoidPointerType();
  1063. VoidPtrQualTy =
  1064. getContext().getPointerType(getContext().getAddrSpaceQualType(
  1065. getContext().VoidTy, LangAS::opencl_generic));
  1066. }
  1067. BlockPtr = Builder.CreatePointerCast(BlockPtr, GenericVoidPtrTy);
  1068. Args.add(RValue::get(BlockPtr), VoidPtrQualTy);
  1069. QualType FnType = BPT->getPointeeType();
  1070. // And the rest of the arguments.
  1071. EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(), E->arguments());
  1072. // Load the function.
  1073. llvm::Value *Func;
  1074. if (CGM.getLangOpts().OpenCL)
  1075. Func = CGM.getOpenCLRuntime().getInvokeFunction(E->getCallee());
  1076. else
  1077. Func = Builder.CreateAlignedLoad(FuncPtr, getPointerAlign());
  1078. const FunctionType *FuncTy = FnType->castAs<FunctionType>();
  1079. const CGFunctionInfo &FnInfo =
  1080. CGM.getTypes().arrangeBlockFunctionCall(Args, FuncTy);
  1081. // Cast the function pointer to the right type.
  1082. llvm::Type *BlockFTy = CGM.getTypes().GetFunctionType(FnInfo);
  1083. llvm::Type *BlockFTyPtr = llvm::PointerType::getUnqual(BlockFTy);
  1084. Func = Builder.CreatePointerCast(Func, BlockFTyPtr);
  1085. // Prepare the callee.
  1086. CGCallee Callee(CGCalleeInfo(), Func);
  1087. // And call the block.
  1088. return EmitCall(FnInfo, Callee, ReturnValue, Args);
  1089. }
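/// Return the address of a variable captured by the current block.  Constant
/// captures are resolved through LocalDeclMap; __block captures load the
/// byref pointer stored in the block and follow its forwarding pointer;
/// reference captures are loaded through the stored reference.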
  1090. Address CodeGenFunction::GetAddrOfBlockDecl(const VarDecl *variable,
  1091. bool isByRef) {
  1092. assert(BlockInfo && "evaluating block ref without block information?");
  1093. const CGBlockInfo::Capture &capture = BlockInfo->getCapture(variable);
  1094. // Handle constant captures.
  1095. if (capture.isConstant()) return LocalDeclMap.find(variable)->second;
  1096. Address addr =
  1097. Builder.CreateStructGEP(LoadBlockStruct(), capture.getIndex(),
  1098. capture.getOffset(), "block.capture.addr");
  1099. if (isByRef) {
  1100. // addr should be a void** right now. Load, then cast the result
  1101. // to byref*.
  1102. auto &byrefInfo = getBlockByrefInfo(variable);
  1103. addr = Address(Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);
  1104. auto byrefPointerType = llvm::PointerType::get(byrefInfo.Type, 0);
  1105. addr = Builder.CreateBitCast(addr, byrefPointerType, "byref.addr");
  1106. addr = emitBlockByrefAddress(addr, byrefInfo, /*follow*/ true,
  1107. variable->getName());
  1108. }
  1109. if (capture.fieldType()->isReferenceType())
  1110. addr = EmitLoadOfReference(MakeAddrLValue(addr, capture.fieldType()));
  1111. return addr;
  1112. }
  1113. void CodeGenModule::setAddrOfGlobalBlock(const BlockExpr *BE,
  1114. llvm::Constant *Addr) {
  1115. bool Ok = EmittedGlobalBlocks.insert(std::make_pair(BE, Addr)).second;
  1116. (void)Ok;
  1117. assert(Ok && "Trying to replace an already-existing global block!");
  1118. }
  1119. llvm::Constant *
  1120. CodeGenModule::GetAddrOfGlobalBlock(const BlockExpr *BE,
  1121. StringRef Name) {
  1122. if (llvm::Constant *Block = getAddrOfGlobalBlockIfEmitted(BE))
  1123. return Block;
  1124. CGBlockInfo blockInfo(BE->getBlockDecl(), Name);
  1125. blockInfo.BlockExpression = BE;
  1126. // Compute information about the layout, etc., of this block.
  1127. computeBlockInfo(*this, nullptr, blockInfo);
  1128. // Using that metadata, generate the actual block function.
  1129. {
  1130. CodeGenFunction::DeclMapTy LocalDeclMap;
  1131. CodeGenFunction(*this).GenerateBlockFunction(
  1132. GlobalDecl(), blockInfo, LocalDeclMap,
  1133. /*IsLambdaConversionToBlock*/ false, /*BuildGlobalBlock*/ true);
  1134. }
  1135. return getAddrOfGlobalBlockIfEmitted(BE);
  1136. }
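/// Emit the literal for a block that can be represented as a global
/// constant: isa, flags, reserved, invoke function and descriptor (or, for
/// OpenCL, the block size and alignment plus any target-specific fields).
/// On Windows the isa field is filled in at runtime by a CRT initializer,
/// since it points into another DLL.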
  1137. static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
  1138. const CGBlockInfo &blockInfo,
  1139. llvm::Constant *blockFn) {
  1140. assert(blockInfo.CanBeGlobal);
  1141. // Callers should detect this case on their own: calling this function
  1142. // generally requires computing layout information, which is a waste of time
  1143. // if we've already emitted this block.
  1144. assert(!CGM.getAddrOfGlobalBlockIfEmitted(blockInfo.BlockExpression) &&
  1145. "Refusing to re-emit a global block.");
  1146. // Generate the constants for the block literal initializer.
  1147. ConstantInitBuilder builder(CGM);
  1148. auto fields = builder.beginStruct();
  1149. bool IsOpenCL = CGM.getLangOpts().OpenCL;
  1150. bool IsWindows = CGM.getTarget().getTriple().isOSWindows();
  1151. if (!IsOpenCL) {
  1152. // isa
  1153. if (IsWindows)
  1154. fields.addNullPointer(CGM.Int8PtrPtrTy);
  1155. else
  1156. fields.add(CGM.getNSConcreteGlobalBlock());
  1157. // __flags
  1158. BlockFlags flags = BLOCK_IS_GLOBAL | BLOCK_HAS_SIGNATURE;
  1159. if (blockInfo.UsesStret)
  1160. flags |= BLOCK_USE_STRET;
  1161. fields.addInt(CGM.IntTy, flags.getBitMask());
  1162. // Reserved
  1163. fields.addInt(CGM.IntTy, 0);
  1164. // Function
  1165. fields.add(blockFn);
  1166. } else {
  1167. fields.addInt(CGM.IntTy, blockInfo.BlockSize.getQuantity());
  1168. fields.addInt(CGM.IntTy, blockInfo.BlockAlign.getQuantity());
  1169. }
  1170. if (!IsOpenCL) {
  1171. // Descriptor
  1172. fields.add(buildBlockDescriptor(CGM, blockInfo));
  1173. } else if (auto *Helper =
  1174. CGM.getTargetCodeGenInfo().getTargetOpenCLBlockHelper()) {
  1175. for (auto I : Helper->getCustomFieldValues(CGM, blockInfo)) {
  1176. fields.add(I);
  1177. }
  1178. }
  1179. unsigned AddrSpace = 0;
  1180. if (CGM.getContext().getLangOpts().OpenCL)
  1181. AddrSpace = CGM.getContext().getTargetAddressSpace(LangAS::opencl_global);
  1182. llvm::Constant *literal = fields.finishAndCreateGlobal(
  1183. "__block_literal_global", blockInfo.BlockAlign,
  1184. /*constant*/ !IsWindows, llvm::GlobalVariable::InternalLinkage, AddrSpace);
  1185. // Windows does not allow globals to be initialised to point to globals in
  1186. // different DLLs. Any such variables must run code to initialise them.
  1187. if (IsWindows) {
  1188. auto *Init = llvm::Function::Create(llvm::FunctionType::get(CGM.VoidTy,
  1189. {}), llvm::GlobalValue::InternalLinkage, ".block_isa_init",
  1190. &CGM.getModule());
  1191. llvm::IRBuilder<> b(llvm::BasicBlock::Create(CGM.getLLVMContext(), "entry",
  1192. Init));
  1193. b.CreateAlignedStore(CGM.getNSConcreteGlobalBlock(),
  1194. b.CreateStructGEP(literal, 0), CGM.getPointerAlign().getQuantity());
  1195. b.CreateRetVoid();
  1196. // We can't use the normal LLVM global initialisation array, because we
  1197. // need to specify that this runs early in library initialisation.
  1198. auto *InitVar = new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
  1199. /*isConstant*/true, llvm::GlobalValue::InternalLinkage,
  1200. Init, ".block_isa_init_ptr");
  1201. InitVar->setSection(".CRT$XCLa");
  1202. CGM.addUsedGlobal(InitVar);
  1203. }
  1204. // Return a constant of the appropriately-casted type.
  1205. llvm::Type *RequiredType =
  1206. CGM.getTypes().ConvertType(blockInfo.getBlockExpr()->getType());
  1207. llvm::Constant *Result =
  1208. llvm::ConstantExpr::getPointerCast(literal, RequiredType);
  1209. CGM.setAddrOfGlobalBlock(blockInfo.BlockExpression, Result);
  1210. if (CGM.getContext().getLangOpts().OpenCL)
  1211. CGM.getOpenCLRuntime().recordBlockInfo(
  1212. blockInfo.BlockExpression,
  1213. cast<llvm::Function>(blockFn->stripPointerCasts()), Result);
  1214. return Result;
  1215. }
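/// Handle the implicit block-literal argument (".block_descriptor") of a
/// block invocation function: spill it to an alloca so the debugger can see
/// it, and record the pointer, cast to the block's struct type, as
/// BlockPointer for subsequent capture lookups.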
  1216. void CodeGenFunction::setBlockContextParameter(const ImplicitParamDecl *D,
  1217. unsigned argNum,
  1218. llvm::Value *arg) {
  1219. assert(BlockInfo && "not emitting prologue of block invocation function?!");
  1220. // Allocate a stack slot like for any local variable to guarantee optimal
  1221. // debug info at -O0. The mem2reg pass will eliminate it when optimizing.
  1222. Address alloc = CreateMemTemp(D->getType(), D->getName() + ".addr");
  1223. Builder.CreateStore(arg, alloc);
  1224. if (CGDebugInfo *DI = getDebugInfo()) {
  1225. if (CGM.getCodeGenOpts().getDebugInfo() >=
  1226. codegenoptions::LimitedDebugInfo) {
  1227. DI->setLocation(D->getLocation());
  1228. DI->EmitDeclareOfBlockLiteralArgVariable(
  1229. *BlockInfo, D->getName(), argNum,
  1230. cast<llvm::AllocaInst>(alloc.getPointer()), Builder);
  1231. }
  1232. }
  1233. SourceLocation StartLoc = BlockInfo->getBlockExpr()->getBody()->getBeginLoc();
  1234. ApplyDebugLocation Scope(*this, StartLoc);
  1235. // Instead of messing around with LocalDeclMap, just set the value
  1236. // directly as BlockPointer.
  1237. BlockPointer = Builder.CreatePointerCast(
  1238. arg,
  1239. BlockInfo->StructureType->getPointerTo(
  1240. getContext().getLangOpts().OpenCL
  1241. ? getContext().getTargetAddressSpace(LangAS::opencl_generic)
  1242. : 0),
  1243. "block");
  1244. }
  1245. Address CodeGenFunction::LoadBlockStruct() {
  1246. assert(BlockInfo && "not in a block invocation function!");
  1247. assert(BlockPointer && "no block pointer set!");
  1248. return Address(BlockPointer, BlockInfo->BlockAlign);
  1249. }
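/// Emit the invocation function for a block.  The first parameter is the
/// block literal, taken as a void* (generic-AS void* under OpenCL), followed
/// by the block's declared parameters; captured variables, including a
/// captured C++ 'this', are materialized from the literal before the body is
/// emitted.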
  1250. llvm::Function *
  1251. CodeGenFunction::GenerateBlockFunction(GlobalDecl GD,
  1252. const CGBlockInfo &blockInfo,
  1253. const DeclMapTy &ldm,
  1254. bool IsLambdaConversionToBlock,
  1255. bool BuildGlobalBlock) {
  1256. const BlockDecl *blockDecl = blockInfo.getBlockDecl();
  1257. CurGD = GD;
  1258. CurEHLocation = blockInfo.getBlockExpr()->getEndLoc();
  1259. BlockInfo = &blockInfo;
  1260. // Arrange for local static and local extern declarations to appear
  1261. // to be local to this function as well, in case they're directly
  1262. // referenced in a block.
  1263. for (DeclMapTy::const_iterator i = ldm.begin(), e = ldm.end(); i != e; ++i) {
  1264. const auto *var = dyn_cast<VarDecl>(i->first);
  1265. if (var && !var->hasLocalStorage())
  1266. setAddrOfLocalVar(var, i->second);
  1267. }
  1268. // Begin building the function declaration.
  1269. // Build the argument list.
  1270. FunctionArgList args;
  1271. // The first argument is the block pointer. Just take it as a void*
  1272. // and cast it later.
  1273. QualType selfTy = getContext().VoidPtrTy;
// For OpenCL, the block pointer that is passed in can be either a private-AS
// local variable or a global-AS program-scope variable (the cases with and
// without captures, respectively), so the generic AS is used here in order
// to accommodate both in a single implementation.
  1278. if (getLangOpts().OpenCL)
  1279. selfTy = getContext().getPointerType(getContext().getAddrSpaceQualType(
  1280. getContext().VoidTy, LangAS::opencl_generic));
  1281. IdentifierInfo *II = &CGM.getContext().Idents.get(".block_descriptor");
  1282. ImplicitParamDecl SelfDecl(getContext(), const_cast<BlockDecl *>(blockDecl),
  1283. SourceLocation(), II, selfTy,
  1284. ImplicitParamDecl::ObjCSelf);
  1285. args.push_back(&SelfDecl);
  1286. // Now add the rest of the parameters.
  1287. args.append(blockDecl->param_begin(), blockDecl->param_end());
  1288. // Create the function declaration.
  1289. const FunctionProtoType *fnType = blockInfo.getBlockExpr()->getFunctionType();
  1290. const CGFunctionInfo &fnInfo =
  1291. CGM.getTypes().arrangeBlockFunctionDeclaration(fnType, args);
  1292. if (CGM.ReturnSlotInterferesWithArgs(fnInfo))
  1293. blockInfo.UsesStret = true;
  1294. llvm::FunctionType *fnLLVMType = CGM.getTypes().GetFunctionType(fnInfo);
  1295. StringRef name = CGM.getBlockMangledName(GD, blockDecl);
  1296. llvm::Function *fn = llvm::Function::Create(
  1297. fnLLVMType, llvm::GlobalValue::InternalLinkage, name, &CGM.getModule());
  1298. CGM.SetInternalFunctionAttributes(blockDecl, fn, fnInfo);
  1299. if (BuildGlobalBlock) {
  1300. auto GenVoidPtrTy = getContext().getLangOpts().OpenCL
  1301. ? CGM.getOpenCLRuntime().getGenericVoidPointerType()
  1302. : VoidPtrTy;
  1303. buildGlobalBlock(CGM, blockInfo,
  1304. llvm::ConstantExpr::getPointerCast(fn, GenVoidPtrTy));
  1305. }
  1306. // Begin generating the function.
  1307. StartFunction(blockDecl, fnType->getReturnType(), fn, fnInfo, args,
  1308. blockDecl->getLocation(),
  1309. blockInfo.getBlockExpr()->getBody()->getBeginLoc());
  1310. // Okay. Undo some of what StartFunction did.
  1311. // At -O0 we generate an explicit alloca for the BlockPointer, so the RA
  1312. // won't delete the dbg.declare intrinsics for captured variables.
  1313. llvm::Value *BlockPointerDbgLoc = BlockPointer;
  1314. if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
  1315. // Allocate a stack slot for it, so we can point the debugger to it
  1316. Address Alloca = CreateTempAlloca(BlockPointer->getType(),
  1317. getPointerAlign(),
  1318. "block.addr");
  1319. // Set the DebugLocation to empty, so the store is recognized as a
  1320. // frame setup instruction by llvm::DwarfDebug::beginFunction().
  1321. auto NL = ApplyDebugLocation::CreateEmpty(*this);
  1322. Builder.CreateStore(BlockPointer, Alloca);
  1323. BlockPointerDbgLoc = Alloca.getPointer();
  1324. }
  1325. // If we have a C++ 'this' reference, go ahead and force it into
  1326. // existence now.
  1327. if (blockDecl->capturesCXXThis()) {
  1328. Address addr =
  1329. Builder.CreateStructGEP(LoadBlockStruct(), blockInfo.CXXThisIndex,
  1330. blockInfo.CXXThisOffset, "block.captured-this");
  1331. CXXThisValue = Builder.CreateLoad(addr, "this");
  1332. }
  1333. // Also force all the constant captures.
  1334. for (const auto &CI : blockDecl->captures()) {
  1335. const VarDecl *variable = CI.getVariable();
  1336. const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
  1337. if (!capture.isConstant()) continue;
  1338. CharUnits align = getContext().getDeclAlign(variable);
  1339. Address alloca =
  1340. CreateMemTemp(variable->getType(), align, "block.captured-const");
  1341. Builder.CreateStore(capture.getConstant(), alloca);
  1342. setAddrOfLocalVar(variable, alloca);
  1343. }
  1344. // Save a spot to insert the debug information for all the DeclRefExprs.
  1345. llvm::BasicBlock *entry = Builder.GetInsertBlock();
  1346. llvm::BasicBlock::iterator entry_ptr = Builder.GetInsertPoint();
  1347. --entry_ptr;
  1348. if (IsLambdaConversionToBlock)
  1349. EmitLambdaBlockInvokeBody();
  1350. else {
  1351. PGO.assignRegionCounters(GlobalDecl(blockDecl), fn);
  1352. incrementProfileCounter(blockDecl->getBody());
  1353. EmitStmt(blockDecl->getBody());
  1354. }
  1355. // Remember where we were...
  1356. llvm::BasicBlock *resume = Builder.GetInsertBlock();
  1357. // Go back to the entry.
  1358. ++entry_ptr;
  1359. Builder.SetInsertPoint(entry, entry_ptr);
  1360. // Emit debug information for all the DeclRefExprs.
  1361. // FIXME: also for 'this'
  1362. if (CGDebugInfo *DI = getDebugInfo()) {
  1363. for (const auto &CI : blockDecl->captures()) {
  1364. const VarDecl *variable = CI.getVariable();
  1365. DI->EmitLocation(Builder, variable->getLocation());
  1366. if (CGM.getCodeGenOpts().getDebugInfo() >=
  1367. codegenoptions::LimitedDebugInfo) {
  1368. const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
  1369. if (capture.isConstant()) {
  1370. auto addr = LocalDeclMap.find(variable)->second;
  1371. (void)DI->EmitDeclareOfAutoVariable(variable, addr.getPointer(),
  1372. Builder);
  1373. continue;
  1374. }
  1375. DI->EmitDeclareOfBlockDeclRefVariable(
  1376. variable, BlockPointerDbgLoc, Builder, blockInfo,
  1377. entry_ptr == entry->end() ? nullptr : &*entry_ptr);
  1378. }
  1379. }
  1380. // Recover location if it was changed in the above loop.
  1381. DI->EmitLocation(Builder,
  1382. cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
  1383. }
  1384. // And resume where we left off.
  1385. if (resume == nullptr)
  1386. Builder.ClearInsertionPoint();
  1387. else
  1388. Builder.SetInsertPoint(resume);
  1389. FinishFunction(cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
  1390. return fn;
  1391. }
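/// Determine how a captured variable must be copied when the enclosing block
/// is copied to the heap, along with the BlockFieldFlags to pass to the
/// runtime: a C++ copy constructor, an ARC __weak or __strong copy, a
/// non-trivial C struct copy constructor, a _Block_object_assign call, or
/// nothing at all for trivially-copyable captures.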
  1392. static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
  1393. computeCopyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
  1394. const LangOptions &LangOpts) {
  1395. if (CI.getCopyExpr()) {
  1396. assert(!CI.isByRef());
  1397. // don't bother computing flags
  1398. return std::make_pair(BlockCaptureEntityKind::CXXRecord, BlockFieldFlags());
  1399. }
  1400. BlockFieldFlags Flags;
  1401. if (CI.isByRef()) {
  1402. Flags = BLOCK_FIELD_IS_BYREF;
  1403. if (T.isObjCGCWeak())
  1404. Flags |= BLOCK_FIELD_IS_WEAK;
  1405. return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
  1406. }
  1407. Flags = BLOCK_FIELD_IS_OBJECT;
  1408. bool isBlockPointer = T->isBlockPointerType();
  1409. if (isBlockPointer)
  1410. Flags = BLOCK_FIELD_IS_BLOCK;
  1411. switch (T.isNonTrivialToPrimitiveCopy()) {
  1412. case QualType::PCK_Struct:
  1413. return std::make_pair(BlockCaptureEntityKind::NonTrivialCStruct,
  1414. BlockFieldFlags());
  1415. case QualType::PCK_ARCWeak:
  1416. // We need to register __weak direct captures with the runtime.
  1417. return std::make_pair(BlockCaptureEntityKind::ARCWeak, Flags);
  1418. case QualType::PCK_ARCStrong:
  1419. // We need to retain the copied value for __strong direct captures.
  1420. // If it's a block pointer, we have to copy the block and assign that to
  1421. // the destination pointer, so we might as well use _Block_object_assign.
  1422. // Otherwise we can avoid that.
  1423. return std::make_pair(!isBlockPointer ? BlockCaptureEntityKind::ARCStrong
  1424. : BlockCaptureEntityKind::BlockObject,
  1425. Flags);
  1426. case QualType::PCK_Trivial:
  1427. case QualType::PCK_VolatileTrivial: {
  1428. if (!T->isObjCRetainableType())
  1429. // For all other types, the memcpy is fine.
  1430. return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
  1431. // Special rules for ARC captures:
  1432. Qualifiers QS = T.getQualifiers();
  1433. // Non-ARC captures of retainable pointers are strong and
  1434. // therefore require a call to _Block_object_assign.
  1435. if (!QS.getObjCLifetime() && !LangOpts.ObjCAutoRefCount)
  1436. return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
  1437. // Otherwise the memcpy is fine.
  1438. return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
  1439. }
  1440. }
  1441. llvm_unreachable("after exhaustive PrimitiveCopyKind switch");
  1442. }
  1443. static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
  1444. computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
  1445. const LangOptions &LangOpts);
/// Find the set of block captures that need to be explicitly copied or destroyed.
  1447. static void findBlockCapturedManagedEntities(
  1448. const CGBlockInfo &BlockInfo, const LangOptions &LangOpts,
  1449. SmallVectorImpl<BlockCaptureManagedEntity> &ManagedCaptures) {
  1450. for (const auto &CI : BlockInfo.getBlockDecl()->captures()) {
  1451. const VarDecl *Variable = CI.getVariable();
  1452. const CGBlockInfo::Capture &Capture = BlockInfo.getCapture(Variable);
  1453. if (Capture.isConstant())
  1454. continue;
  1455. auto CopyInfo =
  1456. computeCopyInfoForBlockCapture(CI, Variable->getType(), LangOpts);
  1457. auto DisposeInfo =
  1458. computeDestroyInfoForBlockCapture(CI, Variable->getType(), LangOpts);
  1459. if (CopyInfo.first != BlockCaptureEntityKind::None ||
  1460. DisposeInfo.first != BlockCaptureEntityKind::None)
  1461. ManagedCaptures.emplace_back(CopyInfo.first, DisposeInfo.first,
  1462. CopyInfo.second, DisposeInfo.second, CI,
  1463. Capture);
  1464. }
  1465. // Sort the captures by offset.
  1466. llvm::sort(ManagedCaptures.begin(), ManagedCaptures.end());
  1467. }
  1468. namespace {
  1469. /// Release a __block variable.
  1470. struct CallBlockRelease final : EHScopeStack::Cleanup {
  1471. Address Addr;
  1472. BlockFieldFlags FieldFlags;
  1473. bool LoadBlockVarAddr, CanThrow;
  1474. CallBlockRelease(Address Addr, BlockFieldFlags Flags, bool LoadValue,
  1475. bool CT)
  1476. : Addr(Addr), FieldFlags(Flags), LoadBlockVarAddr(LoadValue),
  1477. CanThrow(CT) {}
  1478. void Emit(CodeGenFunction &CGF, Flags flags) override {
  1479. llvm::Value *BlockVarAddr;
  1480. if (LoadBlockVarAddr) {
  1481. BlockVarAddr = CGF.Builder.CreateLoad(Addr);
  1482. BlockVarAddr = CGF.Builder.CreateBitCast(BlockVarAddr, CGF.VoidPtrTy);
  1483. } else {
  1484. BlockVarAddr = Addr.getPointer();
  1485. }
  1486. CGF.BuildBlockRelease(BlockVarAddr, FieldFlags, CanThrow);
  1487. }
  1488. };
  1489. } // end anonymous namespace
  1490. /// Check if \p T is a C++ class that has a destructor that can throw.
  1491. bool CodeGenFunction::cxxDestructorCanThrow(QualType T) {
  1492. if (const auto *RD = T->getAsCXXRecordDecl())
  1493. if (const CXXDestructorDecl *DD = RD->getDestructor())
  1494. return DD->getType()->getAs<FunctionProtoType>()->canThrow();
  1495. return false;
  1496. }
  1497. // Return a string that has the information about a capture.
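// The string encodes the copy/dispose operation for the capture: for example
// "s" for an ARC __strong object, "w" for __weak, "r"-prefixed strings for
// __block (byref) captures, and a length-prefixed mangled type name for C++
// records.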
  1498. static std::string getBlockCaptureStr(const BlockCaptureManagedEntity &E,
  1499. CaptureStrKind StrKind,
  1500. CharUnits BlockAlignment,
  1501. CodeGenModule &CGM) {
  1502. std::string Str;
  1503. ASTContext &Ctx = CGM.getContext();
  1504. std::unique_ptr<ItaniumMangleContext> MC(
  1505. ItaniumMangleContext::create(Ctx, Ctx.getDiagnostics()));
  1506. const BlockDecl::Capture &CI = *E.CI;
  1507. QualType CaptureTy = CI.getVariable()->getType();
  1508. BlockCaptureEntityKind Kind;
  1509. BlockFieldFlags Flags;
  1510. // CaptureStrKind::Merged should be passed only when the operations and the
  1511. // flags are the same for copy and dispose.
  1512. assert((StrKind != CaptureStrKind::Merged ||
  1513. (E.CopyKind == E.DisposeKind && E.CopyFlags == E.DisposeFlags)) &&
  1514. "different operations and flags");
  1515. if (StrKind == CaptureStrKind::DisposeHelper) {
  1516. Kind = E.DisposeKind;
  1517. Flags = E.DisposeFlags;
  1518. } else {
  1519. Kind = E.CopyKind;
  1520. Flags = E.CopyFlags;
  1521. }
  1522. switch (Kind) {
  1523. case BlockCaptureEntityKind::CXXRecord: {
  1524. Str += "c";
  1525. SmallString<256> TyStr;
  1526. llvm::raw_svector_ostream Out(TyStr);
  1527. MC->mangleTypeName(CaptureTy, Out);
  1528. Str += llvm::to_string(TyStr.size()) + TyStr.c_str();
  1529. break;
  1530. }
  1531. case BlockCaptureEntityKind::ARCWeak:
  1532. Str += "w";
  1533. break;
  1534. case BlockCaptureEntityKind::ARCStrong:
  1535. Str += "s";
  1536. break;
  1537. case BlockCaptureEntityKind::BlockObject: {
  1538. const VarDecl *Var = CI.getVariable();
  1539. unsigned F = Flags.getBitMask();
  1540. if (F & BLOCK_FIELD_IS_BYREF) {
  1541. Str += "r";
  1542. if (F & BLOCK_FIELD_IS_WEAK)
  1543. Str += "w";
  1544. else {
  1545. // If CaptureStrKind::Merged is passed, check both the copy expression
  1546. // and the destructor.
  1547. if (StrKind != CaptureStrKind::DisposeHelper) {
  1548. if (Ctx.getBlockVarCopyInit(Var).canThrow())
  1549. Str += "c";
  1550. }
  1551. if (StrKind != CaptureStrKind::CopyHelper) {
  1552. if (CodeGenFunction::cxxDestructorCanThrow(CaptureTy))
  1553. Str += "d";
  1554. }
  1555. }
  1556. } else {
  1557. assert((F & BLOCK_FIELD_IS_OBJECT) && "unexpected flag value");
  1558. if (F == BLOCK_FIELD_IS_BLOCK)
  1559. Str += "b";
  1560. else
  1561. Str += "o";
  1562. }
  1563. break;
  1564. }
  1565. case BlockCaptureEntityKind::NonTrivialCStruct: {
  1566. bool IsVolatile = CaptureTy.isVolatileQualified();
  1567. CharUnits Alignment =
  1568. BlockAlignment.alignmentAtOffset(E.Capture->getOffset());
  1569. Str += "n";
  1570. std::string FuncStr;
  1571. if (StrKind == CaptureStrKind::DisposeHelper)
  1572. FuncStr = CodeGenFunction::getNonTrivialDestructorStr(
  1573. CaptureTy, Alignment, IsVolatile, Ctx);
  1574. else
  1575. // If CaptureStrKind::Merged is passed, use the copy constructor string.
  1576. // It has all the information that the destructor string has.
  1577. FuncStr = CodeGenFunction::getNonTrivialCopyConstructorStr(
  1578. CaptureTy, Alignment, IsVolatile, Ctx);
  1579. // The underscore is necessary here because non-trivial copy constructor
  1580. // and destructor strings can start with a number.
  1581. Str += llvm::to_string(FuncStr.size()) + "_" + FuncStr;
  1582. break;
  1583. }
  1584. case BlockCaptureEntityKind::None:
  1585. break;
  1586. }
  1587. return Str;
  1588. }
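/// Build the name under which a copy or dispose helper is uniqued: the
/// helper kind, optional 'e' (exceptions enabled) and 'a' (ARC EH) markers,
/// the block alignment, and an offset + capture-string pair for each managed
/// capture.  For instance, "__copy_helper_block_8_32s" describes a single
/// __strong object capture at offset 32 in a block aligned to 8 bytes.
/// Helpers that end up with the same name are shared via their linkonce_odr
/// linkage.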
  1589. static std::string getCopyDestroyHelperFuncName(
  1590. const SmallVectorImpl<BlockCaptureManagedEntity> &Captures,
  1591. CharUnits BlockAlignment, CaptureStrKind StrKind, CodeGenModule &CGM) {
  1592. assert((StrKind == CaptureStrKind::CopyHelper ||
  1593. StrKind == CaptureStrKind::DisposeHelper) &&
  1594. "unexpected CaptureStrKind");
  1595. std::string Name = StrKind == CaptureStrKind::CopyHelper
  1596. ? "__copy_helper_block_"
  1597. : "__destroy_helper_block_";
  1598. if (CGM.getLangOpts().Exceptions)
  1599. Name += "e";
  1600. if (CGM.getCodeGenOpts().ObjCAutoRefCountExceptions)
  1601. Name += "a";
  1602. Name += llvm::to_string(BlockAlignment.getQuantity()) + "_";
  1603. for (const BlockCaptureManagedEntity &E : Captures) {
  1604. Name += llvm::to_string(E.Capture->getOffset().getQuantity());
  1605. Name += getBlockCaptureStr(E, StrKind, BlockAlignment, CGM);
  1606. }
  1607. return Name;
  1608. }
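/// Push the cleanup that releases a capture's copy.  When emitting a copy
/// helper the cleanup is EH-only: it exists solely to undo captures that
/// were already copied if copying a later capture throws.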
  1609. static void pushCaptureCleanup(BlockCaptureEntityKind CaptureKind,
  1610. Address Field, QualType CaptureType,
  1611. BlockFieldFlags Flags, bool ForCopyHelper,
  1612. VarDecl *Var, CodeGenFunction &CGF) {
  1613. bool EHOnly = ForCopyHelper;
  1614. switch (CaptureKind) {
  1615. case BlockCaptureEntityKind::CXXRecord:
  1616. case BlockCaptureEntityKind::ARCWeak:
  1617. case BlockCaptureEntityKind::NonTrivialCStruct:
  1618. case BlockCaptureEntityKind::ARCStrong: {
  1619. if (CaptureType.isDestructedType() &&
  1620. (!EHOnly || CGF.needsEHCleanup(CaptureType.isDestructedType()))) {
  1621. CodeGenFunction::Destroyer *Destroyer =
  1622. CaptureKind == BlockCaptureEntityKind::ARCStrong
  1623. ? CodeGenFunction::destroyARCStrongImprecise
  1624. : CGF.getDestroyer(CaptureType.isDestructedType());
  1625. CleanupKind Kind =
  1626. EHOnly ? EHCleanup
  1627. : CGF.getCleanupKind(CaptureType.isDestructedType());
  1628. CGF.pushDestroy(Kind, Field, CaptureType, Destroyer, Kind & EHCleanup);
  1629. }
  1630. break;
  1631. }
  1632. case BlockCaptureEntityKind::BlockObject: {
  1633. if (!EHOnly || CGF.getLangOpts().Exceptions) {
  1634. CleanupKind Kind = EHOnly ? EHCleanup : NormalAndEHCleanup;
  1635. // Calls to _Block_object_dispose along the EH path in the copy helper
  1636. // function don't throw as newly-copied __block variables always have a
  1637. // reference count of 2.
  1638. bool CanThrow =
  1639. !ForCopyHelper && CGF.cxxDestructorCanThrow(CaptureType);
  1640. CGF.enterByrefCleanup(Kind, Field, Flags, /*LoadBlockVarAddr*/ true,
  1641. CanThrow);
  1642. }
  1643. break;
  1644. }
  1645. case BlockCaptureEntityKind::None:
  1646. break;
  1647. }
  1648. }
  1649. static void setBlockHelperAttributesVisibility(bool CapturesNonExternalType,
  1650. llvm::Function *Fn,
  1651. const CGFunctionInfo &FI,
  1652. CodeGenModule &CGM) {
  1653. if (CapturesNonExternalType) {
  1654. CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
  1655. } else {
  1656. Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
  1657. Fn->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  1658. CGM.SetLLVMFunctionAttributes(nullptr, FI, Fn);
  1659. CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Fn);
  1660. }
  1661. }
  1662. /// Generate the copy-helper function for a block closure object:
  1663. /// static void block_copy_helper(block_t *dst, block_t *src);
  1664. /// The runtime will have previously initialized 'dst' by doing a
  1665. /// bit-copy of 'src'.
  1666. ///
  1667. /// Note that this copies an entire block closure object to the heap;
  1668. /// it should not be confused with a 'byref copy helper', which moves
  1669. /// the contents of an individual __block variable to the heap.
  1670. llvm::Constant *
  1671. CodeGenFunction::GenerateCopyHelperFunction(const CGBlockInfo &blockInfo) {
  1672. SmallVector<BlockCaptureManagedEntity, 4> CopiedCaptures;
  1673. findBlockCapturedManagedEntities(blockInfo, getLangOpts(), CopiedCaptures);
  1674. std::string FuncName =
  1675. getCopyDestroyHelperFuncName(CopiedCaptures, blockInfo.BlockAlign,
  1676. CaptureStrKind::CopyHelper, CGM);
  1677. if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(FuncName))
  1678. return llvm::ConstantExpr::getBitCast(Func, VoidPtrTy);
  1679. ASTContext &C = getContext();
  1680. FunctionArgList args;
  1681. ImplicitParamDecl DstDecl(getContext(), C.VoidPtrTy,
  1682. ImplicitParamDecl::Other);
  1683. args.push_back(&DstDecl);
  1684. ImplicitParamDecl SrcDecl(getContext(), C.VoidPtrTy,
  1685. ImplicitParamDecl::Other);
  1686. args.push_back(&SrcDecl);
  1687. const CGFunctionInfo &FI =
  1688. CGM.getTypes().arrangeBuiltinFunctionDeclaration(C.VoidTy, args);
  1689. // FIXME: it would be nice if these were mergeable with things with
  1690. // identical semantics.
  1691. llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
  1692. llvm::Function *Fn =
  1693. llvm::Function::Create(LTy, llvm::GlobalValue::LinkOnceODRLinkage,
  1694. FuncName, &CGM.getModule());
  1695. IdentifierInfo *II
  1696. = &CGM.getContext().Idents.get(FuncName);
  1697. FunctionDecl *FD = FunctionDecl::Create(C,
  1698. C.getTranslationUnitDecl(),
  1699. SourceLocation(),
  1700. SourceLocation(), II, C.VoidTy,
  1701. nullptr, SC_Static,
  1702. false,
  1703. false);
  1704. setBlockHelperAttributesVisibility(blockInfo.CapturesNonExternalType, Fn, FI,
  1705. CGM);
  1706. StartFunction(FD, C.VoidTy, Fn, FI, args);
  1707. ApplyDebugLocation NL{*this, blockInfo.getBlockExpr()->getBeginLoc()};
  1708. llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();
  1709. Address src = GetAddrOfLocalVar(&SrcDecl);
  1710. src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
  1711. src = Builder.CreateBitCast(src, structPtrTy, "block.source");
  1712. Address dst = GetAddrOfLocalVar(&DstDecl);
  1713. dst = Address(Builder.CreateLoad(dst), blockInfo.BlockAlign);
  1714. dst = Builder.CreateBitCast(dst, structPtrTy, "block.dest");
  1715. for (const auto &CopiedCapture : CopiedCaptures) {
  1716. const BlockDecl::Capture &CI = *CopiedCapture.CI;
  1717. const CGBlockInfo::Capture &capture = *CopiedCapture.Capture;
  1718. QualType captureType = CI.getVariable()->getType();
  1719. BlockFieldFlags flags = CopiedCapture.CopyFlags;
  1720. unsigned index = capture.getIndex();
  1721. Address srcField = Builder.CreateStructGEP(src, index, capture.getOffset());
  1722. Address dstField = Builder.CreateStructGEP(dst, index, capture.getOffset());
  1723. switch (CopiedCapture.CopyKind) {
  1724. case BlockCaptureEntityKind::CXXRecord:
  1725. // If there's an explicit copy expression, we do that.
  1726. assert(CI.getCopyExpr() && "copy expression for variable is missing");
  1727. EmitSynthesizedCXXCopyCtor(dstField, srcField, CI.getCopyExpr());
  1728. break;
  1729. case BlockCaptureEntityKind::ARCWeak:
  1730. EmitARCCopyWeak(dstField, srcField);
  1731. break;
  1732. case BlockCaptureEntityKind::NonTrivialCStruct: {
  1733. // If this is a C struct that requires non-trivial copy construction,
  1734. // emit a call to its copy constructor.
  1735. QualType varType = CI.getVariable()->getType();
  1736. callCStructCopyConstructor(MakeAddrLValue(dstField, varType),
  1737. MakeAddrLValue(srcField, varType));
  1738. break;
  1739. }
  1740. case BlockCaptureEntityKind::ARCStrong: {
  1741. llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
  1742. // At -O0, store null into the destination field (so that the
  1743. // storeStrong doesn't over-release) and then call storeStrong.
  1744. // This is a workaround to not having an initStrong call.
  1745. if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
  1746. auto *ty = cast<llvm::PointerType>(srcValue->getType());
  1747. llvm::Value *null = llvm::ConstantPointerNull::get(ty);
  1748. Builder.CreateStore(null, dstField);
  1749. EmitARCStoreStrongCall(dstField, srcValue, true);
  1750. // With optimization enabled, take advantage of the fact that
  1751. // the blocks runtime guarantees a memcpy of the block data, and
  1752. // just emit a retain of the src field.
  1753. } else {
  1754. EmitARCRetainNonBlock(srcValue);
  1755. // Unless EH cleanup is required, we don't need this anymore, so kill
  1756. // it. It's not quite worth the annoyance to avoid creating it in the
  1757. // first place.
  1758. if (!needsEHCleanup(captureType.isDestructedType()))
  1759. cast<llvm::Instruction>(dstField.getPointer())->eraseFromParent();
  1760. }
  1761. break;
  1762. }
  1763. case BlockCaptureEntityKind::BlockObject: {
  1764. llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
  1765. srcValue = Builder.CreateBitCast(srcValue, VoidPtrTy);
  1766. llvm::Value *dstAddr =
  1767. Builder.CreateBitCast(dstField.getPointer(), VoidPtrTy);
  1768. llvm::Value *args[] = {
  1769. dstAddr, srcValue, llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
  1770. };
  1771. if (CI.isByRef() && C.getBlockVarCopyInit(CI.getVariable()).canThrow())
  1772. EmitRuntimeCallOrInvoke(CGM.getBlockObjectAssign(), args);
  1773. else
  1774. EmitNounwindRuntimeCall(CGM.getBlockObjectAssign(), args);
  1775. break;
  1776. }
  1777. case BlockCaptureEntityKind::None:
  1778. continue;
  1779. }
  1780. // Ensure that we destroy the copied object if an exception is thrown later
  1781. // in the helper function.
  1782. pushCaptureCleanup(CopiedCapture.CopyKind, dstField, captureType, flags,
  1783. /*ForCopyHelper*/ true, CI.getVariable(), *this);
  1784. }
  1785. FinishFunction();
  1786. return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  1787. }
  1788. static BlockFieldFlags
  1789. getBlockFieldFlagsForObjCObjectPointer(const BlockDecl::Capture &CI,
  1790. QualType T) {
  1791. BlockFieldFlags Flags = BLOCK_FIELD_IS_OBJECT;
  1792. if (T->isBlockPointerType())
  1793. Flags = BLOCK_FIELD_IS_BLOCK;
  1794. return Flags;
  1795. }
  1796. static std::pair<BlockCaptureEntityKind, BlockFieldFlags>
  1797. computeDestroyInfoForBlockCapture(const BlockDecl::Capture &CI, QualType T,
  1798. const LangOptions &LangOpts) {
  1799. if (CI.isByRef()) {
  1800. BlockFieldFlags Flags = BLOCK_FIELD_IS_BYREF;
  1801. if (T.isObjCGCWeak())
  1802. Flags |= BLOCK_FIELD_IS_WEAK;
  1803. return std::make_pair(BlockCaptureEntityKind::BlockObject, Flags);
  1804. }
  1805. switch (T.isDestructedType()) {
  1806. case QualType::DK_cxx_destructor:
  1807. return std::make_pair(BlockCaptureEntityKind::CXXRecord, BlockFieldFlags());
  1808. case QualType::DK_objc_strong_lifetime:
  1809. // Use objc_storeStrong for __strong direct captures; the
  1810. // dynamic tools really like it when we do this.
  1811. return std::make_pair(BlockCaptureEntityKind::ARCStrong,
  1812. getBlockFieldFlagsForObjCObjectPointer(CI, T));
  1813. case QualType::DK_objc_weak_lifetime:
  1814. // Support __weak direct captures.
  1815. return std::make_pair(BlockCaptureEntityKind::ARCWeak,
  1816. getBlockFieldFlagsForObjCObjectPointer(CI, T));
  1817. case QualType::DK_nontrivial_c_struct:
  1818. return std::make_pair(BlockCaptureEntityKind::NonTrivialCStruct,
  1819. BlockFieldFlags());
  1820. case QualType::DK_none: {
  1821. // Non-ARC captures are strong, and we need to use _Block_object_dispose.
  1822. if (T->isObjCRetainableType() && !T.getQualifiers().hasObjCLifetime() &&
  1823. !LangOpts.ObjCAutoRefCount)
  1824. return std::make_pair(BlockCaptureEntityKind::BlockObject,
  1825. getBlockFieldFlagsForObjCObjectPointer(CI, T));
  1826. // Otherwise, we have nothing to do.
  1827. return std::make_pair(BlockCaptureEntityKind::None, BlockFieldFlags());
  1828. }
  1829. }
  1830. llvm_unreachable("after exhaustive DestructionKind switch");
  1831. }
  1832. /// Generate the destroy-helper function for a block closure object:
  1833. /// static void block_destroy_helper(block_t *theBlock);
  1834. ///
  1835. /// Note that this destroys a heap-allocated block closure object;
  1836. /// it should not be confused with a 'byref destroy helper', which
  1837. /// destroys the heap-allocated contents of an individual __block
  1838. /// variable.
  1839. llvm::Constant *
  1840. CodeGenFunction::GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo) {
  1841. SmallVector<BlockCaptureManagedEntity, 4> DestroyedCaptures;
  1842. findBlockCapturedManagedEntities(blockInfo, getLangOpts(), DestroyedCaptures);
  1843. std::string FuncName =
  1844. getCopyDestroyHelperFuncName(DestroyedCaptures, blockInfo.BlockAlign,
  1845. CaptureStrKind::DisposeHelper, CGM);
  1846. if (llvm::GlobalValue *Func = CGM.getModule().getNamedValue(FuncName))
  1847. return llvm::ConstantExpr::getBitCast(Func, VoidPtrTy);
  1848. ASTContext &C = getContext();
  1849. FunctionArgList args;
  1850. ImplicitParamDecl SrcDecl(getContext(), C.VoidPtrTy,
  1851. ImplicitParamDecl::Other);
  1852. args.push_back(&SrcDecl);
  1853. const CGFunctionInfo &FI =
  1854. CGM.getTypes().arrangeBuiltinFunctionDeclaration(C.VoidTy, args);
// FIXME: We'd like these to be mergeable by content, with
// internal linkage.
  1857. llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
  1858. llvm::Function *Fn =
  1859. llvm::Function::Create(LTy, llvm::GlobalValue::LinkOnceODRLinkage,
  1860. FuncName, &CGM.getModule());
  1861. IdentifierInfo *II
  1862. = &CGM.getContext().Idents.get(FuncName);
  1863. FunctionDecl *FD = FunctionDecl::Create(C, C.getTranslationUnitDecl(),
  1864. SourceLocation(),
  1865. SourceLocation(), II, C.VoidTy,
  1866. nullptr, SC_Static,
  1867. false, false);
  1868. setBlockHelperAttributesVisibility(blockInfo.CapturesNonExternalType, Fn, FI,
  1869. CGM);
  1870. StartFunction(FD, C.VoidTy, Fn, FI, args);
  1871. markAsIgnoreThreadCheckingAtRuntime(Fn);
  1872. ApplyDebugLocation NL{*this, blockInfo.getBlockExpr()->getBeginLoc()};
  1873. llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();
  1874. Address src = GetAddrOfLocalVar(&SrcDecl);
  1875. src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
  1876. src = Builder.CreateBitCast(src, structPtrTy, "block");
  1877. CodeGenFunction::RunCleanupsScope cleanups(*this);
  1878. for (const auto &DestroyedCapture : DestroyedCaptures) {
  1879. const BlockDecl::Capture &CI = *DestroyedCapture.CI;
  1880. const CGBlockInfo::Capture &capture = *DestroyedCapture.Capture;
  1881. BlockFieldFlags flags = DestroyedCapture.DisposeFlags;
  1882. Address srcField =
  1883. Builder.CreateStructGEP(src, capture.getIndex(), capture.getOffset());
  1884. pushCaptureCleanup(DestroyedCapture.DisposeKind, srcField,
  1885. CI.getVariable()->getType(), flags,
  1886. /*ForCopyHelper*/ false, CI.getVariable(), *this);
  1887. }
  1888. cleanups.ForceCleanup();
  1889. FinishFunction();
  1890. return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  1891. }
  1892. namespace {
  1893. /// Emits the copy/dispose helper functions for a __block object of id type.
  1894. class ObjectByrefHelpers final : public BlockByrefHelpers {
  1895. BlockFieldFlags Flags;
  1896. public:
  1897. ObjectByrefHelpers(CharUnits alignment, BlockFieldFlags flags)
  1898. : BlockByrefHelpers(alignment), Flags(flags) {}
  1899. void emitCopy(CodeGenFunction &CGF, Address destField,
  1900. Address srcField) override {
  1901. destField = CGF.Builder.CreateBitCast(destField, CGF.VoidPtrTy);
  1902. srcField = CGF.Builder.CreateBitCast(srcField, CGF.VoidPtrPtrTy);
  1903. llvm::Value *srcValue = CGF.Builder.CreateLoad(srcField);
  1904. unsigned flags = (Flags | BLOCK_BYREF_CALLER).getBitMask();
  1905. llvm::Value *flagsVal = llvm::ConstantInt::get(CGF.Int32Ty, flags);
  1906. llvm::Value *fn = CGF.CGM.getBlockObjectAssign();
  1907. llvm::Value *args[] = { destField.getPointer(), srcValue, flagsVal };
  1908. CGF.EmitNounwindRuntimeCall(fn, args);
  1909. }
  1910. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1911. field = CGF.Builder.CreateBitCast(field, CGF.Int8PtrTy->getPointerTo(0));
  1912. llvm::Value *value = CGF.Builder.CreateLoad(field);
  1913. CGF.BuildBlockRelease(value, Flags | BLOCK_BYREF_CALLER, false);
  1914. }
  1915. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1916. id.AddInteger(Flags.getBitMask());
  1917. }
  1918. };
  1919. /// Emits the copy/dispose helpers for an ARC __block __weak variable.
  1920. class ARCWeakByrefHelpers final : public BlockByrefHelpers {
  1921. public:
  1922. ARCWeakByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
  1923. void emitCopy(CodeGenFunction &CGF, Address destField,
  1924. Address srcField) override {
  1925. CGF.EmitARCMoveWeak(destField, srcField);
  1926. }
  1927. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1928. CGF.EmitARCDestroyWeak(field);
  1929. }
  1930. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1931. // 0 is distinguishable from all pointers and byref flags
  1932. id.AddInteger(0);
  1933. }
  1934. };
  1935. /// Emits the copy/dispose helpers for an ARC __block __strong variable
  1936. /// that's not of block-pointer type.
  1937. class ARCStrongByrefHelpers final : public BlockByrefHelpers {
  1938. public:
  1939. ARCStrongByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
  1940. void emitCopy(CodeGenFunction &CGF, Address destField,
  1941. Address srcField) override {
  1942. // Do a "move" by copying the value and then zeroing out the old
  1943. // variable.
  1944. llvm::Value *value = CGF.Builder.CreateLoad(srcField);
  1945. llvm::Value *null =
  1946. llvm::ConstantPointerNull::get(cast<llvm::PointerType>(value->getType()));
  1947. if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
  1948. CGF.Builder.CreateStore(null, destField);
  1949. CGF.EmitARCStoreStrongCall(destField, value, /*ignored*/ true);
  1950. CGF.EmitARCStoreStrongCall(srcField, null, /*ignored*/ true);
  1951. return;
  1952. }
  1953. CGF.Builder.CreateStore(value, destField);
  1954. CGF.Builder.CreateStore(null, srcField);
  1955. }
  1956. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1957. CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
  1958. }
  1959. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1960. // 1 is distinguishable from all pointers and byref flags
  1961. id.AddInteger(1);
  1962. }
  1963. };
  1964. /// Emits the copy/dispose helpers for an ARC __block __strong
  1965. /// variable that's of block-pointer type.
  1966. class ARCStrongBlockByrefHelpers final : public BlockByrefHelpers {
  1967. public:
  1968. ARCStrongBlockByrefHelpers(CharUnits alignment)
  1969. : BlockByrefHelpers(alignment) {}
  1970. void emitCopy(CodeGenFunction &CGF, Address destField,
  1971. Address srcField) override {
  1972. // Do the copy with objc_retainBlock; that's all that
  1973. // _Block_object_assign would do anyway, and we'd have to pass the
  1974. // right arguments to make sure it doesn't get no-op'ed.
  1975. llvm::Value *oldValue = CGF.Builder.CreateLoad(srcField);
  1976. llvm::Value *copy = CGF.EmitARCRetainBlock(oldValue, /*mandatory*/ true);
  1977. CGF.Builder.CreateStore(copy, destField);
  1978. }
  1979. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1980. CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
  1981. }
  1982. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1983. // 2 is distinguishable from all pointers and byref flags
  1984. id.AddInteger(2);
  1985. }
  1986. };
  1987. /// Emits the copy/dispose helpers for a __block variable with a
  1988. /// nontrivial copy constructor or destructor.
  1989. class CXXByrefHelpers final : public BlockByrefHelpers {
  1990. QualType VarType;
  1991. const Expr *CopyExpr;
  1992. public:
  1993. CXXByrefHelpers(CharUnits alignment, QualType type,
  1994. const Expr *copyExpr)
  1995. : BlockByrefHelpers(alignment), VarType(type), CopyExpr(copyExpr) {}
  1996. bool needsCopy() const override { return CopyExpr != nullptr; }
  1997. void emitCopy(CodeGenFunction &CGF, Address destField,
  1998. Address srcField) override {
  1999. if (!CopyExpr) return;
  2000. CGF.EmitSynthesizedCXXCopyCtor(destField, srcField, CopyExpr);
  2001. }
  2002. void emitDispose(CodeGenFunction &CGF, Address field) override {
  2003. EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
  2004. CGF.PushDestructorCleanup(VarType, field);
  2005. CGF.PopCleanupBlocks(cleanupDepth);
  2006. }
  2007. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  2008. id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
  2009. }
  2010. };
  2011. /// Emits the copy/dispose helpers for a __block variable that is a non-trivial
  2012. /// C struct.
  2013. class NonTrivialCStructByrefHelpers final : public BlockByrefHelpers {
  2014. QualType VarType;
  2015. public:
  2016. NonTrivialCStructByrefHelpers(CharUnits alignment, QualType type)
  2017. : BlockByrefHelpers(alignment), VarType(type) {}
  2018. void emitCopy(CodeGenFunction &CGF, Address destField,
  2019. Address srcField) override {
  2020. CGF.callCStructMoveConstructor(CGF.MakeAddrLValue(destField, VarType),
  2021. CGF.MakeAddrLValue(srcField, VarType));
  2022. }
  2023. bool needsDispose() const override {
  2024. return VarType.isDestructedType();
  2025. }
  2026. void emitDispose(CodeGenFunction &CGF, Address field) override {
  2027. EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
  2028. CGF.pushDestroy(VarType.isDestructedType(), field, VarType);
  2029. CGF.PopCleanupBlocks(cleanupDepth);
  2030. }
  2031. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  2032. id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
  2033. }
  2034. };
  2035. } // end anonymous namespace
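/// Generate the object-copy helper recorded in a __block byref structure:
/// load the destination and source byref pointers, project out the wrapped
/// variable in each, and let the BlockByrefHelpers generator emit the actual
/// copy (retain, weak move, C++ copy construction, ...).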
  2036. static llvm::Constant *
  2037. generateByrefCopyHelper(CodeGenFunction &CGF, const BlockByrefInfo &byrefInfo,
  2038. BlockByrefHelpers &generator) {
  2039. ASTContext &Context = CGF.getContext();
  2040. QualType R = Context.VoidTy;
  2041. FunctionArgList args;
  2042. ImplicitParamDecl Dst(CGF.getContext(), Context.VoidPtrTy,
  2043. ImplicitParamDecl::Other);
  2044. args.push_back(&Dst);
  2045. ImplicitParamDecl Src(CGF.getContext(), Context.VoidPtrTy,
  2046. ImplicitParamDecl::Other);
  2047. args.push_back(&Src);
  2048. const CGFunctionInfo &FI =
  2049. CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(R, args);
  2050. llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
// FIXME: We'd like these to be mergeable by content, with
// internal linkage.
  2053. llvm::Function *Fn =
  2054. llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
  2055. "__Block_byref_object_copy_", &CGF.CGM.getModule());
  2056. IdentifierInfo *II
  2057. = &Context.Idents.get("__Block_byref_object_copy_");
  2058. FunctionDecl *FD = FunctionDecl::Create(Context,
  2059. Context.getTranslationUnitDecl(),
  2060. SourceLocation(),
  2061. SourceLocation(), II, R, nullptr,
  2062. SC_Static,
  2063. false, false);
  2064. CGF.CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
  2065. CGF.StartFunction(FD, R, Fn, FI, args);
  2066. if (generator.needsCopy()) {
  2067. llvm::Type *byrefPtrType = byrefInfo.Type->getPointerTo(0);
  2068. // dst->x
  2069. Address destField = CGF.GetAddrOfLocalVar(&Dst);
  2070. destField = Address(CGF.Builder.CreateLoad(destField),
  2071. byrefInfo.ByrefAlignment);
  2072. destField = CGF.Builder.CreateBitCast(destField, byrefPtrType);
  2073. destField = CGF.emitBlockByrefAddress(destField, byrefInfo, false,
  2074. "dest-object");
  2075. // src->x
  2076. Address srcField = CGF.GetAddrOfLocalVar(&Src);
  2077. srcField = Address(CGF.Builder.CreateLoad(srcField),
  2078. byrefInfo.ByrefAlignment);
  2079. srcField = CGF.Builder.CreateBitCast(srcField, byrefPtrType);
  2080. srcField = CGF.emitBlockByrefAddress(srcField, byrefInfo, false,
  2081. "src-object");
  2082. generator.emitCopy(CGF, destField, srcField);
  2083. }
  2084. CGF.FinishFunction();
  2085. return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
  2086. }
  2087. /// Build the copy helper for a __block variable.
  2088. static llvm::Constant *buildByrefCopyHelper(CodeGenModule &CGM,
  2089. const BlockByrefInfo &byrefInfo,
  2090. BlockByrefHelpers &generator) {
  2091. CodeGenFunction CGF(CGM);
  2092. return generateByrefCopyHelper(CGF, byrefInfo, generator);
  2093. }
  2094. /// Generate code for a __block variable's dispose helper.
  2095. static llvm::Constant *
  2096. generateByrefDisposeHelper(CodeGenFunction &CGF,
  2097. const BlockByrefInfo &byrefInfo,
  2098. BlockByrefHelpers &generator) {
  2099. ASTContext &Context = CGF.getContext();
  2100. QualType R = Context.VoidTy;
  2101. FunctionArgList args;
  2102. ImplicitParamDecl Src(CGF.getContext(), Context.VoidPtrTy,
  2103. ImplicitParamDecl::Other);
  2104. args.push_back(&Src);
  2105. const CGFunctionInfo &FI =
  2106. CGF.CGM.getTypes().arrangeBuiltinFunctionDeclaration(R, args);
  2107. llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);
// FIXME: We'd like these to be mergeable by content, with
// internal linkage.
  2110. llvm::Function *Fn =
  2111. llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
  2112. "__Block_byref_object_dispose_",
  2113. &CGF.CGM.getModule());
  2114. IdentifierInfo *II
  2115. = &Context.Idents.get("__Block_byref_object_dispose_");
  2116. FunctionDecl *FD = FunctionDecl::Create(Context,
  2117. Context.getTranslationUnitDecl(),
  2118. SourceLocation(),
  2119. SourceLocation(), II, R, nullptr,
  2120. SC_Static,
  2121. false, false);
  2122. CGF.CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);
  2123. CGF.StartFunction(FD, R, Fn, FI, args);
  2124. if (generator.needsDispose()) {
  2125. Address addr = CGF.GetAddrOfLocalVar(&Src);
  2126. addr = Address(CGF.Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);
  2127. auto byrefPtrType = byrefInfo.Type->getPointerTo(0);
  2128. addr = CGF.Builder.CreateBitCast(addr, byrefPtrType);
  2129. addr = CGF.emitBlockByrefAddress(addr, byrefInfo, false, "object");
  2130. generator.emitDispose(CGF, addr);
  2131. }
  2132. CGF.FinishFunction();
  2133. return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
  2134. }
  2135. /// Build the dispose helper for a __block variable.
  2136. static llvm::Constant *buildByrefDisposeHelper(CodeGenModule &CGM,
  2137. const BlockByrefInfo &byrefInfo,
  2138. BlockByrefHelpers &generator) {
  2139. CodeGenFunction CGF(CGM);
  2140. return generateByrefDisposeHelper(CGF, byrefInfo, generator);
  2141. }
  2142. /// Lazily build the copy and dispose helpers for a __block variable
  2143. /// with the given information.
  2144. template <class T>
  2145. static T *buildByrefHelpers(CodeGenModule &CGM, const BlockByrefInfo &byrefInfo,
  2146. T &&generator) {
  2147. llvm::FoldingSetNodeID id;
  2148. generator.Profile(id);
  2149. void *insertPos;
  2150. BlockByrefHelpers *node
  2151. = CGM.ByrefHelpersCache.FindNodeOrInsertPos(id, insertPos);
  2152. if (node) return static_cast<T*>(node);
  2153. generator.CopyHelper = buildByrefCopyHelper(CGM, byrefInfo, generator);
  2154. generator.DisposeHelper = buildByrefDisposeHelper(CGM, byrefInfo, generator);
  2155. T *copy = new (CGM.getContext()) T(std::forward<T>(generator));
  2156. CGM.ByrefHelpersCache.InsertNode(copy, insertPos);
  2157. return copy;
  2158. }
  2159. /// Build the copy and dispose helpers for the given __block variable
  2160. /// emission. Places the helpers in the global cache. Returns null
  2161. /// if no helpers are required.
  2162. BlockByrefHelpers *
  2163. CodeGenFunction::buildByrefHelpers(llvm::StructType &byrefType,
  2164. const AutoVarEmission &emission) {
  2165. const VarDecl &var = *emission.Variable;
  2166. QualType type = var.getType();
  2167. auto &byrefInfo = getBlockByrefInfo(&var);
  2168. // The alignment we care about for the purposes of uniquing byref
  2169. // helpers is the alignment of the actual byref value field.
  2170. CharUnits valueAlignment =
  2171. byrefInfo.ByrefAlignment.alignmentAtOffset(byrefInfo.FieldOffset);
  2172. if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
  2173. const Expr *copyExpr =
  2174. CGM.getContext().getBlockVarCopyInit(&var).getCopyExpr();
  2175. if (!copyExpr && record->hasTrivialDestructor()) return nullptr;
  2176. return ::buildByrefHelpers(
  2177. CGM, byrefInfo, CXXByrefHelpers(valueAlignment, type, copyExpr));
  2178. }
// If the type is a non-trivial C struct that is non-trivial to destructively
// move or destroy, build the copy and dispose helpers.
  2181. if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct ||
  2182. type.isDestructedType() == QualType::DK_nontrivial_c_struct)
  2183. return ::buildByrefHelpers(
  2184. CGM, byrefInfo, NonTrivialCStructByrefHelpers(valueAlignment, type));
// Otherwise, if we don't have a retainable type, there's nothing to do.
  2187. if (!type->isObjCRetainableType()) return nullptr;
  Qualifiers qs = type.getQualifiers();

  // If we have lifetime, that dominates.
  if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
    switch (lifetime) {
    case Qualifiers::OCL_None: llvm_unreachable("impossible");

    // These are just bits as far as the runtime is concerned.
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      return nullptr;

    // Tell the runtime that this is ARC __weak, called by the
    // byref routines.
    case Qualifiers::OCL_Weak:
      return ::buildByrefHelpers(CGM, byrefInfo,
                                 ARCWeakByrefHelpers(valueAlignment));

    // ARC __strong __block variables need to be retained.
    case Qualifiers::OCL_Strong:
      // Block pointers need to be copied, and there's no direct
      // transfer possible.
      if (type->isBlockPointerType()) {
        return ::buildByrefHelpers(CGM, byrefInfo,
                                   ARCStrongBlockByrefHelpers(valueAlignment));

      // Otherwise, we transfer ownership of the retain from the stack
      // to the heap.
      } else {
        return ::buildByrefHelpers(CGM, byrefInfo,
                                   ARCStrongByrefHelpers(valueAlignment));
      }
    }
    llvm_unreachable("fell out of lifetime switch!");
  }
  BlockFieldFlags flags;
  if (type->isBlockPointerType()) {
    flags |= BLOCK_FIELD_IS_BLOCK;
  } else if (CGM.getContext().isObjCNSObjectType(type) ||
             type->isObjCObjectPointerType()) {
    flags |= BLOCK_FIELD_IS_OBJECT;
  } else {
    return nullptr;
  }

  if (type.isObjCGCWeak())
    flags |= BLOCK_FIELD_IS_WEAK;

  return ::buildByrefHelpers(CGM, byrefInfo,
                             ObjectByrefHelpers(valueAlignment, flags));
}
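
// For illustration only, a sketch of how the selection above plays out for a
// few hypothetical declarations under ARC:
//
//   __block __weak id w;       // ARCWeakByrefHelpers
//   __block id obj;            // ARCStrongByrefHelpers
//   __block void (^cb)(void);  // ARCStrongBlockByrefHelpers
//   __block int counter;       // not retainable -> no helpers (nullptr)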
Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
                                               const VarDecl *var,
                                               bool followForward) {
  auto &info = getBlockByrefInfo(var);
  return emitBlockByrefAddress(baseAddr, info, followForward, var->getName());
}

Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
                                               const BlockByrefInfo &info,
                                               bool followForward,
                                               const llvm::Twine &name) {
  // Chase the forwarding address if requested.
  if (followForward) {
    Address forwardingAddr =
      Builder.CreateStructGEP(baseAddr, 1, getPointerSize(), "forwarding");
    baseAddr = Address(Builder.CreateLoad(forwardingAddr), info.ByrefAlignment);
  }

  return Builder.CreateStructGEP(baseAddr, info.FieldIndex,
                                 info.FieldOffset, name);
}
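
// Conceptually (a source-level sketch, not code emitted here): with
// followForward == true, an access to a __block variable x amounts to
//
//   x_byref->__forwarding->x
//
// which keeps working after the byref structure has been copied to the heap,
// because __forwarding is updated to point at the heap copy.  Passing
// followForward == false addresses the field inside the structure the caller
// already holds, as the copy/dispose helpers above do.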
/// BuildByrefInfo - This routine changes a __block variable declared as T x
///   into:
///
///      struct {
///        void *__isa;
///        void *__forwarding;
///        int32_t __flags;
///        int32_t __size;
///        void *__copy_helper;            // only if needed
///        void *__destroy_helper;         // only if needed
///        void *__byref_variable_layout;  // only if needed
///        char padding[X];                // only if needed
///        T x;
///      } x
///
const BlockByrefInfo &CodeGenFunction::getBlockByrefInfo(const VarDecl *D) {
  auto it = BlockByrefInfos.find(D);
  if (it != BlockByrefInfos.end())
    return it->second;

  llvm::StructType *byrefType =
    llvm::StructType::create(getLLVMContext(),
                             "struct.__block_byref_" + D->getNameAsString());

  QualType Ty = D->getType();

  CharUnits size;
  SmallVector<llvm::Type *, 8> types;

  // void *__isa;
  types.push_back(Int8PtrTy);
  size += getPointerSize();

  // void *__forwarding;
  types.push_back(llvm::PointerType::getUnqual(byrefType));
  size += getPointerSize();

  // int32_t __flags;
  types.push_back(Int32Ty);
  size += CharUnits::fromQuantity(4);

  // int32_t __size;
  types.push_back(Int32Ty);
  size += CharUnits::fromQuantity(4);

  // Note that this must match *exactly* the logic in buildByrefHelpers.
  bool hasCopyAndDispose = getContext().BlockRequiresCopying(Ty, D);
  if (hasCopyAndDispose) {
    /// void *__copy_helper;
    types.push_back(Int8PtrTy);
    size += getPointerSize();

    /// void *__destroy_helper;
    types.push_back(Int8PtrTy);
    size += getPointerSize();
  }

  bool HasByrefExtendedLayout = false;
  Qualifiers::ObjCLifetime Lifetime;
  if (getContext().getByrefLifetime(Ty, Lifetime, HasByrefExtendedLayout) &&
      HasByrefExtendedLayout) {
    /// void *__byref_variable_layout;
    types.push_back(Int8PtrTy);
    size += CharUnits::fromQuantity(PointerSizeInBytes);
  }

  // T x;
  llvm::Type *varTy = ConvertTypeForMem(Ty);

  bool packed = false;
  CharUnits varAlign = getContext().getDeclAlign(D);
  CharUnits varOffset = size.alignTo(varAlign);

  // We may have to insert padding.
  if (varOffset != size) {
    llvm::Type *paddingTy =
      llvm::ArrayType::get(Int8Ty, (varOffset - size).getQuantity());

    types.push_back(paddingTy);
    size = varOffset;

  // Conversely, we might have to prevent LLVM from inserting padding.
  } else if (CGM.getDataLayout().getABITypeAlignment(varTy)
               > varAlign.getQuantity()) {
    packed = true;
  }
  types.push_back(varTy);

  byrefType->setBody(types, packed);

  BlockByrefInfo info;
  info.Type = byrefType;
  info.FieldIndex = types.size() - 1;
  info.FieldOffset = varOffset;
  info.ByrefAlignment = std::max(varAlign, getPointerAlign());

  auto pair = BlockByrefInfos.insert({D, info});
  assert(pair.second && "info was inserted recursively?");
  return pair.first->second;
}
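
// For illustration (a sketch, assuming a 64-bit target, no copy/dispose
// helpers, and no extended layout), `__block int x;` produces roughly:
//
//   struct __block_byref_x {
//     void *__isa;                           // offset  0
//     struct __block_byref_x *__forwarding;  // offset  8
//     int32_t __flags;                       // offset 16
//     int32_t __size;                        // offset 20
//     int x;                                 // offset 24, FieldIndex == 4
//   };
//
// with ByrefAlignment == max(alignof(int), pointer alignment) == 8 and no
// padding array, since 24 is already a multiple of alignof(int).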
/// Initialize the structural components of a __block variable, i.e.
/// everything but the actual object.
void CodeGenFunction::emitByrefStructureInit(const AutoVarEmission &emission) {
  // Find the address of the local.
  Address addr = emission.Addr;

  // That's an alloca of the byref structure type.
  llvm::StructType *byrefType = cast<llvm::StructType>(
    cast<llvm::PointerType>(addr.getPointer()->getType())->getElementType());

  unsigned nextHeaderIndex = 0;
  CharUnits nextHeaderOffset;
  auto storeHeaderField = [&](llvm::Value *value, CharUnits fieldSize,
                              const Twine &name) {
    auto fieldAddr = Builder.CreateStructGEP(addr, nextHeaderIndex,
                                             nextHeaderOffset, name);
    Builder.CreateStore(value, fieldAddr);

    nextHeaderIndex++;
    nextHeaderOffset += fieldSize;
  };

  // Build the byref helpers if necessary.  This is null if we don't need any.
  BlockByrefHelpers *helpers = buildByrefHelpers(*byrefType, emission);

  const VarDecl &D = *emission.Variable;
  QualType type = D.getType();

  bool HasByrefExtendedLayout;
  Qualifiers::ObjCLifetime ByrefLifetime;
  bool ByRefHasLifetime =
    getContext().getByrefLifetime(type, ByrefLifetime, HasByrefExtendedLayout);

  llvm::Value *V;

  // Initialize the 'isa', which is just 0 or 1.
  int isa = 0;
  if (type.isObjCGCWeak())
    isa = 1;
  V = Builder.CreateIntToPtr(Builder.getInt32(isa), Int8PtrTy, "isa");
  storeHeaderField(V, getPointerSize(), "byref.isa");

  // Store the address of the variable into its own forwarding pointer.
  storeHeaderField(addr.getPointer(), getPointerSize(), "byref.forwarding");

  // Blocks ABI:
  //   c) the flags field is set to either 0 if no helper functions are
  //      needed or BLOCK_BYREF_HAS_COPY_DISPOSE if they are,
  BlockFlags flags;
  if (helpers) flags |= BLOCK_BYREF_HAS_COPY_DISPOSE;
  if (ByRefHasLifetime) {
    if (HasByrefExtendedLayout) flags |= BLOCK_BYREF_LAYOUT_EXTENDED;
    else switch (ByrefLifetime) {
      case Qualifiers::OCL_Strong:
        flags |= BLOCK_BYREF_LAYOUT_STRONG;
        break;
      case Qualifiers::OCL_Weak:
        flags |= BLOCK_BYREF_LAYOUT_WEAK;
        break;
      case Qualifiers::OCL_ExplicitNone:
        flags |= BLOCK_BYREF_LAYOUT_UNRETAINED;
        break;
      case Qualifiers::OCL_None:
        if (!type->isObjCObjectPointerType() && !type->isBlockPointerType())
          flags |= BLOCK_BYREF_LAYOUT_NON_OBJECT;
        break;
      default:
        break;
    }
    if (CGM.getLangOpts().ObjCGCBitmapPrint) {
      printf("\n Inline flag for BYREF variable layout (%d):",
             flags.getBitMask());
      if (flags & BLOCK_BYREF_HAS_COPY_DISPOSE)
        printf(" BLOCK_BYREF_HAS_COPY_DISPOSE");
      if (flags & BLOCK_BYREF_LAYOUT_MASK) {
        BlockFlags ThisFlag(flags.getBitMask() & BLOCK_BYREF_LAYOUT_MASK);
        if (ThisFlag == BLOCK_BYREF_LAYOUT_EXTENDED)
          printf(" BLOCK_BYREF_LAYOUT_EXTENDED");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_STRONG)
          printf(" BLOCK_BYREF_LAYOUT_STRONG");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_WEAK)
          printf(" BLOCK_BYREF_LAYOUT_WEAK");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_UNRETAINED)
          printf(" BLOCK_BYREF_LAYOUT_UNRETAINED");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_NON_OBJECT)
          printf(" BLOCK_BYREF_LAYOUT_NON_OBJECT");
      }
      printf("\n");
    }
  }

  storeHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
                   getIntSize(), "byref.flags");

  CharUnits byrefSize = CGM.GetTargetTypeStoreSize(byrefType);
  V = llvm::ConstantInt::get(IntTy, byrefSize.getQuantity());
  storeHeaderField(V, getIntSize(), "byref.size");

  if (helpers) {
    storeHeaderField(helpers->CopyHelper, getPointerSize(),
                     "byref.copyHelper");
    storeHeaderField(helpers->DisposeHelper, getPointerSize(),
                     "byref.disposeHelper");
  }

  if (ByRefHasLifetime && HasByrefExtendedLayout) {
    auto layoutInfo = CGM.getObjCRuntime().BuildByrefLayout(CGM, type);
    storeHeaderField(layoutInfo, getPointerSize(), "byref.layout");
  }
}
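
// For a plain `__block int x;` compiled as C with blocks (a sketch assuming
// no Objective-C, so no helpers and no lifetime/layout word), the header
// written above works out to:
//
//   byref.isa        = (void *)0
//   byref.forwarding = address of the structure itself (while on the stack)
//   byref.flags      = 0
//   byref.size       = sizeof(struct __block_byref_x)
//
// The copy/dispose helper slots and byref.layout are stored only when
// `helpers` is non-null or the extended-layout condition holds.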
void CodeGenFunction::BuildBlockRelease(llvm::Value *V, BlockFieldFlags flags,
                                        bool CanThrow) {
  llvm::Value *F = CGM.getBlockObjectDispose();
  llvm::Value *args[] = {
    Builder.CreateBitCast(V, Int8PtrTy),
    llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
  };

  if (CanThrow)
    EmitRuntimeCallOrInvoke(F, args);
  else
    EmitNounwindRuntimeCall(F, args);
}
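
// The emitted call lowers to `_Block_object_dispose(object, flags)` in the
// blocks runtime.  As an illustrative example, releasing a __block variable's
// byref structure passes BLOCK_FIELD_IS_BYREF; the concrete flag values are
// chosen by each caller of BuildBlockRelease.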
void CodeGenFunction::enterByrefCleanup(CleanupKind Kind, Address Addr,
                                        BlockFieldFlags Flags,
                                        bool LoadBlockVarAddr, bool CanThrow) {
  EHStack.pushCleanup<CallBlockRelease>(Kind, Addr, Flags, LoadBlockVarAddr,
                                        CanThrow);
}
/// Adjust the declaration of something from the blocks API.
static void configureBlocksRuntimeObject(CodeGenModule &CGM,
                                         llvm::Constant *C) {
  auto *GV = cast<llvm::GlobalValue>(C->stripPointerCasts());

  if (CGM.getTarget().getTriple().isOSBinFormatCOFF()) {
    IdentifierInfo &II = CGM.getContext().Idents.get(C->getName());
    TranslationUnitDecl *TUDecl = CGM.getContext().getTranslationUnitDecl();
    DeclContext *DC = TranslationUnitDecl::castToDeclContext(TUDecl);

    assert((isa<llvm::Function>(C->stripPointerCasts()) ||
            isa<llvm::GlobalVariable>(C->stripPointerCasts())) &&
           "expected Function or GlobalVariable");

    const NamedDecl *ND = nullptr;
    for (const auto &Result : DC->lookup(&II))
      if ((ND = dyn_cast<FunctionDecl>(Result)) ||
          (ND = dyn_cast<VarDecl>(Result)))
        break;

    // TODO: support static blocks runtime
    if (GV->isDeclaration() && (!ND || !ND->hasAttr<DLLExportAttr>())) {
      GV->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
      GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
    } else {
      GV->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
      GV->setLinkage(llvm::GlobalValue::ExternalLinkage);
    }
  }

  if (CGM.getLangOpts().BlocksRuntimeOptional && GV->isDeclaration() &&
      GV->hasExternalLinkage())
    GV->setLinkage(llvm::GlobalValue::ExternalWeakLinkage);

  CGM.setDSOLocal(GV);
}
llvm::Constant *CodeGenModule::getBlockObjectDispose() {
  if (BlockObjectDispose)
    return BlockObjectDispose;

  llvm::Type *args[] = { Int8PtrTy, Int32Ty };
  llvm::FunctionType *fty
    = llvm::FunctionType::get(VoidTy, args, false);
  BlockObjectDispose = CreateRuntimeFunction(fty, "_Block_object_dispose");
  configureBlocksRuntimeObject(*this, BlockObjectDispose);
  return BlockObjectDispose;
}

llvm::Constant *CodeGenModule::getBlockObjectAssign() {
  if (BlockObjectAssign)
    return BlockObjectAssign;

  llvm::Type *args[] = { Int8PtrTy, Int8PtrTy, Int32Ty };
  llvm::FunctionType *fty
    = llvm::FunctionType::get(VoidTy, args, false);
  BlockObjectAssign = CreateRuntimeFunction(fty, "_Block_object_assign");
  configureBlocksRuntimeObject(*this, BlockObjectAssign);
  return BlockObjectAssign;
}
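
// The two runtime functions created above correspond to the C prototypes in
// the blocks runtime headers (shown for reference only, assuming the standard
// Blocks ABI):
//
//   void _Block_object_assign(void *destAddr, const void *object,
//                             const int flags);
//   void _Block_object_dispose(const void *object, const int flags);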
llvm::Constant *CodeGenModule::getNSConcreteGlobalBlock() {
  if (NSConcreteGlobalBlock)
    return NSConcreteGlobalBlock;

  NSConcreteGlobalBlock = GetOrCreateLLVMGlobal("_NSConcreteGlobalBlock",
                                                Int8PtrTy->getPointerTo(),
                                                nullptr);
  configureBlocksRuntimeObject(*this, NSConcreteGlobalBlock);
  return NSConcreteGlobalBlock;
}

llvm::Constant *CodeGenModule::getNSConcreteStackBlock() {
  if (NSConcreteStackBlock)
    return NSConcreteStackBlock;

  NSConcreteStackBlock = GetOrCreateLLVMGlobal("_NSConcreteStackBlock",
                                               Int8PtrTy->getPointerTo(),
                                               nullptr);
  configureBlocksRuntimeObject(*this, NSConcreteStackBlock);
  return NSConcreteStackBlock;
}