  1. //===--- CGBlocks.cpp - Emit LLVM Code for declarations ---------*- C++ -*-===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code to emit blocks.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CGBlocks.h"
  14. #include "CGDebugInfo.h"
  15. #include "CGObjCRuntime.h"
  16. #include "CodeGenFunction.h"
  17. #include "CodeGenModule.h"
  18. #include "clang/AST/DeclObjC.h"
  19. #include "llvm/ADT/SmallSet.h"
  20. #include "llvm/IR/CallSite.h"
  21. #include "llvm/IR/DataLayout.h"
  22. #include "llvm/IR/Module.h"
  23. #include <algorithm>
  24. #include <cstdio>
  25. using namespace clang;
  26. using namespace CodeGen;
  27. CGBlockInfo::CGBlockInfo(const BlockDecl *block, StringRef name)
  28. : Name(name), CXXThisIndex(0), CanBeGlobal(false), NeedsCopyDispose(false),
  29. HasCXXObject(false), UsesStret(false), HasCapturedVariableLayout(false),
  30. LocalAddress(Address::invalid()), StructureType(nullptr), Block(block),
  31. DominatingIP(nullptr) {
  32. // Skip asm prefix, if any. 'name' is usually taken directly from
  33. // the mangled name of the enclosing function.
  34. if (!name.empty() && name[0] == '\01')
  35. name = name.substr(1);
  36. }
  37. // Anchor the vtable to this translation unit.
  38. BlockByrefHelpers::~BlockByrefHelpers() {}
/// Build the given block as a global block.
/// Forward declaration; the definition appears later in this file.
static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
                                        const CGBlockInfo &blockInfo,
                                        llvm::Constant *blockFn);
  43. /// Build the helper function to copy a block.
  44. static llvm::Constant *buildCopyHelper(CodeGenModule &CGM,
  45. const CGBlockInfo &blockInfo) {
  46. return CodeGenFunction(CGM).GenerateCopyHelperFunction(blockInfo);
  47. }
  48. /// Build the helper function to dispose of a block.
  49. static llvm::Constant *buildDisposeHelper(CodeGenModule &CGM,
  50. const CGBlockInfo &blockInfo) {
  51. return CodeGenFunction(CGM).GenerateDestroyHelperFunction(blockInfo);
  52. }
/// buildBlockDescriptor - Build the block descriptor meta-data for a block.
/// buildBlockDescriptor is accessed from 5th field of the Block_literal
/// meta-data and contains stationary information about the block literal.
/// Its definition will have 4 (or optionally 6) words.
/// \code
/// struct Block_descriptor {
///   unsigned long reserved;
///   unsigned long size;  // size of Block_literal metadata in bytes.
///   void *copy_func_helper_decl;  // optional copy helper.
///   void *destroy_func_decl;      // optional destructor helper.
///   void *block_method_encoding_address; // @encode for block literal signature.
///   void *block_layout_info;      // encoding of captured block variables.
/// };
/// \endcode
static llvm::Constant *buildBlockDescriptor(CodeGenModule &CGM,
                                            const CGBlockInfo &blockInfo) {
  ASTContext &C = CGM.getContext();

  llvm::Type *ulong = CGM.getTypes().ConvertType(C.UnsignedLongTy);
  // Under OpenCL, string pointers in the descriptor live in the
  // constant address space; otherwise use the ordinary void*.
  llvm::Type *i8p = nullptr;
  if (CGM.getLangOpts().OpenCL)
    i8p =
      llvm::Type::getInt8PtrTy(
           CGM.getLLVMContext(), C.getTargetAddressSpace(LangAS::opencl_constant));
  else
    i8p = CGM.getTypes().ConvertType(C.VoidPtrTy);

  SmallVector<llvm::Constant*, 6> elements;

  // reserved
  elements.push_back(llvm::ConstantInt::get(ulong, 0));

  // Size
  // FIXME: What is the right way to say this doesn't fit? We should give
  // a user diagnostic in that case. Better fix would be to change the
  // API to size_t.
  elements.push_back(llvm::ConstantInt::get(ulong,
                                            blockInfo.BlockSize.getQuantity()));

  // Optional copy/dispose helpers, present only when the block needs them.
  if (blockInfo.NeedsCopyDispose) {
    // copy_func_helper_decl
    elements.push_back(buildCopyHelper(CGM, blockInfo));

    // destroy_func_decl
    elements.push_back(buildDisposeHelper(CGM, blockInfo));
  }

  // Signature.  Mandatory ObjC-style method descriptor @encode sequence.
  std::string typeAtEncoding =
    CGM.getContext().getObjCEncodingForBlock(blockInfo.getBlockExpr());
  elements.push_back(llvm::ConstantExpr::getBitCast(
    CGM.GetAddrOfConstantCString(typeAtEncoding).getPointer(), i8p));

  // GC layout.  Under ObjC, pick the GC or retain/release layout encoding
  // depending on the GC mode; otherwise emit a null placeholder.
  if (C.getLangOpts().ObjC1) {
    if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
      elements.push_back(CGM.getObjCRuntime().BuildGCBlockLayout(CGM, blockInfo));
    else
      elements.push_back(CGM.getObjCRuntime().BuildRCBlockLayout(CGM, blockInfo));
  }
  else
    elements.push_back(llvm::Constant::getNullValue(i8p));

  llvm::Constant *init = llvm::ConstantStruct::getAnon(elements);

  // The descriptor is immutable module data with internal linkage.
  llvm::GlobalVariable *global =
    new llvm::GlobalVariable(CGM.getModule(), init->getType(), true,
                             llvm::GlobalValue::InternalLinkage,
                             init, "__block_descriptor_tmp");

  return llvm::ConstantExpr::getBitCast(global, CGM.getBlockDescriptorType());
}
  115. /*
  116. Purely notional variadic template describing the layout of a block.
  117. template <class _ResultType, class... _ParamTypes, class... _CaptureTypes>
  118. struct Block_literal {
  119. /// Initialized to one of:
  120. /// extern void *_NSConcreteStackBlock[];
  121. /// extern void *_NSConcreteGlobalBlock[];
  122. ///
  123. /// In theory, we could start one off malloc'ed by setting
  124. /// BLOCK_NEEDS_FREE, giving it a refcount of 1, and using
  125. /// this isa:
  126. /// extern void *_NSConcreteMallocBlock[];
  127. struct objc_class *isa;
  128. /// These are the flags (with corresponding bit number) that the
  129. /// compiler is actually supposed to know about.
  130. /// 25. BLOCK_HAS_COPY_DISPOSE - indicates that the block
  131. /// descriptor provides copy and dispose helper functions
  132. /// 26. BLOCK_HAS_CXX_OBJ - indicates that there's a captured
  133. /// object with a nontrivial destructor or copy constructor
  134. /// 28. BLOCK_IS_GLOBAL - indicates that the block is allocated
  135. /// as global memory
  136. /// 29. BLOCK_USE_STRET - indicates that the block function
  137. /// uses stret, which objc_msgSend needs to know about
  138. /// 30. BLOCK_HAS_SIGNATURE - indicates that the block has an
  139. /// @encoded signature string
  140. /// And we're not supposed to manipulate these:
  141. /// 24. BLOCK_NEEDS_FREE - indicates that the block has been moved
  142. /// to malloc'ed memory
  143. /// 27. BLOCK_IS_GC - indicates that the block has been moved to
  144. /// to GC-allocated memory
  145. /// Additionally, the bottom 16 bits are a reference count which
  146. /// should be zero on the stack.
  147. int flags;
  148. /// Reserved; should be zero-initialized.
  149. int reserved;
  150. /// Function pointer generated from block literal.
  151. _ResultType (*invoke)(Block_literal *, _ParamTypes...);
  152. /// Block description metadata generated from block literal.
  153. struct Block_descriptor *block_descriptor;
  154. /// Captured values follow.
  155. _CaptureTypes captures...;
  156. };
  157. */
/// The number of fields in a block header: isa, flags, reserved, the
/// invoke function pointer, and the block descriptor pointer.
const unsigned BlockHeaderSize = 5;
  160. namespace {
  161. /// A chunk of data that we actually have to capture in the block.
  162. struct BlockLayoutChunk {
  163. CharUnits Alignment;
  164. CharUnits Size;
  165. Qualifiers::ObjCLifetime Lifetime;
  166. const BlockDecl::Capture *Capture; // null for 'this'
  167. llvm::Type *Type;
  168. BlockLayoutChunk(CharUnits align, CharUnits size,
  169. Qualifiers::ObjCLifetime lifetime,
  170. const BlockDecl::Capture *capture,
  171. llvm::Type *type)
  172. : Alignment(align), Size(size), Lifetime(lifetime),
  173. Capture(capture), Type(type) {}
  174. /// Tell the block info that this chunk has the given field index.
  175. void setIndex(CGBlockInfo &info, unsigned index, CharUnits offset) {
  176. if (!Capture) {
  177. info.CXXThisIndex = index;
  178. info.CXXThisOffset = offset;
  179. } else {
  180. info.Captures.insert({Capture->getVariable(),
  181. CGBlockInfo::Capture::makeIndex(index, offset)});
  182. }
  183. }
  184. };
  185. /// Order by 1) all __strong together 2) next, all byfref together 3) next,
  186. /// all __weak together. Preserve descending alignment in all situations.
  187. bool operator<(const BlockLayoutChunk &left, const BlockLayoutChunk &right) {
  188. if (left.Alignment != right.Alignment)
  189. return left.Alignment > right.Alignment;
  190. auto getPrefOrder = [](const BlockLayoutChunk &chunk) {
  191. if (chunk.Capture && chunk.Capture->isByRef())
  192. return 1;
  193. if (chunk.Lifetime == Qualifiers::OCL_Strong)
  194. return 0;
  195. if (chunk.Lifetime == Qualifiers::OCL_Weak)
  196. return 2;
  197. return 3;
  198. };
  199. return getPrefOrder(left) < getPrefOrder(right);
  200. }
  201. } // end anonymous namespace
  202. /// Determines if the given type is safe for constant capture in C++.
  203. static bool isSafeForCXXConstantCapture(QualType type) {
  204. const RecordType *recordType =
  205. type->getBaseElementTypeUnsafe()->getAs<RecordType>();
  206. // Only records can be unsafe.
  207. if (!recordType) return true;
  208. const auto *record = cast<CXXRecordDecl>(recordType->getDecl());
  209. // Maintain semantics for classes with non-trivial dtors or copy ctors.
  210. if (!record->hasTrivialDestructor()) return false;
  211. if (record->hasNonTrivialCopyConstructor()) return false;
  212. // Otherwise, we just have to make sure there aren't any mutable
  213. // fields that might have changed since initialization.
  214. return !record->hasMutableFields();
  215. }
  216. /// It is illegal to modify a const object after initialization.
  217. /// Therefore, if a const object has a constant initializer, we don't
  218. /// actually need to keep storage for it in the block; we'll just
  219. /// rematerialize it at the start of the block function. This is
  220. /// acceptable because we make no promises about address stability of
  221. /// captured variables.
  222. static llvm::Constant *tryCaptureAsConstant(CodeGenModule &CGM,
  223. CodeGenFunction *CGF,
  224. const VarDecl *var) {
  225. QualType type = var->getType();
  226. // We can only do this if the variable is const.
  227. if (!type.isConstQualified()) return nullptr;
  228. // Furthermore, in C++ we have to worry about mutable fields:
  229. // C++ [dcl.type.cv]p4:
  230. // Except that any class member declared mutable can be
  231. // modified, any attempt to modify a const object during its
  232. // lifetime results in undefined behavior.
  233. if (CGM.getLangOpts().CPlusPlus && !isSafeForCXXConstantCapture(type))
  234. return nullptr;
  235. // If the variable doesn't have any initializer (shouldn't this be
  236. // invalid?), it's not clear what we should do. Maybe capture as
  237. // zero?
  238. const Expr *init = var->getInit();
  239. if (!init) return nullptr;
  240. return CGM.EmitConstantInit(*var, CGF);
  241. }
  242. /// Get the low bit of a nonzero character count. This is the
  243. /// alignment of the nth byte if the 0th byte is universally aligned.
  244. static CharUnits getLowBit(CharUnits v) {
  245. return CharUnits::fromQuantity(v.getQuantity() & (~v.getQuantity() + 1));
  246. }
/// Seed the block layout with the fixed header fields and record the
/// header's size and alignment in 'info'.
static void initializeForBlockHeader(CodeGenModule &CGM, CGBlockInfo &info,
                                     SmallVectorImpl<llvm::Type*> &elementTypes) {
  // The header is basically 'struct { void *; int; int; void *; void *; }'.
  // Assert that that struct is packed.
  assert(CGM.getIntSize() <= CGM.getPointerSize());
  assert(CGM.getIntAlign() <= CGM.getPointerAlign());
  assert((2 * CGM.getIntSize()).isMultipleOf(CGM.getPointerAlign()));

  info.BlockAlign = CGM.getPointerAlign();
  // 3 pointers (isa, invoke, descriptor) + 2 ints (flags, reserved).
  info.BlockSize = 3 * CGM.getPointerSize() + 2 * CGM.getIntSize();

  assert(elementTypes.empty());
  elementTypes.push_back(CGM.VoidPtrTy);                 // isa
  elementTypes.push_back(CGM.IntTy);                     // flags
  elementTypes.push_back(CGM.IntTy);                     // reserved
  elementTypes.push_back(CGM.VoidPtrTy);                 // invoke
  elementTypes.push_back(CGM.getBlockDescriptorType());  // descriptor

  assert(elementTypes.size() == BlockHeaderSize);
}
/// Compute the layout of the given block.  Attempts to lay the block
/// out with minimal space requirements.
static void computeBlockInfo(CodeGenModule &CGM, CodeGenFunction *CGF,
                             CGBlockInfo &info) {
  ASTContext &C = CGM.getContext();
  const BlockDecl *block = info.getBlockDecl();

  SmallVector<llvm::Type*, 8> elementTypes;
  initializeForBlockHeader(CGM, info, elementTypes);

  // A block with no captures is just the header and can be emitted as
  // a global constant.
  if (!block->hasCaptures()) {
    info.StructureType =
      llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
    info.CanBeGlobal = true;
    return;
  }
  else if (C.getLangOpts().ObjC1 &&
           CGM.getLangOpts().getGC() == LangOptions::NonGC)
    info.HasCapturedVariableLayout = true;

  // Collect the layout chunks.
  SmallVector<BlockLayoutChunk, 16> layout;
  layout.reserve(block->capturesCXXThis() +
                 (block->capture_end() - block->capture_begin()));

  CharUnits maxFieldAlign;

  // First, 'this'.
  if (block->capturesCXXThis()) {
    assert(CGF && CGF->CurFuncDecl && isa<CXXMethodDecl>(CGF->CurFuncDecl) &&
           "Can't capture 'this' outside a method");
    QualType thisType = cast<CXXMethodDecl>(CGF->CurFuncDecl)->getThisType(C);

    // Theoretically, this could be in a different address space, so
    // don't assume standard pointer size/align.
    llvm::Type *llvmType = CGM.getTypes().ConvertType(thisType);
    std::pair<CharUnits,CharUnits> tinfo
      = CGM.getContext().getTypeInfoInChars(thisType);
    maxFieldAlign = std::max(maxFieldAlign, tinfo.second);

    layout.push_back(BlockLayoutChunk(tinfo.second, tinfo.first,
                                      Qualifiers::OCL_None,
                                      nullptr, llvmType));
  }

  // Next, all the block captures.
  for (const auto &CI : block->captures()) {
    const VarDecl *variable = CI.getVariable();

    if (CI.isByRef()) {
      // We have to copy/dispose of the __block reference.
      info.NeedsCopyDispose = true;

      // Just use void* instead of a pointer to the byref type.
      CharUnits align = CGM.getPointerAlign();
      maxFieldAlign = std::max(maxFieldAlign, align);

      layout.push_back(BlockLayoutChunk(align, CGM.getPointerSize(),
                                        Qualifiers::OCL_None, &CI,
                                        CGM.VoidPtrTy));
      continue;
    }

    // Otherwise, build a layout chunk with the size and alignment of
    // the declaration.
    if (llvm::Constant *constant = tryCaptureAsConstant(CGM, CGF, variable)) {
      // Captured by constant: no storage in the block at all.
      info.Captures[variable] = CGBlockInfo::Capture::makeConstant(constant);
      continue;
    }

    // If we have a lifetime qualifier, honor it for capture purposes.
    // That includes *not* copying it if it's __unsafe_unretained.
    Qualifiers::ObjCLifetime lifetime =
      variable->getType().getObjCLifetime();
    if (lifetime) {
      switch (lifetime) {
      case Qualifiers::OCL_None: llvm_unreachable("impossible");
      case Qualifiers::OCL_ExplicitNone:
      case Qualifiers::OCL_Autoreleasing:
        break;

      case Qualifiers::OCL_Strong:
      case Qualifiers::OCL_Weak:
        info.NeedsCopyDispose = true;
      }

    // Block pointers require copy/dispose.  So do Objective-C pointers.
    } else if (variable->getType()->isObjCRetainableType()) {
      info.NeedsCopyDispose = true;
      // used for mrr below.
      lifetime = Qualifiers::OCL_Strong;

    // So do types that require non-trivial copy construction.
    } else if (CI.hasCopyExpr()) {
      info.NeedsCopyDispose = true;
      info.HasCXXObject = true;

    // And so do types with destructors.
    } else if (CGM.getLangOpts().CPlusPlus) {
      if (const CXXRecordDecl *record =
            variable->getType()->getAsCXXRecordDecl()) {
        if (!record->hasTrivialDestructor()) {
          info.HasCXXObject = true;
          info.NeedsCopyDispose = true;
        }
      }
    }

    QualType VT = variable->getType();
    CharUnits size = C.getTypeSizeInChars(VT);
    CharUnits align = C.getDeclAlign(variable);

    maxFieldAlign = std::max(maxFieldAlign, align);

    llvm::Type *llvmType =
      CGM.getTypes().ConvertTypeForMem(VT);

    layout.push_back(BlockLayoutChunk(align, size, lifetime, &CI, llvmType));
  }

  // If that was everything, we're done here.
  if (layout.empty()) {
    info.StructureType =
      llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
    info.CanBeGlobal = true;
    return;
  }

  // Sort the layout by alignment.  We have to use a stable sort here
  // to get reproducible results.  There should probably be an
  // llvm::array_pod_stable_sort.
  std::stable_sort(layout.begin(), layout.end());

  // Needed for blocks layout info.
  info.BlockHeaderForcedGapOffset = info.BlockSize;
  info.BlockHeaderForcedGapSize = CharUnits::Zero();

  CharUnits &blockSize = info.BlockSize;
  info.BlockAlign = std::max(maxFieldAlign, info.BlockAlign);

  // Assuming that the first byte in the header is maximally aligned,
  // get the alignment of the first byte following the header.
  CharUnits endAlign = getLowBit(blockSize);

  // If the end of the header isn't satisfactorily aligned for the
  // maximum thing, look for things that are okay with the header-end
  // alignment, and keep appending them until we get something that's
  // aligned right.  This algorithm is only guaranteed optimal if
  // that condition is satisfied at some point; otherwise we can get
  // things like:
  //   header                 // next byte has alignment 4
  //   something_with_size_5; // next byte has alignment 1
  //   something_with_alignment_8;
  // which has 7 bytes of padding, as opposed to the naive solution
  // which might have less (?).
  if (endAlign < maxFieldAlign) {
    // Start at begin()+1: the first (largest-aligned) chunk stays put.
    SmallVectorImpl<BlockLayoutChunk>::iterator
      li = layout.begin() + 1, le = layout.end();

    // Look for something that the header end is already
    // satisfactorily aligned for.
    for (; li != le && endAlign < li->Alignment; ++li)
      ;

    // If we found something that's naturally aligned for the end of
    // the header, keep adding things...
    if (li != le) {
      SmallVectorImpl<BlockLayoutChunk>::iterator first = li;
      for (; li != le; ++li) {
        assert(endAlign >= li->Alignment);

        li->setIndex(info, elementTypes.size(), blockSize);
        elementTypes.push_back(li->Type);
        blockSize += li->Size;
        endAlign = getLowBit(blockSize);

        // ...until we get to the alignment of the maximum field.
        if (endAlign >= maxFieldAlign) {
          break;
        }
      }
      // Don't re-append everything we just appended.
      layout.erase(first, li);
    }
  }

  assert(endAlign == getLowBit(blockSize));

  // At this point, we just have to add padding if the end align still
  // isn't aligned right.
  if (endAlign < maxFieldAlign) {
    CharUnits newBlockSize = blockSize.RoundUpToAlignment(maxFieldAlign);
    CharUnits padding = newBlockSize - blockSize;

    // If we haven't yet added any fields, remember that there was an
    // initial gap; this needs to go into the block layout bit map.
    if (blockSize == info.BlockHeaderForcedGapOffset) {
      info.BlockHeaderForcedGapSize = padding;
    }

    elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
                                                padding.getQuantity()));
    blockSize = newBlockSize;
    endAlign = getLowBit(blockSize); // might be > maxFieldAlign
  }

  assert(endAlign >= maxFieldAlign);
  assert(endAlign == getLowBit(blockSize));
  // Slam everything else on now.  This works because they have
  // strictly decreasing alignment and we expect that size is always a
  // multiple of alignment.
  for (SmallVectorImpl<BlockLayoutChunk>::iterator
         li = layout.begin(), le = layout.end(); li != le; ++li) {
    if (endAlign < li->Alignment) {
      // size may not be multiple of alignment. This can only happen with
      // an over-aligned variable. We will be adding a padding field to
      // make the size be multiple of alignment.
      CharUnits padding = li->Alignment - endAlign;
      elementTypes.push_back(llvm::ArrayType::get(CGM.Int8Ty,
                                                  padding.getQuantity()));
      blockSize += padding;
      endAlign = getLowBit(blockSize);
    }
    assert(endAlign >= li->Alignment);
    li->setIndex(info, elementTypes.size(), blockSize);
    elementTypes.push_back(li->Type);
    blockSize += li->Size;
    endAlign = getLowBit(blockSize);
  }

  info.StructureType =
    llvm::StructType::get(CGM.getLLVMContext(), elementTypes, true);
}
/// Enter the scope of a block.  This should be run at the entrance to
/// a full-expression so that the block's cleanups are pushed at the
/// right place in the stack.
static void enterBlockScope(CodeGenFunction &CGF, BlockDecl *block) {
  assert(CGF.HaveInsertPoint());

  // Allocate the block info and place it at the head of the list.
  // Ownership passes to CGF.FirstBlockInfo; it is reclaimed by
  // findAndRemoveBlockInfo / destroyBlockInfos.
  CGBlockInfo &blockInfo =
    *new CGBlockInfo(block, CGF.CurFn->getName());
  blockInfo.NextBlockInfo = CGF.FirstBlockInfo;
  CGF.FirstBlockInfo = &blockInfo;

  // Compute information about the layout, etc., of this block,
  // pushing cleanups as necessary.
  computeBlockInfo(CGF.CGM, &CGF, blockInfo);

  // Nothing else to do if it can be global.
  if (blockInfo.CanBeGlobal) return;

  // Make the allocation for the block.
  blockInfo.LocalAddress = CGF.CreateTempAlloca(blockInfo.StructureType,
                                                blockInfo.BlockAlign, "block");

  // If there are cleanups to emit, enter them (but inactive).
  if (!blockInfo.NeedsCopyDispose) return;

  // Walk through the captures (in order) and find the ones not
  // captured by constant.
  for (const auto &CI : block->captures()) {
    // Ignore __block captures; there's nothing special in the
    // on-stack block that we need to do for them.
    if (CI.isByRef()) continue;

    // Ignore variables that are constant-captured.
    const VarDecl *variable = CI.getVariable();
    CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
    if (capture.isConstant()) continue;

    // Ignore objects that aren't destructed.
    QualType::DestructionKind dtorKind =
      variable->getType().isDestructedType();
    if (dtorKind == QualType::DK_none) continue;

    CodeGenFunction::Destroyer *destroyer;

    // Block captures count as local values and have imprecise semantics.
    // They also can't be arrays, so we don't need to worry about that.
    if (dtorKind == QualType::DK_objc_strong_lifetime) {
      destroyer = CodeGenFunction::destroyARCStrongImprecise;
    } else {
      destroyer = CGF.getDestroyer(dtorKind);
    }

    // GEP down to the address.
    Address addr = CGF.Builder.CreateStructGEP(blockInfo.LocalAddress,
                                               capture.getIndex(),
                                               capture.getOffset());

    // We can use that GEP as the dominating IP.
    if (!blockInfo.DominatingIP)
      blockInfo.DominatingIP = cast<llvm::Instruction>(addr.getPointer());

    // Cleanups start inactive; EH variants only when the type needs one.
    CleanupKind cleanupKind = InactiveNormalCleanup;
    bool useArrayEHCleanup = CGF.needsEHCleanup(dtorKind);
    if (useArrayEHCleanup)
      cleanupKind = InactiveNormalAndEHCleanup;

    CGF.pushDestroy(cleanupKind, addr, variable->getType(),
                    destroyer, useArrayEHCleanup);

    // Remember where that cleanup was.
    capture.setCleanup(CGF.EHStack.stable_begin());
  }
}
  519. /// Enter a full-expression with a non-trivial number of objects to
  520. /// clean up. This is in this file because, at the moment, the only
  521. /// kind of cleanup object is a BlockDecl*.
  522. void CodeGenFunction::enterNonTrivialFullExpression(const ExprWithCleanups *E) {
  523. assert(E->getNumObjects() != 0);
  524. ArrayRef<ExprWithCleanups::CleanupObject> cleanups = E->getObjects();
  525. for (ArrayRef<ExprWithCleanups::CleanupObject>::iterator
  526. i = cleanups.begin(), e = cleanups.end(); i != e; ++i) {
  527. enterBlockScope(*this, *i);
  528. }
  529. }
  530. /// Find the layout for the given block in a linked list and remove it.
  531. static CGBlockInfo *findAndRemoveBlockInfo(CGBlockInfo **head,
  532. const BlockDecl *block) {
  533. while (true) {
  534. assert(head && *head);
  535. CGBlockInfo *cur = *head;
  536. // If this is the block we're looking for, splice it out of the list.
  537. if (cur->getBlockDecl() == block) {
  538. *head = cur->NextBlockInfo;
  539. return cur;
  540. }
  541. head = &cur->NextBlockInfo;
  542. }
  543. }
  544. /// Destroy a chain of block layouts.
  545. void CodeGenFunction::destroyBlockInfos(CGBlockInfo *head) {
  546. assert(head && "destroying an empty chain");
  547. do {
  548. CGBlockInfo *cur = head;
  549. head = cur->NextBlockInfo;
  550. delete cur;
  551. } while (head != nullptr);
  552. }
  553. /// Emit a block literal expression in the current function.
  554. llvm::Value *CodeGenFunction::EmitBlockLiteral(const BlockExpr *blockExpr) {
  555. // If the block has no captures, we won't have a pre-computed
  556. // layout for it.
  557. if (!blockExpr->getBlockDecl()->hasCaptures()) {
  558. CGBlockInfo blockInfo(blockExpr->getBlockDecl(), CurFn->getName());
  559. computeBlockInfo(CGM, this, blockInfo);
  560. blockInfo.BlockExpression = blockExpr;
  561. return EmitBlockLiteral(blockInfo);
  562. }
  563. // Find the block info for this block and take ownership of it.
  564. std::unique_ptr<CGBlockInfo> blockInfo;
  565. blockInfo.reset(findAndRemoveBlockInfo(&FirstBlockInfo,
  566. blockExpr->getBlockDecl()));
  567. blockInfo->BlockExpression = blockExpr;
  568. return EmitBlockLiteral(*blockInfo);
  569. }
/// Emit a (non-global) block literal: materialize the on-stack block
/// object computed by enterBlockScope, fill in its header fields, and
/// copy each captured value into its slot.  Returns the block pointer
/// cast to the block expression's converted type.
llvm::Value *CodeGenFunction::EmitBlockLiteral(const CGBlockInfo &blockInfo) {
  // Using the computed layout, generate the actual block function.
  bool isLambdaConv = blockInfo.getBlockDecl()->isConversionFromLambda();
  llvm::Constant *blockFn
    = CodeGenFunction(CGM, true).GenerateBlockFunction(CurGD, blockInfo,
                                                       LocalDeclMap,
                                                       isLambdaConv);
  blockFn = llvm::ConstantExpr::getBitCast(blockFn, VoidPtrTy);

  // If there is nothing to capture, we can emit this as a global block.
  if (blockInfo.CanBeGlobal)
    return buildGlobalBlock(CGM, blockInfo, blockFn);

  // Otherwise, we have to emit this as a local block.

  llvm::Constant *isa = CGM.getNSConcreteStackBlock();
  isa = llvm::ConstantExpr::getBitCast(isa, VoidPtrTy);

  // Build the block descriptor.
  llvm::Constant *descriptor = buildBlockDescriptor(CGM, blockInfo);

  // The alloca was created eagerly in enterBlockScope.
  Address blockAddr = blockInfo.LocalAddress;
  assert(blockAddr.isValid() && "block has no address!");

  // Compute the initial on-stack block flags.
  BlockFlags flags = BLOCK_HAS_SIGNATURE;
  if (blockInfo.HasCapturedVariableLayout) flags |= BLOCK_HAS_EXTENDED_LAYOUT;
  if (blockInfo.NeedsCopyDispose) flags |= BLOCK_HAS_COPY_DISPOSE;
  if (blockInfo.HasCXXObject) flags |= BLOCK_HAS_CXX_OBJ;
  if (blockInfo.UsesStret) flags |= BLOCK_USE_STRET;

  // Helpers to address / store into a field of the block struct.
  auto projectField =
    [&](unsigned index, CharUnits offset, const Twine &name) -> Address {
      return Builder.CreateStructGEP(blockAddr, index, offset, name);
    };
  auto storeField =
    [&](llvm::Value *value, unsigned index, CharUnits offset,
        const Twine &name) {
      Builder.CreateStore(value, projectField(index, offset, name));
    };

  // Initialize the block header.
  {
    // We assume all the header fields are densely packed.
    unsigned index = 0;
    CharUnits offset;
    auto addHeaderField =
      [&](llvm::Value *value, CharUnits size, const Twine &name) {
        storeField(value, index, offset, name);
        offset += size;
        index++;
      };

    // Header layout: isa, flags, reserved, invoke fn, descriptor.
    addHeaderField(isa, getPointerSize(), "block.isa");
    addHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
                   getIntSize(), "block.flags");
    addHeaderField(llvm::ConstantInt::get(IntTy, 0),
                   getIntSize(), "block.reserved");
    addHeaderField(blockFn, getPointerSize(), "block.invoke");
    addHeaderField(descriptor, getPointerSize(), "block.descriptor");
  }

  // Finally, capture all the values into the block.
  const BlockDecl *blockDecl = blockInfo.getBlockDecl();

  // First, 'this'.
  if (blockDecl->capturesCXXThis()) {
    Address addr = projectField(blockInfo.CXXThisIndex, blockInfo.CXXThisOffset,
                                "block.captured-this.addr");
    Builder.CreateStore(LoadCXXThis(), addr);
  }

  // Next, captured variables.
  for (const auto &CI : blockDecl->captures()) {
    const VarDecl *variable = CI.getVariable();
    const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);

    // Ignore constant captures.
    if (capture.isConstant()) continue;

    QualType type = variable->getType();

    // This will be a [[type]]*, except that a byref entry will just be
    // an i8**.
    Address blockField =
      projectField(capture.getIndex(), capture.getOffset(), "block.captured");

    // Compute the address of the thing we're going to move into the
    // block literal.
    Address src = Address::invalid();
    if (BlockInfo && CI.isNested()) {
      // We need to use the capture from the enclosing block.
      const CGBlockInfo::Capture &enclosingCapture =
        BlockInfo->getCapture(variable);

      // This is a [[type]]*, except that a byref entry wil just be an i8**.
      src = Builder.CreateStructGEP(LoadBlockStruct(),
                                    enclosingCapture.getIndex(),
                                    enclosingCapture.getOffset(),
                                    "block.capture.addr");
    } else if (blockDecl->isConversionFromLambda()) {
      // The lambda capture in a lambda's conversion-to-block-pointer is
      // special; we'll simply emit it directly.
      src = Address::invalid();
    } else {
      // Just look it up in the locals map, which will give us back a
      // [[type]]*.  If that doesn't work, do the more elaborate DRE
      // emission.
      auto it = LocalDeclMap.find(variable);
      if (it != LocalDeclMap.end()) {
        src = it->second;
      } else {
        DeclRefExpr declRef(
            const_cast<VarDecl *>(variable),
            /*RefersToEnclosingVariableOrCapture*/ CI.isNested(), type,
            VK_LValue, SourceLocation());
        src = EmitDeclRefLValue(&declRef).getAddress();
      }
    }

    // For byrefs, we just write the pointer to the byref struct into
    // the block field.  There's no need to chase the forwarding
    // pointer at this point, since we're building something that will
    // live a shorter life than the stack byref anyway.
    if (CI.isByRef()) {
      // Get a void* that points to the byref struct.
      llvm::Value *byrefPointer;
      if (CI.isNested())
        byrefPointer = Builder.CreateLoad(src, "byref.capture");
      else
        byrefPointer = Builder.CreateBitCast(src.getPointer(), VoidPtrTy);

      // Write that void* into the capture field.
      Builder.CreateStore(byrefPointer, blockField);

    // If we have a copy constructor, evaluate that into the block field.
    } else if (const Expr *copyExpr = CI.getCopyExpr()) {
      if (blockDecl->isConversionFromLambda()) {
        // If we have a lambda conversion, emit the expression
        // directly into the block instead.
        AggValueSlot Slot =
          AggValueSlot::forAddr(blockField, Qualifiers(),
                                AggValueSlot::IsDestructed,
                                AggValueSlot::DoesNotNeedGCBarriers,
                                AggValueSlot::IsNotAliased);
        EmitAggExpr(copyExpr, Slot);
      } else {
        EmitSynthesizedCXXCopyCtor(blockField, src, copyExpr);
      }

    // If it's a reference variable, copy the reference into the block field.
    } else if (type->isReferenceType()) {
      llvm::Value *ref = Builder.CreateLoad(src, "ref.val");
      Builder.CreateStore(ref, blockField);

    // If this is an ARC __strong block-pointer variable, don't do a
    // block copy.
    //
    // TODO: this can be generalized into the normal initialization logic:
    // we should never need to do a block-copy when initializing a local
    // variable, because the local variable's lifetime should be strictly
    // contained within the stack block's.
    } else if (type.getObjCLifetime() == Qualifiers::OCL_Strong &&
               type->isBlockPointerType()) {
      // Load the block and do a simple retain.
      llvm::Value *value = Builder.CreateLoad(src, "block.captured_block");
      value = EmitARCRetainNonBlock(value);

      // Do a primitive store to the block field.
      Builder.CreateStore(value, blockField);

    // Otherwise, fake up a POD copy into the block field.
    } else {
      // Fake up a new variable so that EmitScalarInit doesn't think
      // we're referring to the variable in its own initializer.
      ImplicitParamDecl blockFieldPseudoVar(getContext(), /*DC*/ nullptr,
                                            SourceLocation(), /*name*/ nullptr,
                                            type);

      // We use one of these or the other depending on whether the
      // reference is nested.
      DeclRefExpr declRef(const_cast<VarDecl *>(variable),
                          /*RefersToEnclosingVariableOrCapture*/ CI.isNested(),
                          type, VK_LValue, SourceLocation());

      ImplicitCastExpr l2r(ImplicitCastExpr::OnStack, type, CK_LValueToRValue,
                           &declRef, VK_RValue);
      // FIXME: Pass a specific location for the expr init so that the store is
      // attributed to a reasonable location - otherwise it may be attributed to
      // locations of subexpressions in the initialization.
      EmitExprAsInit(&l2r, &blockFieldPseudoVar,
                     MakeAddrLValue(blockField, type, AlignmentSource::Decl),
                     /*captured by init*/ false);
    }

    // Activate the cleanup if layout pushed one (enterBlockScope pushes
    // them inactive; now that the capture is initialized it must be
    // destroyed on unwind).
    if (!CI.isByRef()) {
      EHScopeStack::stable_iterator cleanup = capture.getCleanup();
      if (cleanup.isValid())
        ActivateCleanupBlock(cleanup, blockInfo.DominatingIP);
    }
  }

  // Cast to the converted block-pointer type, which happens (somewhat
  // unfortunately) to be a pointer to function type.
  llvm::Value *result =
    Builder.CreateBitCast(blockAddr.getPointer(),
                          ConvertType(blockInfo.getBlockExpr()->getType()));

  return result;
}
  752. llvm::Type *CodeGenModule::getBlockDescriptorType() {
  753. if (BlockDescriptorType)
  754. return BlockDescriptorType;
  755. llvm::Type *UnsignedLongTy =
  756. getTypes().ConvertType(getContext().UnsignedLongTy);
  757. // struct __block_descriptor {
  758. // unsigned long reserved;
  759. // unsigned long block_size;
  760. //
  761. // // later, the following will be added
  762. //
  763. // struct {
  764. // void (*copyHelper)();
  765. // void (*copyHelper)();
  766. // } helpers; // !!! optional
  767. //
  768. // const char *signature; // the block signature
  769. // const char *layout; // reserved
  770. // };
  771. BlockDescriptorType =
  772. llvm::StructType::create("struct.__block_descriptor",
  773. UnsignedLongTy, UnsignedLongTy, nullptr);
  774. // Now form a pointer to that.
  775. BlockDescriptorType = llvm::PointerType::getUnqual(BlockDescriptorType);
  776. return BlockDescriptorType;
  777. }
  778. llvm::Type *CodeGenModule::getGenericBlockLiteralType() {
  779. if (GenericBlockLiteralType)
  780. return GenericBlockLiteralType;
  781. llvm::Type *BlockDescPtrTy = getBlockDescriptorType();
  782. // struct __block_literal_generic {
  783. // void *__isa;
  784. // int __flags;
  785. // int __reserved;
  786. // void (*__invoke)(void *);
  787. // struct __block_descriptor *__descriptor;
  788. // };
  789. GenericBlockLiteralType =
  790. llvm::StructType::create("struct.__block_literal_generic",
  791. VoidPtrTy, IntTy, IntTy, VoidPtrTy,
  792. BlockDescPtrTy, nullptr);
  793. return GenericBlockLiteralType;
  794. }
/// Emit a call through a block pointer: cast the callee to the generic
/// block literal layout, load the invoke function out of it, and call
/// it with the block literal itself as the implicit first argument.
RValue CodeGenFunction::EmitBlockCallExpr(const CallExpr *E,
                                          ReturnValueSlot ReturnValue) {
  const BlockPointerType *BPT =
    E->getCallee()->getType()->getAs<BlockPointerType>();

  llvm::Value *Callee = EmitScalarExpr(E->getCallee());

  // Get a pointer to the generic block literal.
  llvm::Type *BlockLiteralTy =
    llvm::PointerType::getUnqual(CGM.getGenericBlockLiteralType());

  // Bitcast the callee to a block literal.
  llvm::Value *BlockLiteral =
    Builder.CreateBitCast(Callee, BlockLiteralTy, "block.literal");

  // Get the function pointer from the literal (field 3 of the generic
  // header: void (*__invoke)(void *)).
  llvm::Value *FuncPtr =
    Builder.CreateStructGEP(CGM.getGenericBlockLiteralType(), BlockLiteral, 3);

  BlockLiteral = Builder.CreateBitCast(BlockLiteral, VoidPtrTy);

  // Add the block literal as the implicit first argument.
  CallArgList Args;
  Args.add(RValue::get(BlockLiteral), getContext().VoidPtrTy);

  QualType FnType = BPT->getPointeeType();

  // And the rest of the arguments.
  EmitCallArgs(Args, FnType->getAs<FunctionProtoType>(), E->arguments());

  // Load the function.
  llvm::Value *Func = Builder.CreateAlignedLoad(FuncPtr, getPointerAlign());

  const FunctionType *FuncTy = FnType->castAs<FunctionType>();
  const CGFunctionInfo &FnInfo =
    CGM.getTypes().arrangeBlockFunctionCall(Args, FuncTy);

  // Cast the function pointer to the right type.
  llvm::Type *BlockFTy = CGM.getTypes().GetFunctionType(FnInfo);

  llvm::Type *BlockFTyPtr = llvm::PointerType::getUnqual(BlockFTy);
  Func = Builder.CreateBitCast(Func, BlockFTyPtr);

  // And call the block.
  return EmitCall(FnInfo, Func, ReturnValue, Args);
}
/// Return the address of a captured variable from inside a block
/// invocation function.
///
/// \param variable the captured variable
/// \param isByRef  true when the variable was captured __block, in
///                 which case the capture slot holds a pointer to the
///                 byref struct rather than the value itself
Address CodeGenFunction::GetAddrOfBlockDecl(const VarDecl *variable,
                                            bool isByRef) {
  assert(BlockInfo && "evaluating block ref without block information?");
  const CGBlockInfo::Capture &capture = BlockInfo->getCapture(variable);

  // Handle constant captures.
  if (capture.isConstant()) return LocalDeclMap.find(variable)->second;

  Address addr =
    Builder.CreateStructGEP(LoadBlockStruct(), capture.getIndex(),
                            capture.getOffset(), "block.capture.addr");

  if (isByRef) {
    // addr should be a void** right now.  Load, then cast the result
    // to byref*.
    auto &byrefInfo = getBlockByrefInfo(variable);
    addr = Address(Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);

    auto byrefPointerType = llvm::PointerType::get(byrefInfo.Type, 0);
    addr = Builder.CreateBitCast(addr, byrefPointerType, "byref.addr");

    // Chase the forwarding pointer (follow=true) to reach the current
    // location of the variable, which may have moved to the heap.
    addr = emitBlockByrefAddress(addr, byrefInfo, /*follow*/ true,
                                 variable->getName());
  }

  // A captured reference stores the referent's address; load through it.
  if (auto refType = variable->getType()->getAs<ReferenceType>()) {
    addr = EmitLoadOfReference(addr, refType);
  }

  return addr;
}
  852. llvm::Constant *
  853. CodeGenModule::GetAddrOfGlobalBlock(const BlockExpr *blockExpr,
  854. const char *name) {
  855. CGBlockInfo blockInfo(blockExpr->getBlockDecl(), name);
  856. blockInfo.BlockExpression = blockExpr;
  857. // Compute information about the layout, etc., of this block.
  858. computeBlockInfo(*this, nullptr, blockInfo);
  859. // Using that metadata, generate the actual block function.
  860. llvm::Constant *blockFn;
  861. {
  862. CodeGenFunction::DeclMapTy LocalDeclMap;
  863. blockFn = CodeGenFunction(*this).GenerateBlockFunction(GlobalDecl(),
  864. blockInfo,
  865. LocalDeclMap,
  866. false);
  867. }
  868. blockFn = llvm::ConstantExpr::getBitCast(blockFn, VoidPtrTy);
  869. return buildGlobalBlock(*this, blockInfo, blockFn);
  870. }
  871. static llvm::Constant *buildGlobalBlock(CodeGenModule &CGM,
  872. const CGBlockInfo &blockInfo,
  873. llvm::Constant *blockFn) {
  874. assert(blockInfo.CanBeGlobal);
  875. // Generate the constants for the block literal initializer.
  876. llvm::Constant *fields[BlockHeaderSize];
  877. // isa
  878. fields[0] = CGM.getNSConcreteGlobalBlock();
  879. // __flags
  880. BlockFlags flags = BLOCK_IS_GLOBAL | BLOCK_HAS_SIGNATURE;
  881. if (blockInfo.UsesStret) flags |= BLOCK_USE_STRET;
  882. fields[1] = llvm::ConstantInt::get(CGM.IntTy, flags.getBitMask());
  883. // Reserved
  884. fields[2] = llvm::Constant::getNullValue(CGM.IntTy);
  885. // Function
  886. fields[3] = blockFn;
  887. // Descriptor
  888. fields[4] = buildBlockDescriptor(CGM, blockInfo);
  889. llvm::Constant *init = llvm::ConstantStruct::getAnon(fields);
  890. llvm::GlobalVariable *literal =
  891. new llvm::GlobalVariable(CGM.getModule(),
  892. init->getType(),
  893. /*constant*/ true,
  894. llvm::GlobalVariable::InternalLinkage,
  895. init,
  896. "__block_literal_global");
  897. literal->setAlignment(blockInfo.BlockAlign.getQuantity());
  898. // Return a constant of the appropriately-casted type.
  899. llvm::Type *requiredType =
  900. CGM.getTypes().ConvertType(blockInfo.getBlockExpr()->getType());
  901. return llvm::ConstantExpr::getBitCast(literal, requiredType);
  902. }
/// Bind the block-literal parameter in the prologue of a block
/// invocation function: emit debug info for it and stash it (cast to
/// the block's struct type) as BlockPointer for later field access.
void CodeGenFunction::setBlockContextParameter(const ImplicitParamDecl *D,
                                               unsigned argNum,
                                               llvm::Value *arg) {
  assert(BlockInfo && "not emitting prologue of block invocation function?!");

  llvm::Value *localAddr = nullptr;
  if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
    // Allocate a stack slot to let the debug info survive the RA.
    Address alloc = CreateMemTemp(D->getType(), D->getName() + ".addr");
    Builder.CreateStore(arg, alloc);
    localAddr = Builder.CreateLoad(alloc);
  }

  if (CGDebugInfo *DI = getDebugInfo()) {
    if (CGM.getCodeGenOpts().getDebugInfo()
          >= CodeGenOptions::LimitedDebugInfo) {
      DI->setLocation(D->getLocation());
      DI->EmitDeclareOfBlockLiteralArgVariable(*BlockInfo, arg, argNum,
                                               localAddr, Builder);
    }
  }

  // Attribute the following bitcast to the start of the block body.
  SourceLocation StartLoc = BlockInfo->getBlockExpr()->getBody()->getLocStart();
  ApplyDebugLocation Scope(*this, StartLoc);

  // Instead of messing around with LocalDeclMap, just set the value
  // directly as BlockPointer.
  BlockPointer = Builder.CreateBitCast(arg,
                                       BlockInfo->StructureType->getPointerTo(),
                                       "block");
}
  930. Address CodeGenFunction::LoadBlockStruct() {
  931. assert(BlockInfo && "not in a block invocation function!");
  932. assert(BlockPointer && "no block pointer set!");
  933. return Address(BlockPointer, BlockInfo->BlockAlign);
  934. }
/// Generate the invoke function for a block: build its argument list
/// (implicit ".block_descriptor" self parameter first), emit the body,
/// force constant captures into allocas, and emit debug info for the
/// captured variables at the saved entry insert point.
llvm::Function *
CodeGenFunction::GenerateBlockFunction(GlobalDecl GD,
                                       const CGBlockInfo &blockInfo,
                                       const DeclMapTy &ldm,
                                       bool IsLambdaConversionToBlock) {
  const BlockDecl *blockDecl = blockInfo.getBlockDecl();

  CurGD = GD;

  CurEHLocation = blockInfo.getBlockExpr()->getLocEnd();

  BlockInfo = &blockInfo;

  // Arrange for local static and local extern declarations to appear
  // to be local to this function as well, in case they're directly
  // referenced in a block.
  for (DeclMapTy::const_iterator i = ldm.begin(), e = ldm.end(); i != e; ++i) {
    const auto *var = dyn_cast<VarDecl>(i->first);
    if (var && !var->hasLocalStorage())
      setAddrOfLocalVar(var, i->second);
  }

  // Begin building the function declaration.

  // Build the argument list.
  FunctionArgList args;

  // The first argument is the block pointer.  Just take it as a void*
  // and cast it later.
  QualType selfTy = getContext().VoidPtrTy;
  IdentifierInfo *II = &CGM.getContext().Idents.get(".block_descriptor");

  ImplicitParamDecl selfDecl(getContext(), const_cast<BlockDecl*>(blockDecl),
                             SourceLocation(), II, selfTy);
  args.push_back(&selfDecl);

  // Now add the rest of the parameters.
  args.append(blockDecl->param_begin(), blockDecl->param_end());

  // Create the function declaration.
  const FunctionProtoType *fnType = blockInfo.getBlockExpr()->getFunctionType();
  const CGFunctionInfo &fnInfo = CGM.getTypes().arrangeFreeFunctionDeclaration(
      fnType->getReturnType(), args, fnType->getExtInfo(),
      fnType->isVariadic());
  // Record stret usage so the block flags (emitted later) match the ABI.
  if (CGM.ReturnSlotInterferesWithArgs(fnInfo))
    blockInfo.UsesStret = true;

  llvm::FunctionType *fnLLVMType = CGM.getTypes().GetFunctionType(fnInfo);

  StringRef name = CGM.getBlockMangledName(GD, blockDecl);
  llvm::Function *fn = llvm::Function::Create(
      fnLLVMType, llvm::GlobalValue::InternalLinkage, name, &CGM.getModule());
  CGM.SetInternalFunctionAttributes(blockDecl, fn, fnInfo);

  // Begin generating the function.
  StartFunction(blockDecl, fnType->getReturnType(), fn, fnInfo, args,
                blockDecl->getLocation(),
                blockInfo.getBlockExpr()->getBody()->getLocStart());

  // Okay.  Undo some of what StartFunction did.

  // At -O0 we generate an explicit alloca for the BlockPointer, so the RA
  // won't delete the dbg.declare intrinsics for captured variables.
  llvm::Value *BlockPointerDbgLoc = BlockPointer;
  if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
    // Allocate a stack slot for it, so we can point the debugger to it
    Address Alloca = CreateTempAlloca(BlockPointer->getType(),
                                      getPointerAlign(),
                                      "block.addr");
    // Set the DebugLocation to empty, so the store is recognized as a
    // frame setup instruction by llvm::DwarfDebug::beginFunction().
    auto NL = ApplyDebugLocation::CreateEmpty(*this);
    Builder.CreateStore(BlockPointer, Alloca);
    BlockPointerDbgLoc = Alloca.getPointer();
  }

  // If we have a C++ 'this' reference, go ahead and force it into
  // existence now.
  if (blockDecl->capturesCXXThis()) {
    Address addr =
      Builder.CreateStructGEP(LoadBlockStruct(), blockInfo.CXXThisIndex,
                              blockInfo.CXXThisOffset, "block.captured-this");
    CXXThisValue = Builder.CreateLoad(addr, "this");
  }

  // Also force all the constant captures.
  for (const auto &CI : blockDecl->captures()) {
    const VarDecl *variable = CI.getVariable();
    const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
    if (!capture.isConstant()) continue;

    CharUnits align = getContext().getDeclAlign(variable);
    Address alloca =
      CreateMemTemp(variable->getType(), align, "block.captured-const");

    Builder.CreateStore(capture.getConstant(), alloca);

    setAddrOfLocalVar(variable, alloca);
  }

  // Save a spot to insert the debug information for all the DeclRefExprs.
  llvm::BasicBlock *entry = Builder.GetInsertBlock();
  llvm::BasicBlock::iterator entry_ptr = Builder.GetInsertPoint();
  // Step back one so the saved iterator stays valid across body emission.
  --entry_ptr;

  if (IsLambdaConversionToBlock)
    EmitLambdaBlockInvokeBody();
  else {
    PGO.assignRegionCounters(blockDecl, fn);
    incrementProfileCounter(blockDecl->getBody());
    EmitStmt(blockDecl->getBody());
  }

  // Remember where we were...
  llvm::BasicBlock *resume = Builder.GetInsertBlock();

  // Go back to the entry.
  ++entry_ptr;
  Builder.SetInsertPoint(entry, entry_ptr);

  // Emit debug information for all the DeclRefExprs.
  // FIXME: also for 'this'
  if (CGDebugInfo *DI = getDebugInfo()) {
    for (const auto &CI : blockDecl->captures()) {
      const VarDecl *variable = CI.getVariable();
      DI->EmitLocation(Builder, variable->getLocation());

      if (CGM.getCodeGenOpts().getDebugInfo()
            >= CodeGenOptions::LimitedDebugInfo) {
        const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
        if (capture.isConstant()) {
          auto addr = LocalDeclMap.find(variable)->second;
          DI->EmitDeclareOfAutoVariable(variable, addr.getPointer(),
                                        Builder);
          continue;
        }

        DI->EmitDeclareOfBlockDeclRefVariable(variable, BlockPointerDbgLoc,
                                              Builder, blockInfo,
                                              entry_ptr == entry->end()
                                              ? nullptr : entry_ptr);
      }
    }
    // Recover location if it was changed in the above loop.
    DI->EmitLocation(Builder,
                     cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());
  }

  // And resume where we left off.
  if (resume == nullptr)
    Builder.ClearInsertionPoint();
  else
    Builder.SetInsertPoint(resume);

  FinishFunction(cast<CompoundStmt>(blockDecl->getBody())->getRBracLoc());

  return fn;
}
/* Dead code kept for reference -- capture-note computation from an
   earlier helper implementation:
  1064. notes.push_back(HelperInfo());
  1065. HelperInfo &note = notes.back();
  1066. note.index = capture.getIndex();
  1067. note.RequiresCopying = (ci->hasCopyExpr() || BlockRequiresCopying(type));
  1068. note.cxxbar_import = ci->getCopyExpr();
  1069. if (ci->isByRef()) {
  1070. note.flag = BLOCK_FIELD_IS_BYREF;
  1071. if (type.isObjCGCWeak())
  1072. note.flag |= BLOCK_FIELD_IS_WEAK;
  1073. } else if (type->isBlockPointerType()) {
  1074. note.flag = BLOCK_FIELD_IS_BLOCK;
  1075. } else {
  1076. note.flag = BLOCK_FIELD_IS_OBJECT;
  1077. }
  1078. */
/// Generate the copy-helper function for a block closure object:
///   static void block_copy_helper(block_t *dst, block_t *src);
/// The runtime will have previously initialized 'dst' by doing a
/// bit-copy of 'src'.
///
/// Note that this copies an entire block closure object to the heap;
/// it should not be confused with a 'byref copy helper', which moves
/// the contents of an individual __block variable to the heap.
llvm::Constant *
CodeGenFunction::GenerateCopyHelperFunction(const CGBlockInfo &blockInfo) {
  ASTContext &C = getContext();

  // The helper's signature is (void *dst, void *src).
  FunctionArgList args;
  ImplicitParamDecl dstDecl(getContext(), nullptr, SourceLocation(), nullptr,
                            C.VoidPtrTy);
  args.push_back(&dstDecl);
  ImplicitParamDecl srcDecl(getContext(), nullptr, SourceLocation(), nullptr,
                            C.VoidPtrTy);
  args.push_back(&srcDecl);

  const CGFunctionInfo &FI = CGM.getTypes().arrangeFreeFunctionDeclaration(
      C.VoidTy, args, FunctionType::ExtInfo(), /*variadic=*/false);

  // FIXME: it would be nice if these were mergeable with things with
  // identical semantics.
  llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);

  llvm::Function *Fn =
    llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
                           "__copy_helper_block_", &CGM.getModule());

  IdentifierInfo *II
    = &CGM.getContext().Idents.get("__copy_helper_block_");

  FunctionDecl *FD = FunctionDecl::Create(C,
                                          C.getTranslationUnitDecl(),
                                          SourceLocation(),
                                          SourceLocation(), II, C.VoidTy,
                                          nullptr, SC_Static,
                                          false,
                                          false);

  CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);

  auto NL = ApplyDebugLocation::CreateEmpty(*this);
  StartFunction(FD, C.VoidTy, Fn, FI, args);
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);
  llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();

  // Load both void* parameters and cast them to the concrete block
  // struct type so fields can be addressed.
  Address src = GetAddrOfLocalVar(&srcDecl);
  src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
  src = Builder.CreateBitCast(src, structPtrTy, "block.source");

  Address dst = GetAddrOfLocalVar(&dstDecl);
  dst = Address(Builder.CreateLoad(dst), blockInfo.BlockAlign);
  dst = Builder.CreateBitCast(dst, structPtrTy, "block.dest");

  const BlockDecl *blockDecl = blockInfo.getBlockDecl();

  for (const auto &CI : blockDecl->captures()) {
    const VarDecl *variable = CI.getVariable();
    QualType type = variable->getType();

    const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
    if (capture.isConstant()) continue;

    const Expr *copyExpr = CI.getCopyExpr();
    BlockFieldFlags flags;

    bool useARCWeakCopy = false;
    bool useARCStrongCopy = false;

    if (copyExpr) {
      assert(!CI.isByRef());
      // don't bother computing flags

    } else if (CI.isByRef()) {
      flags = BLOCK_FIELD_IS_BYREF;
      if (type.isObjCGCWeak())
        flags |= BLOCK_FIELD_IS_WEAK;

    } else if (type->isObjCRetainableType()) {
      flags = BLOCK_FIELD_IS_OBJECT;
      bool isBlockPointer = type->isBlockPointerType();
      if (isBlockPointer)
        flags = BLOCK_FIELD_IS_BLOCK;

      // Special rules for ARC captures:
      if (getLangOpts().ObjCAutoRefCount) {
        Qualifiers qs = type.getQualifiers();

        // We need to register __weak direct captures with the runtime.
        if (qs.getObjCLifetime() == Qualifiers::OCL_Weak) {
          useARCWeakCopy = true;

        // We need to retain the copied value for __strong direct captures.
        } else if (qs.getObjCLifetime() == Qualifiers::OCL_Strong) {
          // If it's a block pointer, we have to copy the block and
          // assign that to the destination pointer, so we might as
          // well use _Block_object_assign.  Otherwise we can avoid that.
          if (!isBlockPointer)
            useARCStrongCopy = true;

        // Otherwise the memcpy is fine.
        } else {
          continue;
        }

      // Non-ARC captures of retainable pointers are strong and
      // therefore require a call to _Block_object_assign.
      } else {
        // fall through
      }
    } else {
      // Trivially-copyable capture; the runtime's bit-copy suffices.
      continue;
    }

    unsigned index = capture.getIndex();
    Address srcField = Builder.CreateStructGEP(src, index, capture.getOffset());
    Address dstField = Builder.CreateStructGEP(dst, index, capture.getOffset());

    // If there's an explicit copy expression, we do that.
    if (copyExpr) {
      EmitSynthesizedCXXCopyCtor(dstField, srcField, copyExpr);
    } else if (useARCWeakCopy) {
      EmitARCCopyWeak(dstField, srcField);
    } else {
      llvm::Value *srcValue = Builder.CreateLoad(srcField, "blockcopy.src");
      if (useARCStrongCopy) {
        // At -O0, store null into the destination field (so that the
        // storeStrong doesn't over-release) and then call storeStrong.
        // This is a workaround to not having an initStrong call.
        if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
          auto *ty = cast<llvm::PointerType>(srcValue->getType());
          llvm::Value *null = llvm::ConstantPointerNull::get(ty);
          Builder.CreateStore(null, dstField);
          EmitARCStoreStrongCall(dstField, srcValue, true);

        // With optimization enabled, take advantage of the fact that
        // the blocks runtime guarantees a memcpy of the block data, and
        // just emit a retain of the src field.
        } else {
          EmitARCRetainNonBlock(srcValue);

          // We don't need this anymore, so kill it.  It's not quite
          // worth the annoyance to avoid creating it in the first place.
          cast<llvm::Instruction>(dstField.getPointer())->eraseFromParent();
        }
      } else {
        srcValue = Builder.CreateBitCast(srcValue, VoidPtrTy);
        llvm::Value *dstAddr =
          Builder.CreateBitCast(dstField.getPointer(), VoidPtrTy);
        llvm::Value *args[] = {
          dstAddr, srcValue, llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
        };

        bool copyCanThrow = false;
        // NOTE(review): this inner 'copyExpr' shadows the capture's
        // copyExpr above, which is necessarily null on this path.
        if (CI.isByRef() && variable->getType()->getAsCXXRecordDecl()) {
          const Expr *copyExpr =
            CGM.getContext().getBlockVarCopyInits(variable);
          if (copyExpr) {
            copyCanThrow = true; // FIXME: reuse the noexcept logic
          }
        }

        if (copyCanThrow) {
          EmitRuntimeCallOrInvoke(CGM.getBlockObjectAssign(), args);
        } else {
          EmitNounwindRuntimeCall(CGM.getBlockObjectAssign(), args);
        }
      }
    }
  }

  FinishFunction();

  return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
}
/// Generate the destroy-helper function for a block closure object:
///   static void block_destroy_helper(block_t *theBlock);
///
/// Note that this destroys a heap-allocated block closure object;
/// it should not be confused with a 'byref destroy helper', which
/// destroys the heap-allocated contents of an individual __block
/// variable.
llvm::Constant *
CodeGenFunction::GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo) {
  ASTContext &C = getContext();

  // The helper takes a single void* argument: the block literal to destroy.
  FunctionArgList args;
  ImplicitParamDecl srcDecl(getContext(), nullptr, SourceLocation(), nullptr,
                            C.VoidPtrTy);
  args.push_back(&srcDecl);

  const CGFunctionInfo &FI = CGM.getTypes().arrangeFreeFunctionDeclaration(
      C.VoidTy, args, FunctionType::ExtInfo(), /*variadic=*/false);

  // FIXME: We'd like to put these into a mergable by content, with
  // internal linkage.
  llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);

  llvm::Function *Fn =
    llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
                           "__destroy_helper_block_", &CGM.getModule());

  IdentifierInfo *II
    = &CGM.getContext().Idents.get("__destroy_helper_block_");

  FunctionDecl *FD = FunctionDecl::Create(C, C.getTranslationUnitDecl(),
                                          SourceLocation(),
                                          SourceLocation(), II, C.VoidTy,
                                          nullptr, SC_Static,
                                          false, false);

  CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);

  // Create a scope with an artificial location for the body of this function.
  auto NL = ApplyDebugLocation::CreateEmpty(*this);
  StartFunction(FD, C.VoidTy, Fn, FI, args);
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  llvm::Type *structPtrTy = blockInfo.StructureType->getPointerTo();

  // Load the void* argument and cast it to the concrete block struct type
  // so each capture can be addressed by its field index.
  Address src = GetAddrOfLocalVar(&srcDecl);
  src = Address(Builder.CreateLoad(src), blockInfo.BlockAlign);
  src = Builder.CreateBitCast(src, structPtrTy, "block");

  const BlockDecl *blockDecl = blockInfo.getBlockDecl();

  CodeGenFunction::RunCleanupsScope cleanups(*this);

  // Walk the captures and emit whatever destruction each field requires.
  for (const auto &CI : blockDecl->captures()) {
    const VarDecl *variable = CI.getVariable();
    QualType type = variable->getType();

    const CGBlockInfo::Capture &capture = blockInfo.getCapture(variable);
    // Constant captures aren't stored in the block literal; nothing to do.
    if (capture.isConstant()) continue;

    BlockFieldFlags flags;
    const CXXDestructorDecl *dtor = nullptr;

    bool useARCWeakDestroy = false;
    bool useARCStrongDestroy = false;

    if (CI.isByRef()) {
      // __block captures are released through _Block_object_dispose.
      flags = BLOCK_FIELD_IS_BYREF;
      if (type.isObjCGCWeak())
        flags |= BLOCK_FIELD_IS_WEAK;
    } else if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
      // C++ captures only need work for non-trivial destructors.
      if (record->hasTrivialDestructor())
        continue;
      dtor = record->getDestructor();
    } else if (type->isObjCRetainableType()) {
      flags = BLOCK_FIELD_IS_OBJECT;
      if (type->isBlockPointerType())
        flags = BLOCK_FIELD_IS_BLOCK;

      // Special rules for ARC captures.
      if (getLangOpts().ObjCAutoRefCount) {
        Qualifiers qs = type.getQualifiers();

        // Don't generate special dispose logic for a captured object
        // unless it's __strong or __weak.
        if (!qs.hasStrongOrWeakObjCLifetime())
          continue;

        // Support __weak direct captures.
        if (qs.getObjCLifetime() == Qualifiers::OCL_Weak)
          useARCWeakDestroy = true;

        // Tools really want us to use objc_storeStrong here.
        else
          useARCStrongDestroy = true;
      }
    } else {
      continue;
    }

    Address srcField =
      Builder.CreateStructGEP(src, capture.getIndex(), capture.getOffset());

    // If the capture is a C++ object with a non-trivial destructor,
    // push a cleanup to run that destructor.
    if (dtor) {
      PushDestructorCleanup(dtor, srcField);

    // If this is a __weak capture, emit the release directly.
    } else if (useARCWeakDestroy) {
      EmitARCDestroyWeak(srcField);

    // Destroy strong objects with a call if requested.
    } else if (useARCStrongDestroy) {
      EmitARCDestroyStrong(srcField, ARCImpreciseLifetime);

    // Otherwise we call _Block_object_dispose.  It wouldn't be too
    // hard to just emit this as a cleanup if we wanted to make sure
    // that things were done in reverse.
    } else {
      llvm::Value *value = Builder.CreateLoad(srcField);
      value = Builder.CreateBitCast(value, VoidPtrTy);
      BuildBlockRelease(value, flags);
    }
  }

  cleanups.ForceCleanup();

  FinishFunction();

  return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
}
  1329. namespace {
  1330. /// Emits the copy/dispose helper functions for a __block object of id type.
  1331. class ObjectByrefHelpers final : public BlockByrefHelpers {
  1332. BlockFieldFlags Flags;
  1333. public:
  1334. ObjectByrefHelpers(CharUnits alignment, BlockFieldFlags flags)
  1335. : BlockByrefHelpers(alignment), Flags(flags) {}
  1336. void emitCopy(CodeGenFunction &CGF, Address destField,
  1337. Address srcField) override {
  1338. destField = CGF.Builder.CreateBitCast(destField, CGF.VoidPtrTy);
  1339. srcField = CGF.Builder.CreateBitCast(srcField, CGF.VoidPtrPtrTy);
  1340. llvm::Value *srcValue = CGF.Builder.CreateLoad(srcField);
  1341. unsigned flags = (Flags | BLOCK_BYREF_CALLER).getBitMask();
  1342. llvm::Value *flagsVal = llvm::ConstantInt::get(CGF.Int32Ty, flags);
  1343. llvm::Value *fn = CGF.CGM.getBlockObjectAssign();
  1344. llvm::Value *args[] = { destField.getPointer(), srcValue, flagsVal };
  1345. CGF.EmitNounwindRuntimeCall(fn, args);
  1346. }
  1347. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1348. field = CGF.Builder.CreateBitCast(field, CGF.Int8PtrTy->getPointerTo(0));
  1349. llvm::Value *value = CGF.Builder.CreateLoad(field);
  1350. CGF.BuildBlockRelease(value, Flags | BLOCK_BYREF_CALLER);
  1351. }
  1352. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1353. id.AddInteger(Flags.getBitMask());
  1354. }
  1355. };
/// Emits the copy/dispose helpers for an ARC __block __weak variable.
class ARCWeakByrefHelpers final : public BlockByrefHelpers {
public:
  ARCWeakByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}

  // Transfer the weak reference from the old location to the new one.
  void emitCopy(CodeGenFunction &CGF, Address destField,
                Address srcField) override {
    CGF.EmitARCMoveWeak(destField, srcField);
  }

  void emitDispose(CodeGenFunction &CGF, Address field) override {
    CGF.EmitARCDestroyWeak(field);
  }

  void profileImpl(llvm::FoldingSetNodeID &id) const override {
    // 0 is distinguishable from all pointers and byref flags
    id.AddInteger(0);
  }
};
  1372. /// Emits the copy/dispose helpers for an ARC __block __strong variable
  1373. /// that's not of block-pointer type.
  1374. class ARCStrongByrefHelpers final : public BlockByrefHelpers {
  1375. public:
  1376. ARCStrongByrefHelpers(CharUnits alignment) : BlockByrefHelpers(alignment) {}
  1377. void emitCopy(CodeGenFunction &CGF, Address destField,
  1378. Address srcField) override {
  1379. // Do a "move" by copying the value and then zeroing out the old
  1380. // variable.
  1381. llvm::Value *value = CGF.Builder.CreateLoad(srcField);
  1382. llvm::Value *null =
  1383. llvm::ConstantPointerNull::get(cast<llvm::PointerType>(value->getType()));
  1384. if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
  1385. CGF.Builder.CreateStore(null, destField);
  1386. CGF.EmitARCStoreStrongCall(destField, value, /*ignored*/ true);
  1387. CGF.EmitARCStoreStrongCall(srcField, null, /*ignored*/ true);
  1388. return;
  1389. }
  1390. CGF.Builder.CreateStore(value, destField);
  1391. CGF.Builder.CreateStore(null, srcField);
  1392. }
  1393. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1394. CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
  1395. }
  1396. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1397. // 1 is distinguishable from all pointers and byref flags
  1398. id.AddInteger(1);
  1399. }
  1400. };
  1401. /// Emits the copy/dispose helpers for an ARC __block __strong
  1402. /// variable that's of block-pointer type.
  1403. class ARCStrongBlockByrefHelpers final : public BlockByrefHelpers {
  1404. public:
  1405. ARCStrongBlockByrefHelpers(CharUnits alignment)
  1406. : BlockByrefHelpers(alignment) {}
  1407. void emitCopy(CodeGenFunction &CGF, Address destField,
  1408. Address srcField) override {
  1409. // Do the copy with objc_retainBlock; that's all that
  1410. // _Block_object_assign would do anyway, and we'd have to pass the
  1411. // right arguments to make sure it doesn't get no-op'ed.
  1412. llvm::Value *oldValue = CGF.Builder.CreateLoad(srcField);
  1413. llvm::Value *copy = CGF.EmitARCRetainBlock(oldValue, /*mandatory*/ true);
  1414. CGF.Builder.CreateStore(copy, destField);
  1415. }
  1416. void emitDispose(CodeGenFunction &CGF, Address field) override {
  1417. CGF.EmitARCDestroyStrong(field, ARCImpreciseLifetime);
  1418. }
  1419. void profileImpl(llvm::FoldingSetNodeID &id) const override {
  1420. // 2 is distinguishable from all pointers and byref flags
  1421. id.AddInteger(2);
  1422. }
  1423. };
/// Emits the copy/dispose helpers for a __block variable with a
/// nontrivial copy constructor or destructor.
class CXXByrefHelpers final : public BlockByrefHelpers {
  QualType VarType;       // type of the __block variable
  const Expr *CopyExpr;   // synthesized copy-initialization expr, or null

public:
  CXXByrefHelpers(CharUnits alignment, QualType type,
                  const Expr *copyExpr)
    : BlockByrefHelpers(alignment), VarType(type), CopyExpr(copyExpr) {}

  // No copy body is needed when there is no copy expression (e.g. only
  // the destructor is non-trivial).
  bool needsCopy() const override { return CopyExpr != nullptr; }
  void emitCopy(CodeGenFunction &CGF, Address destField,
                Address srcField) override {
    if (!CopyExpr) return;
    CGF.EmitSynthesizedCXXCopyCtor(destField, srcField, CopyExpr);
  }

  void emitDispose(CodeGenFunction &CGF, Address field) override {
    // Push the destructor cleanup and immediately pop back to the
    // recorded depth so the destructor call is emitted right here.
    EHScopeStack::stable_iterator cleanupDepth = CGF.EHStack.stable_begin();
    CGF.PushDestructorCleanup(VarType, field);
    CGF.PopCleanupBlocks(cleanupDepth);
  }

  void profileImpl(llvm::FoldingSetNodeID &id) const override {
    id.AddPointer(VarType.getCanonicalType().getAsOpaquePtr());
  }
};
  1448. } // end anonymous namespace
// Emits the body of a __block variable's copy helper:
//   static void __Block_byref_object_copy_(void *dst, void *src);
// The actual per-type copy logic is delegated to the generator.
static llvm::Constant *
generateByrefCopyHelper(CodeGenFunction &CGF, const BlockByrefInfo &byrefInfo,
                        BlockByrefHelpers &generator) {
  ASTContext &Context = CGF.getContext();

  QualType R = Context.VoidTy;

  // Two implicit void* parameters: destination and source byref structs.
  FunctionArgList args;
  ImplicitParamDecl dst(CGF.getContext(), nullptr, SourceLocation(), nullptr,
                        Context.VoidPtrTy);
  args.push_back(&dst);

  ImplicitParamDecl src(CGF.getContext(), nullptr, SourceLocation(), nullptr,
                        Context.VoidPtrTy);
  args.push_back(&src);

  const CGFunctionInfo &FI = CGF.CGM.getTypes().arrangeFreeFunctionDeclaration(
      R, args, FunctionType::ExtInfo(), /*variadic=*/false);

  llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);

  // FIXME: We'd like to put these into a mergable by content, with
  // internal linkage.
  llvm::Function *Fn =
    llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
                           "__Block_byref_object_copy_", &CGF.CGM.getModule());

  IdentifierInfo *II
    = &Context.Idents.get("__Block_byref_object_copy_");

  FunctionDecl *FD = FunctionDecl::Create(Context,
                                          Context.getTranslationUnitDecl(),
                                          SourceLocation(),
                                          SourceLocation(), II, R, nullptr,
                                          SC_Static,
                                          false, false);

  CGF.CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);

  CGF.StartFunction(FD, R, Fn, FI, args);

  // Only emit a body if this variable actually needs copy work;
  // otherwise the helper stays empty.
  if (generator.needsCopy()) {
    llvm::Type *byrefPtrType = byrefInfo.Type->getPointerTo(0);

    // dst->x
    Address destField = CGF.GetAddrOfLocalVar(&dst);
    destField = Address(CGF.Builder.CreateLoad(destField),
                        byrefInfo.ByrefAlignment);
    destField = CGF.Builder.CreateBitCast(destField, byrefPtrType);
    destField = CGF.emitBlockByrefAddress(destField, byrefInfo, false,
                                          "dest-object");

    // src->x
    Address srcField = CGF.GetAddrOfLocalVar(&src);
    srcField = Address(CGF.Builder.CreateLoad(srcField),
                       byrefInfo.ByrefAlignment);
    srcField = CGF.Builder.CreateBitCast(srcField, byrefPtrType);
    srcField = CGF.emitBlockByrefAddress(srcField, byrefInfo, false,
                                         "src-object");

    generator.emitCopy(CGF, destField, srcField);
  }

  CGF.FinishFunction();

  return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
}
  1500. /// Build the copy helper for a __block variable.
  1501. static llvm::Constant *buildByrefCopyHelper(CodeGenModule &CGM,
  1502. const BlockByrefInfo &byrefInfo,
  1503. BlockByrefHelpers &generator) {
  1504. CodeGenFunction CGF(CGM);
  1505. return generateByrefCopyHelper(CGF, byrefInfo, generator);
  1506. }
/// Generate code for a __block variable's dispose helper:
///   static void __Block_byref_object_dispose_(void *src);
/// The per-type disposal logic is delegated to the generator.
static llvm::Constant *
generateByrefDisposeHelper(CodeGenFunction &CGF,
                           const BlockByrefInfo &byrefInfo,
                           BlockByrefHelpers &generator) {
  ASTContext &Context = CGF.getContext();
  QualType R = Context.VoidTy;

  // Single implicit void* parameter: the byref struct being disposed.
  FunctionArgList args;
  ImplicitParamDecl src(CGF.getContext(), nullptr, SourceLocation(), nullptr,
                        Context.VoidPtrTy);
  args.push_back(&src);

  const CGFunctionInfo &FI = CGF.CGM.getTypes().arrangeFreeFunctionDeclaration(
      R, args, FunctionType::ExtInfo(), /*variadic=*/false);

  llvm::FunctionType *LTy = CGF.CGM.getTypes().GetFunctionType(FI);

  // FIXME: We'd like to put these into a mergable by content, with
  // internal linkage.
  llvm::Function *Fn =
    llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
                           "__Block_byref_object_dispose_",
                           &CGF.CGM.getModule());

  IdentifierInfo *II
    = &Context.Idents.get("__Block_byref_object_dispose_");

  FunctionDecl *FD = FunctionDecl::Create(Context,
                                          Context.getTranslationUnitDecl(),
                                          SourceLocation(),
                                          SourceLocation(), II, R, nullptr,
                                          SC_Static,
                                          false, false);

  CGF.CGM.SetInternalFunctionAttributes(nullptr, Fn, FI);

  CGF.StartFunction(FD, R, Fn, FI, args);

  // Only emit a body if disposal work is actually required.
  if (generator.needsDispose()) {
    // Chase the void* argument to the byref struct and address the
    // variable's field within it.
    Address addr = CGF.GetAddrOfLocalVar(&src);
    addr = Address(CGF.Builder.CreateLoad(addr), byrefInfo.ByrefAlignment);
    auto byrefPtrType = byrefInfo.Type->getPointerTo(0);
    addr = CGF.Builder.CreateBitCast(addr, byrefPtrType);
    addr = CGF.emitBlockByrefAddress(addr, byrefInfo, false, "object");

    generator.emitDispose(CGF, addr);
  }

  CGF.FinishFunction();

  return llvm::ConstantExpr::getBitCast(Fn, CGF.Int8PtrTy);
}
  1548. /// Build the dispose helper for a __block variable.
  1549. static llvm::Constant *buildByrefDisposeHelper(CodeGenModule &CGM,
  1550. const BlockByrefInfo &byrefInfo,
  1551. BlockByrefHelpers &generator) {
  1552. CodeGenFunction CGF(CGM);
  1553. return generateByrefDisposeHelper(CGF, byrefInfo, generator);
  1554. }
/// Lazily build the copy and dispose helpers for a __block variable
/// with the given information.
template <class T>
static T *buildByrefHelpers(CodeGenModule &CGM, const BlockByrefInfo &byrefInfo,
                            T &&generator) {
  // Helpers are uniqued by the generator's profile; check the cache first
  // so variables with identical requirements share one helper pair.
  llvm::FoldingSetNodeID id;
  generator.Profile(id);

  void *insertPos;
  BlockByrefHelpers *node
    = CGM.ByrefHelpersCache.FindNodeOrInsertPos(id, insertPos);
  if (node) return static_cast<T*>(node);

  // Cache miss: emit both helpers, then move the generator into an
  // ASTContext-allocated copy that lives in the cache.
  generator.CopyHelper = buildByrefCopyHelper(CGM, byrefInfo, generator);
  generator.DisposeHelper = buildByrefDisposeHelper(CGM, byrefInfo, generator);

  T *copy = new (CGM.getContext()) T(std::move(generator));
  CGM.ByrefHelpersCache.InsertNode(copy, insertPos);
  return copy;
}
/// Build the copy and dispose helpers for the given __block variable
/// emission.  Places the helpers in the global cache.  Returns null
/// if no helpers are required.
BlockByrefHelpers *
CodeGenFunction::buildByrefHelpers(llvm::StructType &byrefType,
                                   const AutoVarEmission &emission) {
  const VarDecl &var = *emission.Variable;
  QualType type = var.getType();

  auto &byrefInfo = getBlockByrefInfo(&var);

  // The alignment we care about for the purposes of uniquing byref
  // helpers is the alignment of the actual byref value field.
  CharUnits valueAlignment =
    byrefInfo.ByrefAlignment.alignmentAtOffset(byrefInfo.FieldOffset);

  // C++ class types: helpers are needed iff there is a copy-init
  // expression or a non-trivial destructor.
  if (const CXXRecordDecl *record = type->getAsCXXRecordDecl()) {
    const Expr *copyExpr = CGM.getContext().getBlockVarCopyInits(&var);
    if (!copyExpr && record->hasTrivialDestructor()) return nullptr;

    return ::buildByrefHelpers(
        CGM, byrefInfo, CXXByrefHelpers(valueAlignment, type, copyExpr));
  }

  // Otherwise, if we don't have a retainable type, there's nothing to do.
  if (!type->isObjCRetainableType()) return nullptr;

  Qualifiers qs = type.getQualifiers();

  // If we have lifetime, that dominates.
  if (Qualifiers::ObjCLifetime lifetime = qs.getObjCLifetime()) {
    assert(getLangOpts().ObjCAutoRefCount);

    switch (lifetime) {
    case Qualifiers::OCL_None: llvm_unreachable("impossible");

    // These are just bits as far as the runtime is concerned.
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      return nullptr;

    // Tell the runtime that this is ARC __weak, called by the
    // byref routines.
    case Qualifiers::OCL_Weak:
      return ::buildByrefHelpers(CGM, byrefInfo,
                                 ARCWeakByrefHelpers(valueAlignment));

    // ARC __strong __block variables need to be retained.
    case Qualifiers::OCL_Strong:
      // Block pointers need to be copied, and there's no direct
      // transfer possible.
      if (type->isBlockPointerType()) {
        return ::buildByrefHelpers(CGM, byrefInfo,
                                   ARCStrongBlockByrefHelpers(valueAlignment));

      // Otherwise, we transfer ownership of the retain from the stack
      // to the heap.
      } else {
        return ::buildByrefHelpers(CGM, byrefInfo,
                                   ARCStrongByrefHelpers(valueAlignment));
      }
    }
    llvm_unreachable("fell out of lifetime switch!");
  }

  // No explicit lifetime (MRC / GC): choose the flags for the generic
  // _Block_object_assign/dispose helpers.
  BlockFieldFlags flags;
  if (type->isBlockPointerType()) {
    flags |= BLOCK_FIELD_IS_BLOCK;
  } else if (CGM.getContext().isObjCNSObjectType(type) ||
             type->isObjCObjectPointerType()) {
    flags |= BLOCK_FIELD_IS_OBJECT;
  } else {
    return nullptr;
  }

  // Mark GC __weak variables so the runtime can tell them apart.
  if (type.isObjCGCWeak())
    flags |= BLOCK_FIELD_IS_WEAK;

  return ::buildByrefHelpers(CGM, byrefInfo,
                             ObjectByrefHelpers(valueAlignment, flags));
}
  1639. Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
  1640. const VarDecl *var,
  1641. bool followForward) {
  1642. auto &info = getBlockByrefInfo(var);
  1643. return emitBlockByrefAddress(baseAddr, info, followForward, var->getName());
  1644. }
  1645. Address CodeGenFunction::emitBlockByrefAddress(Address baseAddr,
  1646. const BlockByrefInfo &info,
  1647. bool followForward,
  1648. const llvm::Twine &name) {
  1649. // Chase the forwarding address if requested.
  1650. if (followForward) {
  1651. Address forwardingAddr =
  1652. Builder.CreateStructGEP(baseAddr, 1, getPointerSize(), "forwarding");
  1653. baseAddr = Address(Builder.CreateLoad(forwardingAddr), info.ByrefAlignment);
  1654. }
  1655. return Builder.CreateStructGEP(baseAddr, info.FieldIndex,
  1656. info.FieldOffset, name);
  1657. }
  1658. /// BuildByrefInfo - This routine changes a __block variable declared as T x
  1659. /// into:
  1660. ///
  1661. /// struct {
  1662. /// void *__isa;
  1663. /// void *__forwarding;
  1664. /// int32_t __flags;
  1665. /// int32_t __size;
  1666. /// void *__copy_helper; // only if needed
  1667. /// void *__destroy_helper; // only if needed
  1668. /// void *__byref_variable_layout;// only if needed
  1669. /// char padding[X]; // only if needed
  1670. /// T x;
  1671. /// } x
  1672. ///
  1673. const BlockByrefInfo &CodeGenFunction::getBlockByrefInfo(const VarDecl *D) {
  1674. auto it = BlockByrefInfos.find(D);
  1675. if (it != BlockByrefInfos.end())
  1676. return it->second;
  1677. llvm::StructType *byrefType =
  1678. llvm::StructType::create(getLLVMContext(),
  1679. "struct.__block_byref_" + D->getNameAsString());
  1680. QualType Ty = D->getType();
  1681. CharUnits size;
  1682. SmallVector<llvm::Type *, 8> types;
  1683. // void *__isa;
  1684. types.push_back(Int8PtrTy);
  1685. size += getPointerSize();
  1686. // void *__forwarding;
  1687. types.push_back(llvm::PointerType::getUnqual(byrefType));
  1688. size += getPointerSize();
  1689. // int32_t __flags;
  1690. types.push_back(Int32Ty);
  1691. size += CharUnits::fromQuantity(4);
  1692. // int32_t __size;
  1693. types.push_back(Int32Ty);
  1694. size += CharUnits::fromQuantity(4);
  1695. // Note that this must match *exactly* the logic in buildByrefHelpers.
  1696. bool hasCopyAndDispose = getContext().BlockRequiresCopying(Ty, D);
  1697. if (hasCopyAndDispose) {
  1698. /// void *__copy_helper;
  1699. types.push_back(Int8PtrTy);
  1700. size += getPointerSize();
  1701. /// void *__destroy_helper;
  1702. types.push_back(Int8PtrTy);
  1703. size += getPointerSize();
  1704. }
  1705. bool HasByrefExtendedLayout = false;
  1706. Qualifiers::ObjCLifetime Lifetime;
  1707. if (getContext().getByrefLifetime(Ty, Lifetime, HasByrefExtendedLayout) &&
  1708. HasByrefExtendedLayout) {
  1709. /// void *__byref_variable_layout;
  1710. types.push_back(Int8PtrTy);
  1711. size += CharUnits::fromQuantity(PointerSizeInBytes);
  1712. }
  1713. // T x;
  1714. llvm::Type *varTy = ConvertTypeForMem(Ty);
  1715. bool packed = false;
  1716. CharUnits varAlign = getContext().getDeclAlign(D);
  1717. CharUnits varOffset = size.RoundUpToAlignment(varAlign);
  1718. // We may have to insert padding.
  1719. if (varOffset != size) {
  1720. llvm::Type *paddingTy =
  1721. llvm::ArrayType::get(Int8Ty, (varOffset - size).getQuantity());
  1722. types.push_back(paddingTy);
  1723. size = varOffset;
  1724. // Conversely, we might have to prevent LLVM from inserting padding.
  1725. } else if (CGM.getDataLayout().getABITypeAlignment(varTy)
  1726. > varAlign.getQuantity()) {
  1727. packed = true;
  1728. }
  1729. types.push_back(varTy);
  1730. byrefType->setBody(types, packed);
  1731. BlockByrefInfo info;
  1732. info.Type = byrefType;
  1733. info.FieldIndex = types.size() - 1;
  1734. info.FieldOffset = varOffset;
  1735. info.ByrefAlignment = std::max(varAlign, getPointerAlign());
  1736. auto pair = BlockByrefInfos.insert({D, info});
  1737. assert(pair.second && "info was inserted recursively?");
  1738. return pair.first->second;
  1739. }
/// Initialize the structural components of a __block variable, i.e.
/// everything but the actual object.
void CodeGenFunction::emitByrefStructureInit(const AutoVarEmission &emission) {
  // Find the address of the local.
  Address addr = emission.Addr;

  // That's an alloca of the byref structure type.
  llvm::StructType *byrefType = cast<llvm::StructType>(
    cast<llvm::PointerType>(addr.getPointer()->getType())->getElementType());

  // Helper that stores successive header fields, advancing the running
  // field index and byte offset as it goes.
  unsigned nextHeaderIndex = 0;
  CharUnits nextHeaderOffset;
  auto storeHeaderField = [&](llvm::Value *value, CharUnits fieldSize,
                              const Twine &name) {
    auto fieldAddr = Builder.CreateStructGEP(addr, nextHeaderIndex,
                                             nextHeaderOffset, name);
    Builder.CreateStore(value, fieldAddr);

    nextHeaderIndex++;
    nextHeaderOffset += fieldSize;
  };

  // Build the byref helpers if necessary.  This is null if we don't need any.
  BlockByrefHelpers *helpers = buildByrefHelpers(*byrefType, emission);

  const VarDecl &D = *emission.Variable;
  QualType type = D.getType();

  bool HasByrefExtendedLayout;
  Qualifiers::ObjCLifetime ByrefLifetime;
  bool ByRefHasLifetime =
    getContext().getByrefLifetime(type, ByrefLifetime, HasByrefExtendedLayout);

  llvm::Value *V;

  // Initialize the 'isa', which is just 0 or 1 (1 for GC __weak).
  int isa = 0;
  if (type.isObjCGCWeak())
    isa = 1;
  V = Builder.CreateIntToPtr(Builder.getInt32(isa), Int8PtrTy, "isa");
  storeHeaderField(V, getPointerSize(), "byref.isa");

  // Store the address of the variable into its own forwarding pointer.
  storeHeaderField(addr.getPointer(), getPointerSize(), "byref.forwarding");

  // Blocks ABI:
  //   c) the flags field is set to either 0 if no helper functions are
  //      needed or BLOCK_BYREF_HAS_COPY_DISPOSE if they are,
  BlockFlags flags;
  if (helpers) flags |= BLOCK_BYREF_HAS_COPY_DISPOSE;
  if (ByRefHasLifetime) {
    // Encode the lifetime/layout kind into the flags word.
    if (HasByrefExtendedLayout) flags |= BLOCK_BYREF_LAYOUT_EXTENDED;
    else switch (ByrefLifetime) {
      case Qualifiers::OCL_Strong:
        flags |= BLOCK_BYREF_LAYOUT_STRONG;
        break;
      case Qualifiers::OCL_Weak:
        flags |= BLOCK_BYREF_LAYOUT_WEAK;
        break;
      case Qualifiers::OCL_ExplicitNone:
        flags |= BLOCK_BYREF_LAYOUT_UNRETAINED;
        break;
      case Qualifiers::OCL_None:
        if (!type->isObjCObjectPointerType() && !type->isBlockPointerType())
          flags |= BLOCK_BYREF_LAYOUT_NON_OBJECT;
        break;
      default:
        break;
    }
    // Debugging aid: print the chosen inline layout flags when requested.
    if (CGM.getLangOpts().ObjCGCBitmapPrint) {
      printf("\n Inline flag for BYREF variable layout (%d):", flags.getBitMask());
      if (flags & BLOCK_BYREF_HAS_COPY_DISPOSE)
        printf(" BLOCK_BYREF_HAS_COPY_DISPOSE");
      if (flags & BLOCK_BYREF_LAYOUT_MASK) {
        BlockFlags ThisFlag(flags.getBitMask() & BLOCK_BYREF_LAYOUT_MASK);
        if (ThisFlag == BLOCK_BYREF_LAYOUT_EXTENDED)
          printf(" BLOCK_BYREF_LAYOUT_EXTENDED");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_STRONG)
          printf(" BLOCK_BYREF_LAYOUT_STRONG");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_WEAK)
          printf(" BLOCK_BYREF_LAYOUT_WEAK");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_UNRETAINED)
          printf(" BLOCK_BYREF_LAYOUT_UNRETAINED");
        if (ThisFlag == BLOCK_BYREF_LAYOUT_NON_OBJECT)
          printf(" BLOCK_BYREF_LAYOUT_NON_OBJECT");
      }
      printf("\n");
    }
  }
  storeHeaderField(llvm::ConstantInt::get(IntTy, flags.getBitMask()),
                   getIntSize(), "byref.flags");

  // Store the total size of the byref struct.
  CharUnits byrefSize = CGM.GetTargetTypeStoreSize(byrefType);
  V = llvm::ConstantInt::get(IntTy, byrefSize.getQuantity());
  storeHeaderField(V, getIntSize(), "byref.size");

  // Helper function pointers are only present when needed.
  if (helpers) {
    storeHeaderField(helpers->CopyHelper, getPointerSize(),
                     "byref.copyHelper");
    storeHeaderField(helpers->DisposeHelper, getPointerSize(),
                     "byref.disposeHelper");
  }

  // Extended layout gets an extra layout-descriptor field.
  if (ByRefHasLifetime && HasByrefExtendedLayout) {
    auto layoutInfo = CGM.getObjCRuntime().BuildByrefLayout(CGM, type);
    storeHeaderField(layoutInfo, getPointerSize(), "byref.layout");
  }
}
  1835. void CodeGenFunction::BuildBlockRelease(llvm::Value *V, BlockFieldFlags flags) {
  1836. llvm::Value *F = CGM.getBlockObjectDispose();
  1837. llvm::Value *args[] = {
  1838. Builder.CreateBitCast(V, Int8PtrTy),
  1839. llvm::ConstantInt::get(Int32Ty, flags.getBitMask())
  1840. };
  1841. EmitNounwindRuntimeCall(F, args); // FIXME: throwing destructors?
  1842. }
  1843. namespace {
/// Release a __block variable.
struct CallBlockRelease final : EHScopeStack::Cleanup {
  llvm::Value *Addr;  // address of the byref structure to release
  CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // Should we be passing FIELD_IS_WEAK here?
    CGF.BuildBlockRelease(Addr, BLOCK_FIELD_IS_BYREF);
  }
};
  1853. } // end anonymous namespace
/// Enter a cleanup to destroy a __block variable.  Note that this
/// cleanup should be a no-op if the variable hasn't left the stack
/// yet; if a cleanup is required for the variable itself, that needs
/// to be done externally.
void CodeGenFunction::enterByrefCleanup(const AutoVarEmission &emission) {
  // We don't enter this cleanup if we're in pure-GC mode.
  if (CGM.getLangOpts().getGC() == LangOptions::GCOnly)
    return;

  // Release on both normal scope exit and exceptional unwind.
  EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup,
                                        emission.Addr.getPointer());
}
  1865. /// Adjust the declaration of something from the blocks API.
  1866. static void configureBlocksRuntimeObject(CodeGenModule &CGM,
  1867. llvm::Constant *C) {
  1868. if (!CGM.getLangOpts().BlocksRuntimeOptional) return;
  1869. auto *GV = cast<llvm::GlobalValue>(C->stripPointerCasts());
  1870. if (GV->isDeclaration() && GV->hasExternalLinkage())
  1871. GV->setLinkage(llvm::GlobalValue::ExternalWeakLinkage);
  1872. }
  1873. llvm::Constant *CodeGenModule::getBlockObjectDispose() {
  1874. if (BlockObjectDispose)
  1875. return BlockObjectDispose;
  1876. llvm::Type *args[] = { Int8PtrTy, Int32Ty };
  1877. llvm::FunctionType *fty
  1878. = llvm::FunctionType::get(VoidTy, args, false);
  1879. BlockObjectDispose = CreateRuntimeFunction(fty, "_Block_object_dispose");
  1880. configureBlocksRuntimeObject(*this, BlockObjectDispose);
  1881. return BlockObjectDispose;
  1882. }
  1883. llvm::Constant *CodeGenModule::getBlockObjectAssign() {
  1884. if (BlockObjectAssign)
  1885. return BlockObjectAssign;
  1886. llvm::Type *args[] = { Int8PtrTy, Int8PtrTy, Int32Ty };
  1887. llvm::FunctionType *fty
  1888. = llvm::FunctionType::get(VoidTy, args, false);
  1889. BlockObjectAssign = CreateRuntimeFunction(fty, "_Block_object_assign");
  1890. configureBlocksRuntimeObject(*this, BlockObjectAssign);
  1891. return BlockObjectAssign;
  1892. }
  1893. llvm::Constant *CodeGenModule::getNSConcreteGlobalBlock() {
  1894. if (NSConcreteGlobalBlock)
  1895. return NSConcreteGlobalBlock;
  1896. NSConcreteGlobalBlock = GetOrCreateLLVMGlobal("_NSConcreteGlobalBlock",
  1897. Int8PtrTy->getPointerTo(),
  1898. nullptr);
  1899. configureBlocksRuntimeObject(*this, NSConcreteGlobalBlock);
  1900. return NSConcreteGlobalBlock;
  1901. }
  1902. llvm::Constant *CodeGenModule::getNSConcreteStackBlock() {
  1903. if (NSConcreteStackBlock)
  1904. return NSConcreteStackBlock;
  1905. NSConcreteStackBlock = GetOrCreateLLVMGlobal("_NSConcreteStackBlock",
  1906. Int8PtrTy->getPointerTo(),
  1907. nullptr);
  1908. configureBlocksRuntimeObject(*this, NSConcreteStackBlock);
  1909. return NSConcreteStackBlock;
  1910. }