//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope. This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
//===----------------------------------------------------------------------===//
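//
// Illustrative example (added for exposition, not part of the original
// comment): assuming a type with a non-trivial destructor, in
//
//   {
//     std::string s = makeName();   // 'makeName' is hypothetical
//     if (failed) goto err;         // ~string() must run on this edge,
//     use(s);                       // on normal fall-through, and on any
//   }                               // exception path out of use().
//   err: ...
//
// a cleanup for 's' is needed on every one of those exits; the machinery
// in this file builds and threads those cleanup blocks.
//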
#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;
bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}
DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
        CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
        llvm::StructType::get(V.first->getType(), V.second->getType());
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first,
                            CGF.Builder.CreateStructGEP(addr, 0, CharUnits()));
    CharUnits offset = CharUnits::fromQuantity(
        CGF.CGM.getDataLayout().getTypeAllocSize(V.first->getType()));
    CGF.Builder.CreateStore(V.second,
                            CGF.Builder.CreateStructGEP(addr, 1, offset));
    return saved_type(addr.getPointer(), ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
      CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), AggregateAddress,
                    V.getAlignment().getQuantity());
}
/// Given a saved r-value produced by save(), emit the code necessary
/// to restore it to usability at the current insertion point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
    return Address(value, CharUnits::fromQuantity(alignment));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real = CGF.Builder.CreateLoad(
        CGF.Builder.CreateStructGEP(address, 0, CharUnits()));
    CharUnits offset = CharUnits::fromQuantity(
        CGF.CGM.getDataLayout().getTypeAllocSize(real->getType()));
    llvm::Value *imag = CGF.Builder.CreateLoad(
        CGF.Builder.CreateStructGEP(address, 1, offset));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}
/// Push an entry of the given size onto this protected-scope stack.
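///
/// Layout note (inferred from the code below, not part of the original
/// comment): the buffer is a downward-growing stack. StartOfData begins at
/// EndOfBuffer and moves toward StartOfBuffer as scopes are pushed, so the
/// innermost scope always sits at the lowest in-use address and iteration
/// from begin() walks from the innermost scope outward.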
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }
  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}
bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
       si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }

  return stable_end();
}
void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  bool IsLifetimeMarker = Kind & LifetimeMarker;
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  return Scope->getCleanupBuffer();
}
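
// Illustrative usage (a sketch, added for exposition): callers normally go
// through the EHScopeStack::pushCleanup<T>(Kind, Args...) template, which
// placement-news a concrete Cleanup into the buffer returned above, e.g.
//
//   CGF.EHStack.pushCleanup<CallDtor>(NormalAndEHCleanup, Addr);
//
// where CallDtor is a hypothetical Cleanup subclass whose Emit() invokes
// the destructor on Addr.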
void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}
void CodeGenFunction::initFullExprCleanup() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(),
                                    "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(active);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}
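
// Illustrative example (added for exposition): the flag above is what makes
// conditionally-created cleanups correct. In
//
//   b ? Temp().use() : 0;   // 'Temp' is a hypothetical class type
//
// the temporary's destructor cleanup is pushed while emitting the true
// branch, but the cleanup itself runs at the end of the full-expression,
// where control may arrive without ever having constructed Temp; the i1
// flag records at run time whether that branch actually executed.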
void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getQuantity());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  auto load = new llvm::LoadInst(addr.getPointer(), name, beforeInst);
  load->setAlignment(addr.getAlignment().getQuantity());
  return load;
}
/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry. This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}
/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::TerminatorInst *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}
void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, LatestBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}
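
// Illustrative example (added for exposition): a branch fixup typically
// comes from a forward goto whose destination scope is unknown when the
// branch is emitted:
//
//   {
//     std::string s;
//     goto done;   // emitted before 'done:' has been seen; recorded as a
//   }              // BranchFixup and resolved by ResolveBranchFixups once
//   done: ...      // the label's block and scope depth are finally known.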
/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the
  // current insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas, they dominate all cleanups. These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}
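
// Rationale sketch (added for exposition): once a cleanup block has other
// predecessors, an instruction emitted before the cleanups may no longer
// dominate the insertion point that follows them. Spilling the value to a
// temporary right after its definition and reloading it here restores
// dominance without changing observable behavior.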
/// Pops cleanup blocks until the given savepoint is reached, then add the
/// cleanups from the given savepoint in the lifetime-extended cleanups stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}
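
// Illustrative example (added for exposition): lifetime-extended cleanups
// come from reference-bound temporaries, e.g.
//
//   const std::string &r = makeName();   // 'makeName' is hypothetical
//
// where ~string() must not run at the end of the initializer's
// full-expression but at the end of r's scope. The cleanup is parked on
// LifetimeExtendedCleanupStack and transferred to the EH stack here when
// the enclosing scope is popped.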
static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}
/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}
static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}
static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::TerminatorInst *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}
/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause these blocks to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}
/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup. This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope. The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block. If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }
  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out. This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here. Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  llvm::AlignedCharArray<EHScopeStack::ScopeStackAlignment, 8 * sizeof(void *)>
      CleanupBufferStack;
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack.buffer, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack.buffer);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();
  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I. Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index. For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II. Emit the entry block. This implicitly branches to it if
      // we have fallthrough. All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III. Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = Address::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
                               nullptr);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV. Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V. Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI. Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }
  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad
    // models program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
        CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;
    if (!EHPersonality::get(*this).usesFuncletPads()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    } else {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}
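
// Worked example (a sketch, added for exposition): in
//
//   while (c) { std::string s; if (p) break; if (q) continue; }
//
// the cleanup for 's' has two branch-afters (the break and continue
// destinations) plus the normal fallthrough, so the popped cleanup block
// ends in a switch on cleanup.dest.slot: case 0 resumes the fallthrough
// path, and each branch-after index routes control to its recorded block,
// exactly as constructed above.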
/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run. 'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}
/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope. The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it. If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}
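
// Illustrative IR shape (a sketch, added for exposition; not generated
// output): a branch to a destination with index 2 through one normal
// cleanup becomes roughly
//
//   store i32 2, i32* %cleanup.dest.slot
//   br label %cleanup               ; was: br label %dest
//
// and the cleanup's exit switch on %cleanup.dest.slot eventually routes
// case 2 to %dest via the branch-after recorded above.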
static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
       I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}
enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state. Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      createStoreInstBefore(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}
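
// Illustrative example (a sketch, added for exposition): a typical
// deactivation site is a new-expression whose initializer can throw,
//
//   p = new Widget(mayThrow());   // 'Widget' and 'mayThrow' are hypothetical
//
// where a cleanup that calls operator delete is active while the
// initializer runs and is deactivated once construction succeeds and the
// object's ownership is established.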
/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");
  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
  Scope.setActive(true);
}
/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
  Scope.setActive(false);
}
Address CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}
/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}