//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
//===----------------------------------------------------------------------===//
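//
// For orientation, a minimal sketch of where cleanups come from
// (illustrative source, not part of this file):
//
//   void f(bool cond) {
//     std::string s;     // pushes a cleanup running ~basic_string()
//     if (cond) return;  // normal exit: the cleanup must run
//     mayThrow();        // exceptional exit: it must run here too
//   }                    // fallthrough exit: and here
//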
#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
        CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
        llvm::StructType::get(V.first->getType(), V.second->getType());
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
    CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
    return saved_type(addr.getPointer(), ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
      CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), AggregateAddress,
                    V.getAlignment().getQuantity());
}

/// Given a saved r-value produced by SaveRValue, perform the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
    return Address(value, CharUnits::fromQuantity(alignment));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
    llvm::Value *imag =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
    return RValue::getComplex(real, imag);
  }
  }
  llvm_unreachable("bad saved r-value kind");
}
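
// A sketch of the scope stack's buffer discipline, inferred from
// allocate()/deallocate() below (not normative): the buffer is a single
// heap block, and scopes are packed at its high end, growing downward
// as they are pushed.
//
//   StartOfBuffer            StartOfData              EndOfBuffer
//        |------ free space ------|------ live scopes ------|
//
// allocate() therefore decrements StartOfData; deallocate() increments it.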
/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}

bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}

void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  bool IsLifetimeMarker = Kind & LifetimeMarker;
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  return Scope->getCleanupBuffer();
}
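
// Callers normally reach pushCleanup() through the typed helper
// EHScopeStack::pushCleanup<T>(Kind, ...), which placement-news a Cleanup
// subclass into the returned buffer.  A minimal sketch (CallDtor is a
// hypothetical subclass, not defined in this file):
//
//   struct CallDtor final : EHScopeStack::Cleanup {
//     void Emit(CodeGenFunction &CGF, Flags F) override { /* emit dtor */ }
//   };
//   CGF.EHStack.pushCleanup<CallDtor>(NormalAndEHCleanup);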
void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

Address CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}
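
// Why an active flag is needed, in one sketch (illustrative source only):
//
//   int x = cond ? Temp().field : 0;
//
// Temp's destructor must run only on the branch that actually constructed
// the temporary, so the flag is stored false before the conditional and
// flipped to true at the construction point inside it.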
void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getQuantity());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  auto load = new llvm::LoadInst(addr.getPointer(), name, beforeInst);
  load->setAlignment(addr.getAlignment().getQuantity());
  return load;
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}
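
// The rewrite above, as an IR sketch (block and value names illustrative):
//
//   before:  br label %orig.dest
//
//   after:   %cleanup.dest = load i32, i32* %cleanup.dest.slot
//            switch i32 %cleanup.dest, label %orig.dest [
//              ; cases added later as fixups resolve
//            ]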
void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, LatestBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the
  // current insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas, they dominate all cleanups.  These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}
/// Pops cleanup blocks until the given savepoint is reached, then adds the
/// cleanups recorded past the given savepoint of the lifetime-extended
/// cleanups stack onto the EH stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      Address ActiveFlag =
          reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}
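
// Entry layout of LifetimeExtendedCleanupStack as walked by the loop above
// (a sketch inferred from the code, not normative):
//
//   [LifetimeExtendedCleanupHeader][cleanup bytes...][Address active flag]
//                                                     ^ only if conditional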
static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}
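
// IR shape produced above when a flag is present (a sketch; value names
// match those used in the code):
//
//   %cleanup.is_active = load i1, i1* %cleanup.cond
//   br i1 %cleanup.is_active, label %cleanup.action, label %cleanup.done
//   cleanup.action:
//     ; ...cleanup body..., falls through
//   cleanup.done: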
static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::Instruction *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}
/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause such a block to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}
/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out.  This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here.  Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  alignas(EHScopeStack::ScopeStackAlignment) char
      CleanupBufferStack[8 * sizeof(void *)];
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = Address::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
                               nullptr);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups.  For funclet EH personalities, the cleanupendpad
    // models program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
        CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;

    const EHPersonality &Personality = EHPersonality::get(*this);
    if (Personality.usesFuncletPads()) {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // Non-MSVC personalities need to terminate when an EH cleanup throws.
    if (!Personality.isMSVCPersonality()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}

/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }
  Builder.ClearInsertionPoint();
}
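
// Threading sketch for a `goto` that crosses two normal cleanups
// (illustrative IR; block names invented):
//
//   store i32 <dest-index>, i32* %cleanup.dest.slot
//   br label %cleanup1            ; BI, redirected to the innermost entry
//   ; %cleanup1 runs, then branches/switches onward to %cleanup2
//   ; %cleanup2 runs, then its switch routes <dest-index> to the
//   ; original destination block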
static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup(); I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      createStoreInstBefore(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}
/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}
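
// A sketch of the intended activation pattern (hypothetical caller code):
// push a cleanup inactive, emit the code that may commit the resource,
// then activate at a point that dominates the cleanup's later uses.
//
//   EHScopeStack::stable_iterator C = CGF.EHStack.stable_begin();
//   llvm::Instruction *DomIP = /* e.g. the call that created the object */;
//   CGF.ActivateCleanupBlock(C, DomIP);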
Address CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}
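
// Note: the destination slot is an i32 alloca recording, at run time, which
// exit a thread of control is taking through a chain of cleanups; from the
// uses above, index 0 is reserved for ordinary fallthrough.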
/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}