LowerInvoke.cpp 26 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630
  1. //===- LowerInvoke.cpp - Eliminate Invoke & Unwind instructions -----------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This transformation is designed for use by code generators which do not yet
  11. // support stack unwinding. This pass supports two models of exception handling
  12. // lowering, the 'cheap' support and the 'expensive' support.
  13. //
  14. // 'Cheap' exception handling support gives the program the ability to execute
  15. // any program which does not "throw an exception", by turning 'invoke'
  16. // instructions into calls and by turning 'unwind' instructions into calls to
  17. // abort(). If the program does dynamically use the unwind instruction, the
  18. // program will print a message then abort.
  19. //
  20. // 'Expensive' exception handling support gives the full exception handling
  21. // support to the program at the cost of making the 'invoke' instruction
  22. // really expensive. It basically inserts setjmp/longjmp calls to emulate the
  23. // exception handling as necessary.
  24. //
  25. // Because the 'expensive' support slows down programs a lot, and EH is only
  26. // used for a subset of the programs, it must be specifically enabled by an
  27. // option.
  28. //
  29. // Note that after this pass runs the CFG is not entirely accurate (exceptional
  30. // control flow edges are not correct anymore) so only very simple things should
  31. // be done after the lowerinvoke pass has run (like generation of native code).
  32. // This should not be used as a general purpose "my LLVM-to-LLVM pass doesn't
  33. // support the invoke instruction yet" lowering pass.
  34. //
  35. //===----------------------------------------------------------------------===//
  36. #define DEBUG_TYPE "lowerinvoke"
  37. #include "llvm/Transforms/Scalar.h"
  38. #include "llvm/Constants.h"
  39. #include "llvm/DerivedTypes.h"
  40. #include "llvm/Instructions.h"
  41. #include "llvm/Intrinsics.h"
  42. #include "llvm/LLVMContext.h"
  43. #include "llvm/Module.h"
  44. #include "llvm/Pass.h"
  45. #include "llvm/Transforms/Utils/BasicBlockUtils.h"
  46. #include "llvm/Transforms/Utils/Local.h"
  47. #include "llvm/ADT/Statistic.h"
  48. #include "llvm/Support/CommandLine.h"
  49. #include "llvm/Target/TargetLowering.h"
  50. #include <csetjmp>
  51. #include <set>
  52. using namespace llvm;
// Pass-wide statistics, reported with -stats.
STATISTIC(NumInvokes, "Number of invokes replaced");
STATISTIC(NumUnwinds, "Number of unwinds replaced");
STATISTIC(NumSpilled, "Number of registers live across unwind edges");

// Command-line switch selecting the lowering model: off (default) gives the
// 'cheap' model (invoke -> call, unwind -> abort); on gives the full
// setjmp/longjmp-based 'expensive' model described in the file header.
static cl::opt<bool> ExpensiveEHSupport("enable-correct-eh-support",
 cl::desc("Make the -lowerinvoke pass insert expensive, but correct, EH code"));
namespace {
  // LowerInvoke - FunctionPass that removes 'invoke' and 'unwind'
  // instructions for code generators without stack-unwinding support.
  // Supports both the 'cheap' and 'expensive' models (see file header).
  class LowerInvoke : public FunctionPass {
    // Used for both models.
    Constant *WriteFn;        // write() declaration (currently unused; #if 0)
    Constant *AbortFn;        // abort() declaration
    Value *AbortMessage;      // lazily created by createAbortMessage()
    unsigned AbortMessageLength; // message length, excluding the trailing \0

    // Used for expensive EH support.
    const Type *JBLinkTy;     // { jmp_buf, JBLinkTy* } linked-list node type
    GlobalVariable *JBListHead; // global head of the jmp_buf list
    Constant *SetJmpFn, *LongJmpFn; // llvm.setjmp / llvm.longjmp intrinsics

    // We peek in TLI to grab the target's jmp_buf size and alignment
    const TargetLowering *TLI;

  public:
    static char ID; // Pass identification, replacement for typeid
    explicit LowerInvoke(const TargetLowering *tli = NULL)
      : FunctionPass(&ID), TLI(tli) { }
    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      // This is a cluster of orthogonal Transforms
      AU.addPreservedID(PromoteMemoryToRegisterID);
      AU.addPreservedID(LowerSwitchID);
      AU.addPreservedID(LowerAllocationsID);
    }

  private:
    void createAbortMessage(Module *M);
    void writeAbortMessage(Instruction *IB);
    bool insertCheapEHSupport(Function &F);
    void splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes);
    void rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
                                AllocaInst *InvokeNum, SwitchInst *CatchSwitch);
    bool insertExpensiveEHSupport(Function &F);
  };
}
char LowerInvoke::ID = 0;

// Register the pass so it is available as -lowerinvoke.
static RegisterPass<LowerInvoke>
X("lowerinvoke", "Lower invoke and unwind, for unwindless code generators");

const PassInfo *const llvm::LowerInvokePassID = &X;

// Public Interface To the LowerInvoke pass.
FunctionPass *llvm::createLowerInvokePass(const TargetLowering *TLI) {
  return new LowerInvoke(TLI);
}
// doInitialization - Make sure that there is a prototype for abort in the
// current module.  For the expensive model this also creates the recursive
// jmp_buf linked-list type, the list-head global, and declarations of the
// llvm.setjmp/llvm.longjmp intrinsics.  Always returns true: declarations
// are inserted into the module.
bool LowerInvoke::doInitialization(Module &M) {
  const Type *VoidPtrTy =
          Type::getInt8PtrTy(M.getContext());
  AbortMessage = 0;
  if (ExpensiveEHSupport) {
    // Insert a type for the linked list of jump buffers.  Fall back to 200
    // pointer-sized slots if the target does not report a jmp_buf size.
    unsigned JBSize = TLI ? TLI->getJumpBufSize() : 0;
    JBSize = JBSize ? JBSize : 200;
    const Type *JmpBufTy = ArrayType::get(VoidPtrTy, JBSize);

    { // The type is recursive, so use a type holder.
      std::vector<const Type*> Elements;
      Elements.push_back(JmpBufTy);
      OpaqueType *OT = OpaqueType::get(M.getContext());
      Elements.push_back(PointerType::getUnqual(OT));
      PATypeHolder JBLType(StructType::get(M.getContext(), Elements));
      OT->refineAbstractTypeTo(JBLType.get());  // Complete the cycle.
      JBLinkTy = JBLType.get();
      M.addTypeName("llvm.sjljeh.jmpbufty", JBLinkTy);
    }

    const Type *PtrJBList = PointerType::getUnqual(JBLinkTy);

    // Now that we've done that, insert the jmpbuf list head global, unless it
    // already exists.
    if (!(JBListHead = M.getGlobalVariable("llvm.sjljeh.jblist", PtrJBList))) {
      JBListHead = new GlobalVariable(M, PtrJBList, false,
                                      GlobalValue::LinkOnceAnyLinkage,
                                      Constant::getNullValue(PtrJBList),
                                      "llvm.sjljeh.jblist");
    }

// VisualStudio defines setjmp as _setjmp via #include <csetjmp> / <setjmp.h>,
// so it looks like Intrinsic::_setjmp
#if defined(_MSC_VER) && defined(setjmp)
#define setjmp_undefined_for_visual_studio
#undef setjmp
#endif

    SetJmpFn = Intrinsic::getDeclaration(&M, Intrinsic::setjmp);

#if defined(_MSC_VER) && defined(setjmp_undefined_for_visual_studio)
// let's return it to _setjmp state in case anyone ever needs it after this
// point under VisualStudio
#define setjmp _setjmp
#endif

    LongJmpFn = Intrinsic::getDeclaration(&M, Intrinsic::longjmp);
  }

  // We need the 'write' and 'abort' functions for both models.
  AbortFn = M.getOrInsertFunction("abort", Type::getVoidTy(M.getContext()),
                                  (Type *)0);
#if 0 // "write" is Unix-specific.. code is going away soon anyway.
  WriteFn = M.getOrInsertFunction("write", Type::VoidTy, Type::Int32Ty,
                                  VoidPtrTy, Type::Int32Ty, (Type *)0);
#else
  WriteFn = 0;
#endif
  return true;
}
  156. void LowerInvoke::createAbortMessage(Module *M) {
  157. if (ExpensiveEHSupport) {
  158. // The abort message for expensive EH support tells the user that the
  159. // program 'unwound' without an 'invoke' instruction.
  160. Constant *Msg =
  161. ConstantArray::get(M->getContext(),
  162. "ERROR: Exception thrown, but not caught!\n");
  163. AbortMessageLength = Msg->getNumOperands()-1; // don't include \0
  164. GlobalVariable *MsgGV = new GlobalVariable(*M, Msg->getType(), true,
  165. GlobalValue::InternalLinkage,
  166. Msg, "abortmsg");
  167. std::vector<Constant*> GEPIdx(2,
  168. Constant::getNullValue(Type::getInt32Ty(M->getContext())));
  169. AbortMessage = ConstantExpr::getGetElementPtr(MsgGV, &GEPIdx[0], 2);
  170. } else {
  171. // The abort message for cheap EH support tells the user that EH is not
  172. // enabled.
  173. Constant *Msg =
  174. ConstantArray::get(M->getContext(),
  175. "Exception handler needed, but not enabled."
  176. "Recompile program with -enable-correct-eh-support.\n");
  177. AbortMessageLength = Msg->getNumOperands()-1; // don't include \0
  178. GlobalVariable *MsgGV = new GlobalVariable(*M, Msg->getType(), true,
  179. GlobalValue::InternalLinkage,
  180. Msg, "abortmsg");
  181. std::vector<Constant*> GEPIdx(2, Constant::getNullValue(
  182. Type::getInt32Ty(M->getContext())));
  183. AbortMessage = ConstantExpr::getGetElementPtr(MsgGV, &GEPIdx[0], 2);
  184. }
  185. }
// writeAbortMessage - Print the abort message (via write()) just before the
// inserted abort() call.  The entire body is compiled out with #if 0 because
// "write" is Unix-specific (see doInitialization); the helper is kept so the
// call sites remain in place.
void LowerInvoke::writeAbortMessage(Instruction *IB) {
#if 0
  if (AbortMessage == 0)
    createAbortMessage(IB->getParent()->getParent()->getParent());

  // These are the arguments we WANT...
  Value* Args[3];
  Args[0] = ConstantInt::get(Type::Int32Ty, 2);
  Args[1] = AbortMessage;
  Args[2] = ConstantInt::get(Type::Int32Ty, AbortMessageLength);
  (new CallInst(WriteFn, Args, 3, "", IB))->setTailCall();
#endif
}
// insertCheapEHSupport - Lower EH under the 'cheap' model: every invoke
// becomes an ordinary call followed by a branch to its normal destination,
// and every unwind becomes abort().  Returns true if anything was changed.
bool LowerInvoke::insertCheapEHSupport(Function &F) {
  bool Changed = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      // Skip the first three invoke operands to get the actual call
      // arguments (callee and the two destination blocks in this LLVM
      // version — matches the getCalledValue/getNormalDest accessors below).
      std::vector<Value*> CallArgs(II->op_begin()+3, II->op_end());
      // Insert a normal call instruction...
      CallInst *NewCall = CallInst::Create(II->getCalledValue(),
                                           CallArgs.begin(), CallArgs.end(),
                                           "", II);
      NewCall->takeName(II);
      NewCall->setCallingConv(II->getCallingConv());
      NewCall->setAttributes(II->getAttributes());
      II->replaceAllUsesWith(NewCall);

      // Insert an unconditional branch to the normal destination.
      BranchInst::Create(II->getNormalDest(), II);

      // Remove any PHI node entries from the exception destination.
      II->getUnwindDest()->removePredecessor(BB);

      // Remove the invoke instruction now.
      BB->getInstList().erase(II);

      ++NumInvokes; Changed = true;
    } else if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
      // Insert a new call to write(2, AbortMessage, AbortMessageLength);
      writeAbortMessage(UI);

      // Insert a call to abort()
      CallInst::Create(AbortFn, "", UI)->setTailCall();

      // Insert a return instruction. This really should be a "barrier", as it
      // is unreachable.
      ReturnInst::Create(F.getContext(),
                         F.getReturnType() == Type::getVoidTy(F.getContext()) ?
                          0 : Constant::getNullValue(F.getReturnType()), UI);

      // Remove the unwind instruction now.
      BB->getInstList().erase(UI);

      ++NumUnwinds; Changed = true;
    }
  return Changed;
}
/// rewriteExpensiveInvoke - Insert code and hack the function to replace the
/// specified invoke instruction with a call.  Under the expensive model each
/// invoke is numbered (InvokeNo); the number is stored to the InvokeNum
/// alloca around the call so the catch dispatch switch (CatchSwitch) can
/// route a longjmp arriving during this call to the invoke's unwind dest.
void LowerInvoke::rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
                                         AllocaInst *InvokeNum,
                                         SwitchInst *CatchSwitch) {
  ConstantInt *InvokeNoC = ConstantInt::get(Type::getInt32Ty(II->getContext()),
                                            InvokeNo);

  // If the unwind edge has phi nodes, split the edge.
  if (isa<PHINode>(II->getUnwindDest()->begin())) {
    SplitCriticalEdge(II, 1, this);

    // If there are any phi nodes left, they must have a single predecessor.
    while (PHINode *PN = dyn_cast<PHINode>(II->getUnwindDest()->begin())) {
      PN->replaceAllUsesWith(PN->getIncomingValue(0));
      PN->eraseFromParent();
    }
  }

  // Insert a store of the invoke num before the invoke and store zero into the
  // location afterward.  The store before the call is volatile so it cannot
  // be moved or deleted across the (possibly longjmp'ing) call.
  new StoreInst(InvokeNoC, InvokeNum, true, II);  // volatile

  BasicBlock::iterator NI = II->getNormalDest()->getFirstNonPHI();
  // nonvolatile.
  new StoreInst(Constant::getNullValue(Type::getInt32Ty(II->getContext())),
                InvokeNum, false, NI);

  // Add a switch case to our unwind block.
  CatchSwitch->addCase(InvokeNoC, II->getUnwindDest());

  // Insert a normal call instruction, skipping the three non-argument invoke
  // operands (see insertCheapEHSupport).
  std::vector<Value*> CallArgs(II->op_begin()+3, II->op_end());
  CallInst *NewCall = CallInst::Create(II->getCalledValue(),
                                       CallArgs.begin(), CallArgs.end(), "",
                                       II);
  NewCall->takeName(II);
  NewCall->setCallingConv(II->getCallingConv());
  NewCall->setAttributes(II->getAttributes());
  II->replaceAllUsesWith(NewCall);

  // Replace the invoke with an uncond branch.
  BranchInst::Create(II->getNormalDest(), NewCall->getParent());
  II->eraseFromParent();
}
  271. /// MarkBlocksLiveIn - Insert BB and all of its predescessors into LiveBBs until
  272. /// we reach blocks we've already seen.
  273. static void MarkBlocksLiveIn(BasicBlock *BB, std::set<BasicBlock*> &LiveBBs) {
  274. if (!LiveBBs.insert(BB).second) return; // already been here.
  275. for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
  276. MarkBlocksLiveIn(*PI, LiveBBs);
  277. }
  278. // First thing we need to do is scan the whole function for values that are
  279. // live across unwind edges. Each value that is live across an unwind edge
  280. // we spill into a stack location, guaranteeing that there is nothing live
  281. // across the unwind edge. This process also splits all critical edges
  282. // coming out of invoke's.
  283. void LowerInvoke::
  284. splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
  285. // First step, split all critical edges from invoke instructions.
  286. for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
  287. InvokeInst *II = Invokes[i];
  288. SplitCriticalEdge(II, 0, this);
  289. SplitCriticalEdge(II, 1, this);
  290. assert(!isa<PHINode>(II->getNormalDest()) &&
  291. !isa<PHINode>(II->getUnwindDest()) &&
  292. "critical edge splitting left single entry phi nodes?");
  293. }
  294. Function *F = Invokes.back()->getParent()->getParent();
  295. // To avoid having to handle incoming arguments specially, we lower each arg
  296. // to a copy instruction in the entry block. This ensures that the argument
  297. // value itself cannot be live across the entry block.
  298. BasicBlock::iterator AfterAllocaInsertPt = F->begin()->begin();
  299. while (isa<AllocaInst>(AfterAllocaInsertPt) &&
  300. isa<ConstantInt>(cast<AllocaInst>(AfterAllocaInsertPt)->getArraySize()))
  301. ++AfterAllocaInsertPt;
  302. for (Function::arg_iterator AI = F->arg_begin(), E = F->arg_end();
  303. AI != E; ++AI) {
  304. // This is always a no-op cast because we're casting AI to AI->getType() so
  305. // src and destination types are identical. BitCast is the only possibility.
  306. CastInst *NC = new BitCastInst(
  307. AI, AI->getType(), AI->getName()+".tmp", AfterAllocaInsertPt);
  308. AI->replaceAllUsesWith(NC);
  309. // Normally its is forbidden to replace a CastInst's operand because it
  310. // could cause the opcode to reflect an illegal conversion. However, we're
  311. // replacing it here with the same value it was constructed with to simply
  312. // make NC its user.
  313. NC->setOperand(0, AI);
  314. }
  315. // Finally, scan the code looking for instructions with bad live ranges.
  316. for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
  317. for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E; ++II) {
  318. // Ignore obvious cases we don't have to handle. In particular, most
  319. // instructions either have no uses or only have a single use inside the
  320. // current block. Ignore them quickly.
  321. Instruction *Inst = II;
  322. if (Inst->use_empty()) continue;
  323. if (Inst->hasOneUse() &&
  324. cast<Instruction>(Inst->use_back())->getParent() == BB &&
  325. !isa<PHINode>(Inst->use_back())) continue;
  326. // If this is an alloca in the entry block, it's not a real register
  327. // value.
  328. if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
  329. if (isa<ConstantInt>(AI->getArraySize()) && BB == F->begin())
  330. continue;
  331. // Avoid iterator invalidation by copying users to a temporary vector.
  332. std::vector<Instruction*> Users;
  333. for (Value::use_iterator UI = Inst->use_begin(), E = Inst->use_end();
  334. UI != E; ++UI) {
  335. Instruction *User = cast<Instruction>(*UI);
  336. if (User->getParent() != BB || isa<PHINode>(User))
  337. Users.push_back(User);
  338. }
  339. // Scan all of the uses and see if the live range is live across an unwind
  340. // edge. If we find a use live across an invoke edge, create an alloca
  341. // and spill the value.
  342. std::set<InvokeInst*> InvokesWithStoreInserted;
  343. // Find all of the blocks that this value is live in.
  344. std::set<BasicBlock*> LiveBBs;
  345. LiveBBs.insert(Inst->getParent());
  346. while (!Users.empty()) {
  347. Instruction *U = Users.back();
  348. Users.pop_back();
  349. if (!isa<PHINode>(U)) {
  350. MarkBlocksLiveIn(U->getParent(), LiveBBs);
  351. } else {
  352. // Uses for a PHI node occur in their predecessor block.
  353. PHINode *PN = cast<PHINode>(U);
  354. for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
  355. if (PN->getIncomingValue(i) == Inst)
  356. MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
  357. }
  358. }
  359. // Now that we know all of the blocks that this thing is live in, see if
  360. // it includes any of the unwind locations.
  361. bool NeedsSpill = false;
  362. for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
  363. BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
  364. if (UnwindBlock != BB && LiveBBs.count(UnwindBlock)) {
  365. NeedsSpill = true;
  366. }
  367. }
  368. // If we decided we need a spill, do it.
  369. if (NeedsSpill) {
  370. ++NumSpilled;
  371. DemoteRegToStack(*Inst, true);
  372. }
  373. }
  374. }
// insertExpensiveEHSupport - Lower EH under the 'expensive' model: install a
// setjmp in the entry block of any function containing invokes, maintain a
// global linked list of active jump buffers (JBListHead), dispatch longjmp
// arrivals through a switch on the currently-executing invoke number, and
// turn each unwind into a longjmp to the innermost buffer (or abort if the
// list is empty).  Returns true if the function was changed.
bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  std::vector<ReturnInst*> Returns;
  std::vector<UnwindInst*> Unwinds;
  std::vector<InvokeInst*> Invokes;

  // Collect every return, invoke, and unwind terminator in the function.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
      // Remember all return instructions in case we insert an invoke into this
      // function.
      Returns.push_back(RI);
    } else if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      Invokes.push_back(II);
    } else if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
      Unwinds.push_back(UI);
    }

  if (Unwinds.empty() && Invokes.empty()) return false;

  NumInvokes += Invokes.size();
  NumUnwinds += Unwinds.size();

  // TODO: This is not an optimal way to do this.  In particular, this always
  // inserts setjmp calls into the entries of functions with invoke instructions
  // even though there are possibly paths through the function that do not
  // execute any invokes.  In particular, for functions with early exits, e.g.
  // the 'addMove' method in hexxagon, it would be nice to not have to do the
  // setjmp stuff on the early exit path.  This requires a bit of dataflow, but
  // would not be too hard to do.

  // If we have an invoke instruction, insert a setjmp that dominates all
  // invokes.  After the setjmp, use a cond branch that goes to the original
  // code path on zero, and to a designated 'catch' block of nonzero.
  Value *OldJmpBufPtr = 0;
  if (!Invokes.empty()) {
    // First thing we need to do is scan the whole function for values that are
    // live across unwind edges.  Each value that is live across an unwind edge
    // we spill into a stack location, guaranteeing that there is nothing live
    // across the unwind edge.  This process also splits all critical edges
    // coming out of invoke's.
    splitLiveRangesLiveAcrossInvokes(Invokes);

    BasicBlock *EntryBB = F.begin();

    // Create an alloca for the incoming jump buffer ptr and the new jump buffer
    // that needs to be restored on all exits from the function.  This is an
    // alloca because the value needs to be live across invokes.
    unsigned Align = TLI ? TLI->getJumpBufAlignment() : 0;
    AllocaInst *JmpBuf =
      new AllocaInst(JBLinkTy, 0, Align,
                     "jblink", F.begin()->begin());

    // GEP to field 1 of the JBLinkTy node: the 'next' pointer slot where the
    // previous list head is saved.
    std::vector<Value*> Idx;
    Idx.push_back(Constant::getNullValue(Type::getInt32Ty(F.getContext())));
    Idx.push_back(ConstantInt::get(Type::getInt32Ty(F.getContext()), 1));
    OldJmpBufPtr = GetElementPtrInst::Create(JmpBuf, Idx.begin(), Idx.end(),
                                             "OldBuf",
                                             EntryBB->getTerminator());

    // Copy the JBListHead to the alloca.
    Value *OldBuf = new LoadInst(JBListHead, "oldjmpbufptr", true,
                                 EntryBB->getTerminator());
    new StoreInst(OldBuf, OldJmpBufPtr, true, EntryBB->getTerminator());

    // Add the new jumpbuf to the list.
    new StoreInst(JmpBuf, JBListHead, true, EntryBB->getTerminator());

    // Create the catch block.  The catch block is basically a big switch
    // statement that goes to all of the invoke catch blocks.
    BasicBlock *CatchBB =
            BasicBlock::Create(F.getContext(), "setjmp.catch", &F);

    // Create an alloca which keeps track of which invoke is currently
    // executing.  For normal calls it contains zero.
    AllocaInst *InvokeNum = new AllocaInst(Type::getInt32Ty(F.getContext()), 0,
                                           "invokenum", EntryBB->begin());
    new StoreInst(ConstantInt::get(Type::getInt32Ty(F.getContext()), 0),
                  InvokeNum, true, EntryBB->getTerminator());

    // Insert a load in the Catch block, and a switch on its value.  By default,
    // we go to a block that just does an unwind (which is the correct action
    // for a standard call).
    BasicBlock *UnwindBB = BasicBlock::Create(F.getContext(), "unwindbb", &F);
    Unwinds.push_back(new UnwindInst(F.getContext(), UnwindBB));

    Value *CatchLoad = new LoadInst(InvokeNum, "invoke.num", true, CatchBB);
    SwitchInst *CatchSwitch =
      SwitchInst::Create(CatchLoad, UnwindBB, Invokes.size(), CatchBB);

    // Now that things are set up, insert the setjmp call itself.

    // Split the entry block to insert the conditional branch for the setjmp.
    BasicBlock *ContBlock = EntryBB->splitBasicBlock(EntryBB->getTerminator(),
                                                     "setjmp.cont");

    // GEP to field 0 of the node (the jmp_buf itself) for the setjmp call.
    Idx[1] = ConstantInt::get(Type::getInt32Ty(F.getContext()), 0);
    Value *JmpBufPtr = GetElementPtrInst::Create(JmpBuf, Idx.begin(), Idx.end(),
                                                 "TheJmpBuf",
                                                 EntryBB->getTerminator());
    JmpBufPtr = new BitCastInst(JmpBufPtr,
                                Type::getInt8PtrTy(F.getContext()),
                                "tmp", EntryBB->getTerminator());
    Value *SJRet = CallInst::Create(SetJmpFn, JmpBufPtr, "sjret",
                                    EntryBB->getTerminator());

    // Compare the return value to zero.
    Value *IsNormal = new ICmpInst(EntryBB->getTerminator(),
                                   ICmpInst::ICMP_EQ, SJRet,
                                   Constant::getNullValue(SJRet->getType()),
                                   "notunwind");
    // Nuke the uncond branch.
    EntryBB->getTerminator()->eraseFromParent();

    // Put in a new condbranch in its place.
    BranchInst::Create(ContBlock, CatchBB, IsNormal, EntryBB);

    // At this point, we are all set up, rewrite each invoke instruction.
    // Invoke numbers start at 1; 0 means "no invoke executing".
    for (unsigned i = 0, e = Invokes.size(); i != e; ++i)
      rewriteExpensiveInvoke(Invokes[i], i+1, InvokeNum, CatchSwitch);
  }

  // We know that there is at least one unwind.

  // Create three new blocks, the block to load the jmpbuf ptr and compare
  // against null, the block to do the longjmp, and the error block for if it
  // is null.  Add them at the end of the function because they are not hot.
  BasicBlock *UnwindHandler = BasicBlock::Create(F.getContext(),
                                                "dounwind", &F);
  BasicBlock *UnwindBlock = BasicBlock::Create(F.getContext(), "unwind", &F);
  BasicBlock *TermBlock = BasicBlock::Create(F.getContext(), "unwinderror", &F);

  // If this function contains an invoke, restore the old jumpbuf ptr.
  Value *BufPtr;
  if (OldJmpBufPtr) {
    // Before the return, insert a copy from the saved value to the new value.
    BufPtr = new LoadInst(OldJmpBufPtr, "oldjmpbufptr", UnwindHandler);
    new StoreInst(BufPtr, JBListHead, UnwindHandler);
  } else {
    BufPtr = new LoadInst(JBListHead, "ehlist", UnwindHandler);
  }

  // Load the JBList, if it's null, then there was no catch!
  Value *NotNull = new ICmpInst(*UnwindHandler, ICmpInst::ICMP_NE, BufPtr,
                                Constant::getNullValue(BufPtr->getType()),
                                "notnull");
  BranchInst::Create(UnwindBlock, TermBlock, NotNull, UnwindHandler);

  // Create the block to do the longjmp.
  // Get a pointer to the jmpbuf and longjmp.  Idx is reused: Idx[0] becomes
  // the i8* jmp_buf argument and Idx[1] the constant 1 longjmp value.
  std::vector<Value*> Idx;
  Idx.push_back(Constant::getNullValue(Type::getInt32Ty(F.getContext())));
  Idx.push_back(ConstantInt::get(Type::getInt32Ty(F.getContext()), 0));
  Idx[0] = GetElementPtrInst::Create(BufPtr, Idx.begin(), Idx.end(), "JmpBuf",
                                     UnwindBlock);
  Idx[0] = new BitCastInst(Idx[0],
                           Type::getInt8PtrTy(F.getContext()),
                           "tmp", UnwindBlock);
  Idx[1] = ConstantInt::get(Type::getInt32Ty(F.getContext()), 1);
  CallInst::Create(LongJmpFn, Idx.begin(), Idx.end(), "", UnwindBlock);
  new UnreachableInst(F.getContext(), UnwindBlock);

  // Set up the term block ("throw without a catch").
  new UnreachableInst(F.getContext(), TermBlock);

  // Insert a new call to write(2, AbortMessage, AbortMessageLength);
  writeAbortMessage(TermBlock->getTerminator());

  // Insert a call to abort()
  CallInst::Create(AbortFn, "",
                   TermBlock->getTerminator())->setTailCall();

  // Replace all unwinds with a branch to the unwind handler.
  for (unsigned i = 0, e = Unwinds.size(); i != e; ++i) {
    BranchInst::Create(UnwindHandler, Unwinds[i]);
    Unwinds[i]->eraseFromParent();
  }

  // Finally, for any returns from this function, if this function contains an
  // invoke, restore the old jmpbuf pointer to its input value.
  if (OldJmpBufPtr) {
    for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
      ReturnInst *R = Returns[i];

      // Before the return, insert a copy from the saved value to the new value.
      Value *OldBuf = new LoadInst(OldJmpBufPtr, "oldjmpbufptr", true, R);
      new StoreInst(OldBuf, JBListHead, true, R);
    }
  }

  return true;
}
  533. bool LowerInvoke::runOnFunction(Function &F) {
  534. if (ExpensiveEHSupport)
  535. return insertExpensiveEHSupport(F);
  536. else
  537. return insertCheapEHSupport(F);
  538. }