//===- LowerInvoke.cpp - Eliminate Invoke & Unwind instructions -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This transformation is designed for use by code generators which do not yet
// support stack unwinding. This pass supports two models of exception handling
// lowering, the 'cheap' support and the 'expensive' support.
//
// 'Cheap' exception handling support gives the program the ability to execute
// any program which does not "throw an exception", by turning 'invoke'
// instructions into calls and by turning 'unwind' instructions into calls to
// abort(). If the program does dynamically use the unwind instruction, the
// program will print a message then abort.
//
// 'Expensive' exception handling support gives the full exception handling
// support to the program at the cost of making the 'invoke' instruction
// really expensive. It basically inserts setjmp/longjmp calls to emulate the
// exception handling as necessary.
//
// Because the 'expensive' support slows down programs a lot, and EH is only
// used for a subset of the programs, it must be specifically enabled by an
// option.
//
// Note that after this pass runs the CFG is not entirely accurate (exceptional
// control flow edges are not correct anymore) so only very simple things should
// be done after the lowerinvoke pass has run (like generation of native code).
// This should not be used as a general purpose "my LLVM-to-LLVM pass doesn't
// support the invoke instruction yet" lowering pass.
//
//===----------------------------------------------------------------------===//
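
// Editorial sketch (not normative) of what the two models produce:
//
//   Cheap:      every 'invoke' becomes a plain call followed by a branch to
//               its normal destination; every 'unwind' becomes
//               write(2, msg, len) + abort().
//
//   Expensive:  the entry of a function that contains invokes pushes a jmp_buf
//               node onto a global list and calls llvm.setjmp; each invoke
//               records its number, calls, and branches; 'unwind' pops the
//               list and calls llvm.longjmp back to the innermost active
//               setjmp, whose catch block switches on the recorded invoke
//               number to reach the right landing block.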
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Instructions.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/CommandLine.h"
#include <csetjmp>
#include <set>       // std::set, used for live-block bookkeeping below
#include <vector>    // std::vector, used throughout
using namespace llvm;

namespace {
  Statistic<> NumInvokes("lowerinvoke", "Number of invokes replaced");
  Statistic<> NumUnwinds("lowerinvoke", "Number of unwinds replaced");
  Statistic<> NumSpilled("lowerinvoke",
                         "Number of registers live across unwind edges");
  cl::opt<bool> ExpensiveEHSupport("enable-correct-eh-support",
   cl::desc("Make the -lowerinvoke pass insert expensive, but correct, EH code"));

  class LowerInvoke : public FunctionPass {
    // Used for both models.
    Function *WriteFn;
    Function *AbortFn;
    Value *AbortMessage;
    unsigned AbortMessageLength;

    // Used for expensive EH support.
    const Type *JBLinkTy;
    GlobalVariable *JBListHead;
    Function *SetJmpFn, *LongJmpFn;
  public:
    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);
  private:
    void createAbortMessage();
    void writeAbortMessage(Instruction *IB);
    bool insertCheapEHSupport(Function &F);
    void splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes);
    void rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
                                AllocaInst *InvokeNum, SwitchInst *CatchSwitch);
    bool insertExpensiveEHSupport(Function &F);
  };

  RegisterOpt<LowerInvoke>
  X("lowerinvoke", "Lower invoke and unwind, for unwindless code generators");
}

const PassInfo *llvm::LowerInvokePassID = X.getPassInfo();

// Public Interface To the LowerInvoke pass.
FunctionPass *llvm::createLowerInvokePass() { return new LowerInvoke(); }
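
// Illustrative use from a target or tool (assumed, not part of this file):
//   PM.add(createLowerInvokePass());
// run before code generation for targets that cannot yet unwind the stack.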

// doInitialization - Make sure that there is a prototype for abort in the
// current module.
bool LowerInvoke::doInitialization(Module &M) {
  const Type *VoidPtrTy = PointerType::get(Type::SByteTy);
  AbortMessage = 0;
  if (ExpensiveEHSupport) {
    // Insert a type for the linked list of jump buffers. Unfortunately, we
    // don't know the size of the target's setjmp buffer, so we make a guess.
    // If this guess turns out to be too small, bad stuff could happen.
    unsigned JmpBufSize = 200;  // PPC has 192 words
    assert(sizeof(jmp_buf) <= JmpBufSize*sizeof(void*) &&
       "LowerInvoke doesn't know about targets with jmp_buf size > 200 words!");
    const Type *JmpBufTy = ArrayType::get(VoidPtrTy, JmpBufSize);

    { // The type is recursive, so use a type holder.
      std::vector<const Type*> Elements;
      Elements.push_back(JmpBufTy);
      OpaqueType *OT = OpaqueType::get();
      Elements.push_back(PointerType::get(OT));
      PATypeHolder JBLType(StructType::get(Elements));
      OT->refineAbstractTypeTo(JBLType.get());  // Complete the cycle.
      JBLinkTy = JBLType.get();
      M.addTypeName("llvm.sjljeh.jmpbufty", JBLinkTy);
    }

    const Type *PtrJBList = PointerType::get(JBLinkTy);

    // Now that we've done that, insert the jmpbuf list head global, unless it
    // already exists.
    if (!(JBListHead = M.getGlobalVariable("llvm.sjljeh.jblist", PtrJBList)))
      JBListHead = new GlobalVariable(PtrJBList, false,
                                      GlobalValue::LinkOnceLinkage,
                                      Constant::getNullValue(PtrJBList),
                                      "llvm.sjljeh.jblist", &M);
    SetJmpFn = M.getOrInsertFunction("llvm.setjmp", Type::IntTy,
                                     PointerType::get(JmpBufTy), (Type *)0);
    LongJmpFn = M.getOrInsertFunction("llvm.longjmp", Type::VoidTy,
                                      PointerType::get(JmpBufTy),
                                      Type::IntTy, (Type *)0);
  }

  // We need the 'write' and 'abort' functions for both models.
  AbortFn = M.getOrInsertFunction("abort", Type::VoidTy, (Type *)0);

  // Unfortunately, 'write' can end up being prototyped in several different
  // ways. If the user defines a three (or more) operand function named 'write'
  // we will use their prototype. We _do not_ want to insert another instance
  // of a write prototype, because we don't know that the funcresolve pass will
  // run after us. If there is a definition of a write function, but it's not
  // suitable for our uses, we just don't emit write calls. If there is no
  // write prototype at all, we just add one.
  if (Function *WF = M.getNamedFunction("write")) {
    if (WF->getFunctionType()->getNumParams() > 3 ||
        WF->getFunctionType()->isVarArg())
      WriteFn = WF;
    else
      WriteFn = 0;
  } else {
    WriteFn = M.getOrInsertFunction("write", Type::VoidTy, Type::IntTy,
                                    VoidPtrTy, Type::IntTy, (Type *)0);
  }
  return true;
}
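
// Illustrative note on the types created above (assumed pretty-printed form,
// not verified against this revision's printer): the recursive node amounts to
//   %llvm.sjljeh.jmpbufty = type { [200 x sbyte*], %llvm.sjljeh.jmpbufty* }
// i.e. a fixed-size jump buffer plus a link to the enclosing handler's node,
// and the global llvm.sjljeh.jblist points at the innermost active node.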

void LowerInvoke::createAbortMessage() {
  Module &M = *WriteFn->getParent();
  if (ExpensiveEHSupport) {
    // The abort message for expensive EH support tells the user that the
    // program 'unwound' without an 'invoke' instruction.
    Constant *Msg =
      ConstantArray::get("ERROR: Exception thrown, but not caught!\n");
    AbortMessageLength = Msg->getNumOperands()-1;  // don't include \0

    GlobalVariable *MsgGV = new GlobalVariable(Msg->getType(), true,
                                               GlobalValue::InternalLinkage,
                                               Msg, "abortmsg", &M);
    std::vector<Constant*> GEPIdx(2, Constant::getNullValue(Type::IntTy));
    AbortMessage = ConstantExpr::getGetElementPtr(MsgGV, GEPIdx);
  } else {
    // The abort message for cheap EH support tells the user that EH is not
    // enabled.
    Constant *Msg =
      ConstantArray::get("Exception handler needed, but not enabled. Recompile"
                         " program with -enable-correct-eh-support.\n");
    AbortMessageLength = Msg->getNumOperands()-1;  // don't include \0

    GlobalVariable *MsgGV = new GlobalVariable(Msg->getType(), true,
                                               GlobalValue::InternalLinkage,
                                               Msg, "abortmsg", &M);
    std::vector<Constant*> GEPIdx(2, Constant::getNullValue(Type::IntTy));
    AbortMessage = ConstantExpr::getGetElementPtr(MsgGV, GEPIdx);
  }
}

void LowerInvoke::writeAbortMessage(Instruction *IB) {
  if (WriteFn) {
    if (AbortMessage == 0) createAbortMessage();

    // These are the arguments we WANT...
    std::vector<Value*> Args;
    Args.push_back(ConstantInt::get(Type::IntTy, 2));
    Args.push_back(AbortMessage);
    Args.push_back(ConstantInt::get(Type::IntTy, AbortMessageLength));

    // If the actual declaration of write disagrees, insert casts as
    // appropriate.
    const FunctionType *FT = WriteFn->getFunctionType();
    unsigned NumArgs = FT->getNumParams();
    for (unsigned i = 0; i != 3; ++i)
      if (i < NumArgs && FT->getParamType(i) != Args[i]->getType())
        Args[i] = ConstantExpr::getCast(cast<Constant>(Args[i]),
                                        FT->getParamType(i));

    (new CallInst(WriteFn, Args, "", IB))->setTailCall();
  }
}

bool LowerInvoke::insertCheapEHSupport(Function &F) {
  bool Changed = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      // Insert a normal call instruction...
      std::string Name = II->getName(); II->setName("");
      CallInst *NewCall = new CallInst(II->getCalledValue(),
                                       std::vector<Value*>(II->op_begin()+3,
                                                           II->op_end()),
                                       Name, II);
      NewCall->setCallingConv(II->getCallingConv());
      II->replaceAllUsesWith(NewCall);

      // Insert an unconditional branch to the normal destination.
      new BranchInst(II->getNormalDest(), II);

      // Remove any PHI node entries from the exception destination.
      II->getUnwindDest()->removePredecessor(BB);

      // Remove the invoke instruction now.
      BB->getInstList().erase(II);

      ++NumInvokes; Changed = true;
    } else if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
      // Insert a new call to write(2, AbortMessage, AbortMessageLength);
      writeAbortMessage(UI);

      // Insert a call to abort()
      (new CallInst(AbortFn, std::vector<Value*>(), "", UI))->setTailCall();

      // Insert a return instruction. This really should be a "barrier", as it
      // is unreachable.
      new ReturnInst(F.getReturnType() == Type::VoidTy ? 0 :
                     Constant::getNullValue(F.getReturnType()), UI);

      // Remove the unwind instruction now.
      BB->getInstList().erase(UI);

      ++NumUnwinds; Changed = true;
    }
  return Changed;
}
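
// Illustrative sketch of the cheap rewrite (syntax approximate, not taken from
// this revision's LangRef):
//     %r = invoke ... to label %ok unwind label %handler
// becomes
//     %r = call ...
//     br label %ok
// and %handler simply loses this block as a predecessor. The
// std::vector<Value*>(II->op_begin()+3, II->op_end()) above copies only the
// call arguments, since an invoke's first three operands are the callee and
// its two destination blocks.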

/// rewriteExpensiveInvoke - Insert code and hack the function to replace the
/// specified invoke instruction with a call.
void LowerInvoke::rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
                                         AllocaInst *InvokeNum,
                                         SwitchInst *CatchSwitch) {
  ConstantUInt *InvokeNoC = ConstantUInt::get(Type::UIntTy, InvokeNo);

  // Insert a store of the invoke num before the invoke and store zero into the
  // location afterward.
  new StoreInst(InvokeNoC, InvokeNum, true, II);  // volatile

  BasicBlock::iterator NI = II->getNormalDest()->begin();
  while (isa<PHINode>(NI)) ++NI;
  new StoreInst(Constant::getNullValue(Type::UIntTy), InvokeNum, false,
                NI);  // nonvolatile.

  // Add a switch case to our unwind block.
  CatchSwitch->addCase(InvokeNoC, II->getUnwindDest());

  // Insert a normal call instruction.
  std::string Name = II->getName(); II->setName("");
  CallInst *NewCall = new CallInst(II->getCalledValue(),
                                   std::vector<Value*>(II->op_begin()+3,
                                                       II->op_end()),
                                   Name, II);
  NewCall->setCallingConv(II->getCallingConv());
  II->replaceAllUsesWith(NewCall);

  // Replace the invoke with an uncond branch.
  new BranchInst(II->getNormalDest(), NewCall->getParent());
  II->eraseFromParent();
}
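
// Illustrative result of the rewrite above for the invoke numbered N
// (approximate, using the names created in insertExpensiveEHSupport below):
//     volatile store N to %invokenum
//     %r = call ...                     ; original callee and arguments
//     br label %normal.dest
// plus a non-volatile "store 0 to %invokenum" after the PHI nodes of the
// normal destination, and a new "case N -> original unwind dest" entry on the
// switch in setjmp.catch.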

/// MarkBlocksLiveIn - Insert BB and all of its predecessors into LiveBBs until
/// we reach blocks we've already seen.
static void MarkBlocksLiveIn(BasicBlock *BB, std::set<BasicBlock*> &LiveBBs) {
  if (!LiveBBs.insert(BB).second) return;  // already been here.

  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
    MarkBlocksLiveIn(*PI, LiveBBs);
}

// First thing we need to do is scan the whole function for values that are
// live across unwind edges. Each value that is live across an unwind edge
// we spill into a stack location, guaranteeing that there is nothing live
// across the unwind edge. This process also splits all critical edges
// coming out of invokes.
void LowerInvoke::
splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
  // First step, split all critical edges from invoke instructions.
  for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
    InvokeInst *II = Invokes[i];
    SplitCriticalEdge(II, 0, this);
    SplitCriticalEdge(II, 1, this);
    assert(!isa<PHINode>(II->getNormalDest()->begin()) &&
           !isa<PHINode>(II->getUnwindDest()->begin()) &&
           "critical edge splitting left single entry phi nodes?");
  }

  Function *F = Invokes.back()->getParent()->getParent();

  // To avoid having to handle incoming arguments specially, we lower each arg
  // to a copy instruction in the entry block. This ensures that the argument
  // value itself cannot be live across the entry block.
  BasicBlock::iterator AfterAllocaInsertPt = F->begin()->begin();
  while (isa<AllocaInst>(AfterAllocaInsertPt) &&
       isa<ConstantInt>(cast<AllocaInst>(AfterAllocaInsertPt)->getArraySize()))
    ++AfterAllocaInsertPt;
  for (Function::arg_iterator AI = F->arg_begin(), E = F->arg_end();
       AI != E; ++AI) {
    CastInst *NC = new CastInst(AI, AI->getType(), AI->getName()+".tmp",
                                AfterAllocaInsertPt);
    AI->replaceAllUsesWith(NC);
    // RAUW also rewrote the cast's own operand, so point it back at the arg.
    NC->setOperand(0, AI);
  }

  // Finally, scan the code looking for instructions with bad live ranges.
  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E; ++II) {
      // Ignore obvious cases we don't have to handle. In particular, most
      // instructions either have no uses or only have a single use inside the
      // current block. Ignore them quickly.
      Instruction *Inst = II;
      if (Inst->use_empty()) continue;
      if (Inst->hasOneUse() &&
          cast<Instruction>(Inst->use_back())->getParent() == BB &&
          !isa<PHINode>(Inst->use_back())) continue;

      // If this is an alloca in the entry block, it's not a real register
      // value.
      if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
        if (isa<ConstantInt>(AI->getArraySize()) && BB == F->begin())
          continue;

      // Avoid iterator invalidation by copying users to a temporary vector.
      std::vector<Instruction*> Users;
      for (Value::use_iterator UI = Inst->use_begin(), E = Inst->use_end();
           UI != E; ++UI) {
        Instruction *User = cast<Instruction>(*UI);
        if (User->getParent() != BB || isa<PHINode>(User))
          Users.push_back(User);
      }

      // Scan all of the uses and see if the live range is live across an
      // unwind edge. If we find a use live across an invoke edge, create an
      // alloca and spill the value.
      AllocaInst *SpillLoc = 0;
      std::set<InvokeInst*> InvokesWithStoreInserted;

      // Find all of the blocks that this value is live in.
      std::set<BasicBlock*> LiveBBs;
      LiveBBs.insert(Inst->getParent());
      while (!Users.empty()) {
        Instruction *U = Users.back();
        Users.pop_back();

        if (!isa<PHINode>(U)) {
          MarkBlocksLiveIn(U->getParent(), LiveBBs);
        } else {
          // Uses for a PHI node occur in their predecessor block.
          PHINode *PN = cast<PHINode>(U);
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
            if (PN->getIncomingValue(i) == Inst)
              MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
        }
      }

      // Now that we know all of the blocks that this thing is live in, see if
      // it includes any of the unwind locations.
      bool NeedsSpill = false;
      for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
        BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
        if (UnwindBlock != BB && LiveBBs.count(UnwindBlock)) {
          NeedsSpill = true;
        }
      }

      // If we decided we need a spill, do it.
      if (NeedsSpill) {
        ++NumSpilled;
        DemoteRegToStack(*Inst, true);
      }
    }
}
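
// Note on the spill above (descriptive of the utility's general behavior, not
// verified against this revision): DemoteRegToStack gives the instruction a
// dedicated stack slot, stores the value into it right after the definition,
// and rewrites the uses to reload from that slot, so no virtual register stays
// live across the unwind edge.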

bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  std::vector<ReturnInst*> Returns;
  std::vector<UnwindInst*> Unwinds;
  std::vector<InvokeInst*> Invokes;

  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
      // Remember all return instructions in case we insert an invoke into this
      // function.
      Returns.push_back(RI);
    } else if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      Invokes.push_back(II);
    } else if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
      Unwinds.push_back(UI);
    }

  if (Unwinds.empty() && Invokes.empty()) return false;

  NumInvokes += Invokes.size();
  NumUnwinds += Unwinds.size();

  // TODO: This is not an optimal way to do this. In particular, this always
  // inserts setjmp calls into the entries of functions with invoke instructions
  // even though there are possibly paths through the function that do not
  // execute any invokes. In particular, for functions with early exits, e.g.
  // the 'addMove' method in hexxagon, it would be nice to not have to do the
  // setjmp stuff on the early exit path. This requires a bit of dataflow, but
  // would not be too hard to do.

  // If we have an invoke instruction, insert a setjmp that dominates all
  // invokes. After the setjmp, use a cond branch that goes to the original
  // code path on zero, and to a designated 'catch' block on nonzero.
  Value *OldJmpBufPtr = 0;
  if (!Invokes.empty()) {
    // First thing we need to do is scan the whole function for values that are
    // live across unwind edges. Each value that is live across an unwind edge
    // we spill into a stack location, guaranteeing that there is nothing live
    // across the unwind edge. This process also splits all critical edges
    // coming out of invokes.
    splitLiveRangesLiveAcrossInvokes(Invokes);

    BasicBlock *EntryBB = F.begin();

    // Create an alloca for the incoming jump buffer ptr and the new jump buffer
    // that needs to be restored on all exits from the function. This is an
    // alloca because the value needs to be live across invokes.
    AllocaInst *JmpBuf =
      new AllocaInst(JBLinkTy, 0, "jblink", F.begin()->begin());

    std::vector<Value*> Idx;
    Idx.push_back(Constant::getNullValue(Type::IntTy));
    Idx.push_back(ConstantUInt::get(Type::UIntTy, 1));
    OldJmpBufPtr = new GetElementPtrInst(JmpBuf, Idx, "OldBuf",
                                         EntryBB->getTerminator());

    // Copy the JBListHead to the alloca.
    Value *OldBuf = new LoadInst(JBListHead, "oldjmpbufptr", true,
                                 EntryBB->getTerminator());
    new StoreInst(OldBuf, OldJmpBufPtr, true, EntryBB->getTerminator());

    // Add the new jumpbuf to the list.
    new StoreInst(JmpBuf, JBListHead, true, EntryBB->getTerminator());

    // Create the catch block. The catch block is basically a big switch
    // statement that goes to all of the invoke catch blocks.
    BasicBlock *CatchBB = new BasicBlock("setjmp.catch", &F);

    // Create an alloca which keeps track of which invoke is currently
    // executing. For normal calls it contains zero.
    AllocaInst *InvokeNum = new AllocaInst(Type::UIntTy, 0, "invokenum",
                                           EntryBB->begin());
    new StoreInst(ConstantInt::get(Type::UIntTy, 0), InvokeNum, true,
                  EntryBB->getTerminator());

    // Insert a load in the Catch block, and a switch on its value. By default,
    // we go to a block that just does an unwind (which is the correct action
    // for a standard call).
    BasicBlock *UnwindBB = new BasicBlock("unwindbb", &F);
    Unwinds.push_back(new UnwindInst(UnwindBB));

    Value *CatchLoad = new LoadInst(InvokeNum, "invoke.num", true, CatchBB);
    SwitchInst *CatchSwitch =
      new SwitchInst(CatchLoad, UnwindBB, Invokes.size(), CatchBB);

    // Now that things are set up, insert the setjmp call itself.

    // Split the entry block to insert the conditional branch for the setjmp.
    BasicBlock *ContBlock = EntryBB->splitBasicBlock(EntryBB->getTerminator(),
                                                     "setjmp.cont");

    Idx[1] = ConstantUInt::get(Type::UIntTy, 0);
    Value *JmpBufPtr = new GetElementPtrInst(JmpBuf, Idx, "TheJmpBuf",
                                             EntryBB->getTerminator());
    Value *SJRet = new CallInst(SetJmpFn, JmpBufPtr, "sjret",
                                EntryBB->getTerminator());

    // Compare the return value to zero.
    Value *IsNormal = BinaryOperator::createSetEQ(SJRet,
                                      Constant::getNullValue(SJRet->getType()),
                                      "notunwind", EntryBB->getTerminator());
    // Nuke the uncond branch.
    EntryBB->getTerminator()->eraseFromParent();

    // Put in a new condbranch in its place.
    new BranchInst(ContBlock, CatchBB, IsNormal, EntryBB);

    // At this point, we are all set up, rewrite each invoke instruction.
    for (unsigned i = 0, e = Invokes.size(); i != e; ++i)
      rewriteExpensiveInvoke(Invokes[i], i+1, InvokeNum, CatchSwitch);
  }

  // We know that there is at least one unwind.

  // Create three new blocks, the block to load the jmpbuf ptr and compare
  // against null, the block to do the longjmp, and the error block for if it
  // is null. Add them at the end of the function because they are not hot.
  BasicBlock *UnwindHandler = new BasicBlock("dounwind", &F);
  BasicBlock *UnwindBlock = new BasicBlock("unwind", &F);
  BasicBlock *TermBlock = new BasicBlock("unwinderror", &F);

  // If this function contains an invoke, restore the old jumpbuf ptr.
  Value *BufPtr;
  if (OldJmpBufPtr) {
    // Before the return, insert a copy from the saved value to the new value.
    BufPtr = new LoadInst(OldJmpBufPtr, "oldjmpbufptr", UnwindHandler);
    new StoreInst(BufPtr, JBListHead, UnwindHandler);
  } else {
    BufPtr = new LoadInst(JBListHead, "ehlist", UnwindHandler);
  }

  // Load the JBList, if it's null, then there was no catch!
  Value *NotNull = BinaryOperator::createSetNE(BufPtr,
                                     Constant::getNullValue(BufPtr->getType()),
                                     "notnull", UnwindHandler);
  new BranchInst(UnwindBlock, TermBlock, NotNull, UnwindHandler);

  // Create the block to do the longjmp.
  // Get a pointer to the jmpbuf and longjmp.
  std::vector<Value*> Idx;
  Idx.push_back(Constant::getNullValue(Type::IntTy));
  Idx.push_back(ConstantUInt::get(Type::UIntTy, 0));
  Idx[0] = new GetElementPtrInst(BufPtr, Idx, "JmpBuf", UnwindBlock);
  Idx[1] = ConstantInt::get(Type::IntTy, 1);
  new CallInst(LongJmpFn, Idx, "", UnwindBlock);
  new UnreachableInst(UnwindBlock);

  // Set up the term block ("throw without a catch").
  new UnreachableInst(TermBlock);

  // Insert a new call to write(2, AbortMessage, AbortMessageLength);
  writeAbortMessage(TermBlock->getTerminator());

  // Insert a call to abort()
  (new CallInst(AbortFn, std::vector<Value*>(), "",
                TermBlock->getTerminator()))->setTailCall();

  // Replace all unwinds with a branch to the unwind handler.
  for (unsigned i = 0, e = Unwinds.size(); i != e; ++i) {
    new BranchInst(UnwindHandler, Unwinds[i]);
    Unwinds[i]->eraseFromParent();
  }

  // Finally, for any returns from this function, if this function contains an
  // invoke, restore the old jmpbuf pointer to its input value.
  if (OldJmpBufPtr) {
    for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
      ReturnInst *R = Returns[i];

      // Before the return, insert a copy from the saved value to the new value.
      Value *OldBuf = new LoadInst(OldJmpBufPtr, "oldjmpbufptr", true, R);
      new StoreInst(OldBuf, JBListHead, true, R);
    }
  }

  return true;
}
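
// Summary sketch of the expensive lowering's shape (descriptive of the code
// above, using the names created there): the entry block links a fresh jmpbuf
// node onto llvm.sjljeh.jblist, calls llvm.setjmp, and branches to
// "setjmp.cont" when setjmp returns zero or to "setjmp.catch" when a longjmp
// lands here. "setjmp.catch" switches on the volatile "invokenum" slot to
// reach the unwind destination of whichever invoke was active, defaulting to
// "unwindbb", which unwinds further to the caller. Every original unwind
// branches to "dounwind", which pops the list and longjmps to the next
// handler, or falls into "unwinderror" (write + abort) when no handler
// remains. Returns restore the caller's list head before leaving the function.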

bool LowerInvoke::runOnFunction(Function &F) {
  if (ExpensiveEHSupport)
    return insertExpensiveEHSupport(F);
  else
    return insertCheapEHSupport(F);
}