CGCoroutine.cpp
//===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of coroutines.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/ADT/ScopeExit.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"

using namespace clang;
using namespace CodeGen;

using llvm::Value;
using llvm::BasicBlock;

namespace {
enum class AwaitKind { Init, Normal, Yield, Final };
static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield",
                                                       "final"};
}

struct clang::CodeGen::CGCoroData {
  // What the current await expression kind is and how many await/yield
  // expressions were encountered so far. These are used to generate pretty
  // labels for await expressions in LLVM IR.
  AwaitKind CurrentAwaitKind = AwaitKind::Init;
  unsigned AwaitNum = 0;
  unsigned YieldNum = 0;

  // How many co_return statements are in the coroutine. Used to decide whether
  // we need to add a co_return; equivalent at the end of the user-authored
  // body.
  unsigned CoreturnCount = 0;

  // A branch to this block is emitted when the coroutine needs to suspend.
  llvm::BasicBlock *SuspendBB = nullptr;

  // Stores the jump destination just before the coroutine memory is freed.
  // This is the destination that every suspend point jumps to for the cleanup
  // branch.
  CodeGenFunction::JumpDest CleanupJD;

  // Stores the jump destination just before the final suspend. The co_return
  // statements jump to this point after calling the return_xxx promise member.
  CodeGenFunction::JumpDest FinalJD;

  // Stores the llvm.coro.id emitted in the function so that we can supply it
  // as the first argument to the coro.begin, coro.alloc and coro.free
  // intrinsics. Note: llvm.coro.id returns a token that cannot be directly
  // expressed in a builtin.
  llvm::CallInst *CoroId = nullptr;

  // Stores the llvm.coro.begin emitted in the function so that we can replace
  // all coro.frame intrinsics with the direct SSA value of coro.begin that
  // returns the address of the coroutine frame of the current coroutine.
  llvm::CallInst *CoroBegin = nullptr;

  // Stores the last emitted coro.free for the deallocate expressions. We use
  // it to wrap dealloc code with if(auto mem = coro.free) dealloc(mem).
  llvm::CallInst *LastCoroFree = nullptr;

  // If coro.id came from the builtin, remember the expression to give a better
  // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
  // EmitCoroutineBody.
  CallExpr const *CoroIdExpr = nullptr;
};

// Defining these here allows us to keep CGCoroData private to this file.
clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}

static void createCoroData(CodeGenFunction &CGF,
                           CodeGenFunction::CGCoroInfo &CurCoro,
                           llvm::CallInst *CoroId,
                           CallExpr const *CoroIdExpr = nullptr) {
  if (CurCoro.Data) {
    if (CurCoro.Data->CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getLocStart(),
                    "only one __builtin_coro_id can be used in a function");
    else if (CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getLocStart(),
                    "__builtin_coro_id shall not be used in a C++ coroutine");
    else
      llvm_unreachable("EmitCoroutineBodyStatement called twice?");
    return;
  }

  CurCoro.Data = std::unique_ptr<CGCoroData>(new CGCoroData);
  CurCoro.Data->CoroId = CoroId;
  CurCoro.Data->CoroIdExpr = CoroIdExpr;
}

// Synthesize a pretty name for a suspend point.
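// For example, the initial and final suspend points use the fixed prefixes
// "init" and "final"; the first co_await in the body uses "await", the second
// "await2", and so on, and co_yield points use "yield", "yield2", etc. These
// prefixes become basic block names such as "await2.ready" and
// "await2.suspend" below.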
static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) {
  unsigned No = 0;
  switch (Kind) {
  case AwaitKind::Init:
  case AwaitKind::Final:
    break;
  case AwaitKind::Normal:
    No = ++Coro.AwaitNum;
    break;
  case AwaitKind::Yield:
    No = ++Coro.YieldNum;
    break;
  }
  SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]);
  if (No > 1) {
    Twine(No).toVector(Prefix);
  }
  return Prefix;
}
// Emit a suspend expression which roughly looks like:
//
//   auto && x = CommonExpr();
//   if (!x.await_ready()) {
//      llvm_coro_save();
//      x.await_suspend(...);     (*)
//      llvm_coro_suspend();     (**)
//   }
//   x.await_resume();
//
// where the result of the entire expression is the result of x.await_resume()
//
//   (*) If the return type of x.await_suspend is bool, it can veto the
//       suspend:
//          if (x.await_suspend(...))
//            llvm_coro_suspend();
//
//  (**) llvm_coro_suspend() encodes three possible continuations as
//       a switch instruction:
//
//       %where-to = call i8 @llvm.coro.suspend(...)
//       switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
//         i8 0, label %yield.ready   ; go here when resumed
//         i8 1, label %yield.cleanup ; go here when destroyed
//       ]
//
//  See llvm's docs/Coroutines.rst for more details.
//
static RValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro,
                                    CoroutineSuspendExpr const &S,
                                    AwaitKind Kind, AggValueSlot aggSlot,
                                    bool ignoreResult) {
  auto *E = S.getCommonExpr();

  // FIXME: rsmith 5/22/2017. Does it still make sense for us to have a
  // UO_Coawait at all? As I recall, the only purpose it ever had was to
  // represent a dependent co_await expression that couldn't yet be resolved to
  // a CoawaitExpr. But now that we have (and need!) a separate
  // DependentCoawaitExpr node to store unqualified lookup results, it seems
  // that the UnaryOperator portion of the representation serves no purpose
  // (and as seen in this patch, it's getting in the way). Can we remove it?

  // Skip the passthrough operator co_await (present when awaiting on an
  // LValue).
  if (auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Coawait)
      E = UO->getSubExpr();

  auto Binder =
      CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E);
  auto UnbindOnExit = llvm::make_scope_exit([&] { Binder.unbind(CGF); });

  auto Prefix = buildSuspendPrefixStr(Coro, Kind);
  BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready"));
  BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend"));
  BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup"));

  // If the expression is ready, no need to suspend.
  CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0);

  // Otherwise, emit the suspend logic.
  CGF.EmitBlock(SuspendBlock);

  auto &Builder = CGF.Builder;
  llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save);
  auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy);
  auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr});

  auto *SuspendRet = CGF.EmitScalarExpr(S.getSuspendExpr());
  if (SuspendRet != nullptr) {
    // Veto suspension if requested by a bool-returning await_suspend.
    assert(SuspendRet->getType()->isIntegerTy(1) &&
           "Sema should have already checked that it is void or bool");
    BasicBlock *RealSuspendBlock =
        CGF.createBasicBlock(Prefix + Twine(".suspend.bool"));
    CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock);
    SuspendBlock = RealSuspendBlock;
    CGF.EmitBlock(RealSuspendBlock);
  }

  // Emit the suspend point.
  const bool IsFinalSuspend = (Kind == AwaitKind::Final);
  llvm::Function *CoroSuspend =
      CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend);
  auto *SuspendResult = Builder.CreateCall(
      CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)});

  // Create a switch capturing the three possible continuations.
  auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2);
  Switch->addCase(Builder.getInt8(0), ReadyBlock);
  Switch->addCase(Builder.getInt8(1), CleanupBlock);

  // Emit the cleanup for this suspend point.
  CGF.EmitBlock(CleanupBlock);
  CGF.EmitBranchThroughCleanup(Coro.CleanupJD);

  // Emit the await_resume expression.
  CGF.EmitBlock(ReadyBlock);
  return CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult);
}

RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E,
                               CurCoro.Data->CurrentAwaitKind, aggSlot,
                               ignoreResult);
}

RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield,
                               aggSlot, ignoreResult);
}
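// A co_return statement is lowered to a call to the promise's return_value /
// return_void member (S.getPromiseCall() below), followed by a branch through
// any active cleanups to the final-suspend jump destination.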
void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) {
  ++CurCoro.Data->CoreturnCount;
  EmitStmt(S.getPromiseCall());
  EmitBranchThroughCleanup(CurCoro.Data->FinalJD);
}

// Hunts for the parameter reference in the parameter copy/move declaration.
namespace {
struct GetParamRef : public StmtVisitor<GetParamRef> {
public:
  DeclRefExpr *Expr = nullptr;
  GetParamRef() {}
  void VisitDeclRefExpr(DeclRefExpr *E) {
    assert(Expr == nullptr && "multiple declrefs in param move");
    Expr = E;
  }
  void VisitStmt(Stmt *S) {
    for (auto *C : S->children()) {
      if (C)
        Visit(C);
    }
  }
};
}

// This class replaces references to parameters with their copies by changing
// the addresses in CGF.LocalDeclMap and restoring the original values in
// its destructor.
namespace {
struct ParamReferenceReplacerRAII {
  CodeGenFunction::DeclMapTy SavedLocals;
  CodeGenFunction::DeclMapTy &LocalDeclMap;

  ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap)
      : LocalDeclMap(LocalDeclMap) {}

  void addCopy(DeclStmt const *PM) {
    // Figure out which parameter it refers to.
    assert(PM->isSingleDecl());
    VarDecl const *VD = static_cast<VarDecl const *>(PM->getSingleDecl());
    Expr const *InitExpr = VD->getInit();
    GetParamRef Visitor;
    Visitor.Visit(const_cast<Expr *>(InitExpr));
    assert(Visitor.Expr);
    auto *DREOrig = cast<DeclRefExpr>(Visitor.Expr);
    auto *PD = DREOrig->getDecl();

    auto it = LocalDeclMap.find(PD);
    assert(it != LocalDeclMap.end() && "parameter is not found");
    SavedLocals.insert({PD, it->second});

    auto copyIt = LocalDeclMap.find(VD);
    assert(copyIt != LocalDeclMap.end() && "parameter copy is not found");
    it->second = copyIt->getSecond();
  }

  ~ParamReferenceReplacerRAII() {
    for (auto &&SavedLocal : SavedLocals) {
      LocalDeclMap.insert({SavedLocal.first, SavedLocal.second});
    }
  }
};
}

// For the WinEH exception representation, the backend needs to know which
// funclet coro.end belongs to. That information is passed in a funclet bundle.
static SmallVector<llvm::OperandBundleDef, 1>
getBundlesForCoroEnd(CodeGenFunction &CGF) {
  SmallVector<llvm::OperandBundleDef, 1> BundleList;

  if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", EHPad);

  return BundleList;
}

namespace {
// We will insert coro.end to cut off any of the destructors for objects that
// do not need to be destroyed once the coroutine is resumed.
// See llvm/docs/Coroutines.rst for more details about coro.end.
struct CallCoroEnd final : public EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    auto &CGM = CGF.CGM;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
    // See if we have a funclet bundle to associate coro.end with. (WinEH)
    auto Bundles = getBundlesForCoroEnd(CGF);
    auto *CoroEnd = CGF.Builder.CreateCall(
        CoroEndFn, {NullPtr, CGF.Builder.getTrue()}, Bundles);
    if (Bundles.empty()) {
      // Otherwise (landingpad model), create a conditional branch that leads
      // either to a cleanup block or to a block with the EH resume
      // instruction.
      auto *ResumeBB = CGF.getEHResumeBlock(/*cleanup=*/true);
      auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont");
      CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB);
      CGF.EmitBlock(CleanupContBB);
    }
  }
};
}

namespace {
// Make sure to call coro.delete on scope exit.
struct CallCoroDelete final : public EHScopeStack::Cleanup {
  Stmt *Deallocate;

  // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"
  //
  // Note: That deallocation will be emitted twice: once for a normal exit and
  // once for an exceptional exit. This usage is safe because Deallocate does
  // not contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
  // builds a single call to a deallocation function which is safe to emit
  // multiple times.
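  //
  // A rough sketch of what this cleanup produces (pseudo-IR; the deallocation
  // function name is illustrative only):
  //
  //   %mem  = call i8* @llvm.coro.free(token %coro.id, i8* %coro.begin)
  //   %cond = icmp ne i8* %mem, null
  //   br i1 %cond, label %coro.free, label %after.coro.free
  // coro.free:                         ; emitted from the Deallocate statement
  //   call void @"operator delete"(i8* %mem)
  //   br label %after.coro.free
  // after.coro.free:
  //   ; cleanup continues here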
  void Emit(CodeGenFunction &CGF, Flags) override {
    // Remember the current point, as we are going to emit the deallocation
    // code first to get to the coro.free instruction that is an argument to
    // the delete call.
    BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock();

    auto *FreeBB = CGF.createBasicBlock("coro.free");
    CGF.EmitBlock(FreeBB);
    CGF.EmitStmt(Deallocate);

    auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free");
    CGF.EmitBlock(AfterFreeBB);

    // We should have captured coro.free from the emission of deallocate.
    auto *CoroFree = CGF.CurCoro.Data->LastCoroFree;
    if (!CoroFree) {
      CGF.CGM.Error(Deallocate->getLocStart(),
                    "Deallocation expression does not refer to coro.free");
      return;
    }

    // Get back to the block we were originally in and move coro.free there.
    auto *InsertPt = SaveInsertBlock->getTerminator();
    CoroFree->moveBefore(InsertPt);
    CGF.Builder.SetInsertPoint(InsertPt);

    // Add if (auto *mem = coro.free) Deallocate;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr);
    CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB);

    // No longer need the old terminator.
    InsertPt->eraseFromParent();
    CGF.Builder.SetInsertPoint(AfterFreeBB);
  }
  explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {}
};
}

namespace {
struct GetReturnObjectManager {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  const CoroutineBodyStmt &S;

  Address GroActiveFlag;
  CodeGenFunction::AutoVarEmission GroEmission;

  GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S)
      : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()),
        GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {}

  // The gro variable has to outlive the coroutine frame and the coroutine
  // promise, but it can only be initialized after the coroutine promise has
  // been created. Thus, we split its emission into two parts: EmitGroAlloca
  // emits an alloca and sets up the cleanups; later, when the coroutine
  // promise is available, EmitGroInit initializes the gro and sets the flag
  // that the cleanup is now active.
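  //
  // In pseudo-code (a sketch; names are illustrative):
  //
  //   bool gro.active = false;              // EmitGroAlloca
  //   T gro;                                // alloca only; its cleanup runs
  //                                         // only if gro.active is true
  //   ... frame allocated, promise constructed ...
  //   gro = promise.get_return_object();    // EmitGroInit
  //   gro.active = true;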
  void EmitGroAlloca() {
    auto *GroDeclStmt = dyn_cast<DeclStmt>(S.getResultDecl());
    if (!GroDeclStmt) {
      // If get_return_object returns void, no need to do an alloca.
      return;
    }

    auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl());

    // Set GRO flag that it is not initialized yet
    GroActiveFlag = CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(),
                                         "gro.active");
    Builder.CreateStore(Builder.getFalse(), GroActiveFlag);

    GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl);

    // Remember the top of EHStack before emitting the cleanup.
    auto old_top = CGF.EHStack.stable_begin();
    CGF.EmitAutoVarCleanups(GroEmission);
    auto top = CGF.EHStack.stable_begin();

    // Make the cleanup conditional on gro.active
    for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top);
         b != e; b++) {
      if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) {
        assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?");
        Cleanup->setActiveFlag(GroActiveFlag);
        Cleanup->setTestFlagInEHCleanup();
        Cleanup->setTestFlagInNormalCleanup();
      }
    }
  }

  void EmitGroInit() {
    if (!GroActiveFlag.isValid()) {
      // No Gro variable was allocated. Simply emit the call to
      // get_return_object.
      CGF.EmitStmt(S.getResultDecl());
      return;
    }

    CGF.EmitAutoVarInit(GroEmission);
    Builder.CreateStore(Builder.getTrue(), GroActiveFlag);
  }
};
}

static void emitBodyAndFallthrough(CodeGenFunction &CGF,
                                   const CoroutineBodyStmt &S, Stmt *Body) {
  CGF.EmitStmt(Body);
  const bool CanFallthrough = CGF.Builder.GetInsertBlock();
  if (CanFallthrough)
    if (Stmt *OnFallthrough = S.getFallthroughHandler())
      CGF.EmitStmt(OnFallthrough);
}
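// Emits the coroutine body statement built by Sema. Roughly (a sketch; the
// actual sub-statements are pre-built and hang off CoroutineBodyStmt):
//
//   mem   = coro.alloc() ? operator new(...) : nullptr
//   frame = coro.begin(coro.id(...), mem)
//   <parameter copies>; <promise construction>
//   gro = promise.get_return_object()          // via GetReturnObjectManager
//   co_await promise.initial_suspend()
//   <user-authored body>                        // possibly wrapped in a try
//                                               // whose handler calls
//                                               // promise.unhandled_exception()
//   co_await promise.final_suspend()
//   <cleanups>; if (coro.free(...)) operator delete(mem)
//   coro.end(); return the get-return-object result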
void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) {
  auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
  auto &TI = CGM.getContext().getTargetInfo();
  unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth();

  auto *EntryBB = Builder.GetInsertBlock();
  auto *AllocBB = createBasicBlock("coro.alloc");
  auto *InitBB = createBasicBlock("coro.init");
  auto *FinalBB = createBasicBlock("coro.final");
  auto *RetBB = createBasicBlock("coro.ret");

  auto *CoroId = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_id),
      {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr});
  createCoroData(*this, CurCoro, CoroId);
  CurCoro.Data->SuspendBB = RetBB;

  // The backend is allowed to elide memory allocations. To help it, emit
  //   auto mem = coro.alloc() ? 0 : ... allocation code ...;
  auto *CoroAlloc = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId});

  Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB);

  EmitBlock(AllocBB);
  auto *AllocateCall = EmitScalarExpr(S.getAllocate());
  auto *AllocOrInvokeContBB = Builder.GetInsertBlock();

  // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
  if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) {
    auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure");

    // See if allocation was successful.
    auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy);
    auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr);
    Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB);

    // If not, return the OnAllocFailure object.
    EmitBlock(RetOnFailureBB);
    EmitStmt(RetOnAllocFailure);
  } else {
    Builder.CreateBr(InitBB);
  }

  EmitBlock(InitBB);

  // Pass the result of the allocation to coro.begin.
  auto *Phi = Builder.CreatePHI(VoidPtrTy, 2);
  Phi->addIncoming(NullPtr, EntryBB);
  Phi->addIncoming(AllocateCall, AllocOrInvokeContBB);
  auto *CoroBegin = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi});
  CurCoro.Data->CoroBegin = CoroBegin;

  GetReturnObjectManager GroManager(*this, S);
  GroManager.EmitGroAlloca();

  CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB);
  {
    ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap);
    CodeGenFunction::RunCleanupsScope ResumeScope(*this);
    EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate());

    // Create parameter copies. We do it before creating the promise, since an
    // evolution of the coroutine TS may allow the promise constructor to
    // observe parameter copies.
    for (auto *PM : S.getParamMoves()) {
      EmitStmt(PM);
      ParamReplacer.addCopy(cast<DeclStmt>(PM));
      // TODO: if(CoroParam(...)) need to surround ctor and dtor
      // for the copy, so that llvm can elide it if the copy is
      // not needed.
    }

    EmitStmt(S.getPromiseDeclStmt());

    Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl());
    auto *PromiseAddrVoidPtr =
        new llvm::BitCastInst(PromiseAddr.getPointer(), VoidPtrTy, "", CoroId);
    // Update CoroId to refer to the promise. We could not do it earlier
    // because the promise local variable was not emitted yet.
    CoroId->setArgOperand(1, PromiseAddrVoidPtr);

    // Now that we have the promise, initialize the GRO.
    GroManager.EmitGroInit();

    EHStack.pushCleanup<CallCoroEnd>(EHCleanup);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Init;
    EmitStmt(S.getInitSuspendStmt());
    CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal;

    if (auto *OnException = S.getExceptionHandler()) {
      auto Loc = S.getLocStart();
      CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr, OnException);
      auto *TryStmt =
          CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch);

      EnterCXXTryStmt(*TryStmt);
      emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock());
      ExitCXXTryStmt(*TryStmt);
    } else {
      emitBodyAndFallthrough(*this, S, S.getBody());
    }

    // See if we need to generate the final suspend.
    const bool CanFallthrough = Builder.GetInsertBlock();
    const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0;
    if (CanFallthrough || HasCoreturns) {
      EmitBlock(FinalBB);
      CurCoro.Data->CurrentAwaitKind = AwaitKind::Final;
      EmitStmt(S.getFinalSuspendStmt());
    } else {
      // We don't need FinalBB. Emit it to make sure the block is deleted.
      EmitBlock(FinalBB, /*IsFinished=*/true);
    }
  }

  EmitBlock(RetBB);
  // Emit coro.end before getReturnStmt (and parameter destructors), since
  // resume and destroy parts of the coroutine should not include them.
  llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
  Builder.CreateCall(CoroEnd, {NullPtr, Builder.getFalse()});

  if (Stmt *Ret = S.getReturnStmt())
    EmitStmt(Ret);
}

// Emit a coroutine intrinsic and patch up the arguments of the token type.
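// The patching works as follows: coro.frame is replaced with the SSA value of
// the current coro.begin; coro.alloc, coro.begin and coro.free get the
// remembered coro.id token prepended as their first argument (or token 'none'
// plus a diagnostic if no coro.id has been seen yet); coro.suspend always gets
// token 'none' prepended. The remaining arguments come from the builtin call
// itself.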
RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E,
                                               unsigned int IID) {
  SmallVector<llvm::Value *, 8> Args;
  switch (IID) {
  default:
    break;
  // The coro.frame builtin is replaced with an SSA value of the coro.begin
  // intrinsic.
  case llvm::Intrinsic::coro_frame: {
    if (CurCoro.Data && CurCoro.Data->CoroBegin) {
      return RValue::get(CurCoro.Data->CoroBegin);
    }
    CGM.Error(E->getLocStart(), "this builtin expects that "
                                "__builtin_coro_begin has been used earlier "
                                "in this function");
    auto NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
    return RValue::get(NullPtr);
  }
  // The following three intrinsics take a token parameter referring to a token
  // returned by an earlier call to @llvm.coro.id. Since we cannot represent it
  // in builtins, we patch it up here.
  case llvm::Intrinsic::coro_alloc:
  case llvm::Intrinsic::coro_begin:
  case llvm::Intrinsic::coro_free: {
    if (CurCoro.Data && CurCoro.Data->CoroId) {
      Args.push_back(CurCoro.Data->CoroId);
      break;
    }
    CGM.Error(E->getLocStart(), "this builtin expects that __builtin_coro_id "
                                "has been used earlier in this function");
    // Fall through to the next case to add TokenNone as the first argument.
  }
  // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
  // argument.
  case llvm::Intrinsic::coro_suspend:
    Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
    break;
  }
  for (auto &Arg : E->arguments())
    Args.push_back(EmitScalarExpr(Arg));

  llvm::Value *F = CGM.getIntrinsic(IID);
  llvm::CallInst *Call = Builder.CreateCall(F, Args);

  // Note: The following code is here to enable emitting coro.id and coro.begin
  // by hand, to experiment with coroutines in C.

  // If we see @llvm.coro.id remember it in the CoroData. We will update
  // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
  if (IID == llvm::Intrinsic::coro_id) {
    createCoroData(*this, CurCoro, Call, E);
  } else if (IID == llvm::Intrinsic::coro_begin) {
    if (CurCoro.Data)
      CurCoro.Data->CoroBegin = Call;
  } else if (IID == llvm::Intrinsic::coro_free) {
    // Remember the last coro_free as we need it to build the conditional
    // deletion of the coroutine frame.
    if (CurCoro.Data)
      CurCoro.Data->LastCoroFree = Call;
  }
  return RValue::get(Call);
}