CGStmt.cpp 60 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676
  1. //===--- CGStmt.cpp - Emit LLVM Code from Statements ----------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code to emit Stmt nodes as LLVM code.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "CGDebugInfo.h"
  14. #include "CodeGenModule.h"
  15. #include "CodeGenFunction.h"
  16. #include "TargetInfo.h"
  17. #include "clang/AST/StmtVisitor.h"
  18. #include "clang/Basic/PrettyStackTrace.h"
  19. #include "clang/Basic/TargetInfo.h"
  20. #include "llvm/ADT/StringExtras.h"
  21. #include "llvm/InlineAsm.h"
  22. #include "llvm/Intrinsics.h"
  23. #include "llvm/Target/TargetData.h"
  24. using namespace clang;
  25. using namespace CodeGen;
  26. //===----------------------------------------------------------------------===//
  27. // Statement Emission
  28. //===----------------------------------------------------------------------===//
  29. void CodeGenFunction::EmitStopPoint(const Stmt *S) {
  30. if (CGDebugInfo *DI = getDebugInfo()) {
  31. SourceLocation Loc;
  32. if (isa<DeclStmt>(S))
  33. Loc = S->getLocEnd();
  34. else
  35. Loc = S->getLocStart();
  36. DI->EmitLocation(Builder, Loc);
  37. }
  38. }
  39. void CodeGenFunction::EmitStmt(const Stmt *S) {
  40. assert(S && "Null statement?");
  41. // These statements have their own debug info handling.
  42. if (EmitSimpleStmt(S))
  43. return;
  44. // Check if we are generating unreachable code.
  45. if (!HaveInsertPoint()) {
  46. // If so, and the statement doesn't contain a label, then we do not need to
  47. // generate actual code. This is safe because (1) the current point is
  48. // unreachable, so we don't need to execute the code, and (2) we've already
  49. // handled the statements which update internal data structures (like the
  50. // local variable map) which could be used by subsequent statements.
  51. if (!ContainsLabel(S)) {
  52. // Verify that any decl statements were handled as simple, they may be in
  53. // scope of subsequent reachable statements.
  54. assert(!isa<DeclStmt>(*S) && "Unexpected DeclStmt!");
  55. return;
  56. }
  57. // Otherwise, make a new block to hold the code.
  58. EnsureInsertPoint();
  59. }
  60. // Generate a stoppoint if we are emitting debug info.
  61. EmitStopPoint(S);
  62. switch (S->getStmtClass()) {
  63. case Stmt::NoStmtClass:
  64. case Stmt::CXXCatchStmtClass:
  65. case Stmt::SEHExceptStmtClass:
  66. case Stmt::SEHFinallyStmtClass:
  67. case Stmt::MSDependentExistsStmtClass:
  68. llvm_unreachable("invalid statement class to emit generically");
  69. case Stmt::NullStmtClass:
  70. case Stmt::CompoundStmtClass:
  71. case Stmt::DeclStmtClass:
  72. case Stmt::LabelStmtClass:
  73. case Stmt::GotoStmtClass:
  74. case Stmt::BreakStmtClass:
  75. case Stmt::ContinueStmtClass:
  76. case Stmt::DefaultStmtClass:
  77. case Stmt::CaseStmtClass:
  78. llvm_unreachable("should have emitted these statements as simple");
  79. #define STMT(Type, Base)
  80. #define ABSTRACT_STMT(Op)
  81. #define EXPR(Type, Base) \
  82. case Stmt::Type##Class:
  83. #include "clang/AST/StmtNodes.inc"
  84. {
  85. // Remember the block we came in on.
  86. llvm::BasicBlock *incoming = Builder.GetInsertBlock();
  87. assert(incoming && "expression emission must have an insertion point");
  88. EmitIgnoredExpr(cast<Expr>(S));
  89. llvm::BasicBlock *outgoing = Builder.GetInsertBlock();
  90. assert(outgoing && "expression emission cleared block!");
  91. // The expression emitters assume (reasonably!) that the insertion
  92. // point is always set. To maintain that, the call-emission code
  93. // for noreturn functions has to enter a new block with no
  94. // predecessors. We want to kill that block and mark the current
  95. // insertion point unreachable in the common case of a call like
  96. // "exit();". Since expression emission doesn't otherwise create
  97. // blocks with no predecessors, we can just test for that.
  98. // However, we must be careful not to do this to our incoming
  99. // block, because *statement* emission does sometimes create
  100. // reachable blocks which will have no predecessors until later in
  101. // the function. This occurs with, e.g., labels that are not
  102. // reachable by fallthrough.
  103. if (incoming != outgoing && outgoing->use_empty()) {
  104. outgoing->eraseFromParent();
  105. Builder.ClearInsertionPoint();
  106. }
  107. break;
  108. }
  109. case Stmt::IndirectGotoStmtClass:
  110. EmitIndirectGotoStmt(cast<IndirectGotoStmt>(*S)); break;
  111. case Stmt::IfStmtClass: EmitIfStmt(cast<IfStmt>(*S)); break;
  112. case Stmt::WhileStmtClass: EmitWhileStmt(cast<WhileStmt>(*S)); break;
  113. case Stmt::DoStmtClass: EmitDoStmt(cast<DoStmt>(*S)); break;
  114. case Stmt::ForStmtClass: EmitForStmt(cast<ForStmt>(*S)); break;
  115. case Stmt::ReturnStmtClass: EmitReturnStmt(cast<ReturnStmt>(*S)); break;
  116. case Stmt::SwitchStmtClass: EmitSwitchStmt(cast<SwitchStmt>(*S)); break;
  117. case Stmt::AsmStmtClass: EmitAsmStmt(cast<AsmStmt>(*S)); break;
  118. case Stmt::ObjCAtTryStmtClass:
  119. EmitObjCAtTryStmt(cast<ObjCAtTryStmt>(*S));
  120. break;
  121. case Stmt::ObjCAtCatchStmtClass:
  122. llvm_unreachable(
  123. "@catch statements should be handled by EmitObjCAtTryStmt");
  124. case Stmt::ObjCAtFinallyStmtClass:
  125. llvm_unreachable(
  126. "@finally statements should be handled by EmitObjCAtTryStmt");
  127. case Stmt::ObjCAtThrowStmtClass:
  128. EmitObjCAtThrowStmt(cast<ObjCAtThrowStmt>(*S));
  129. break;
  130. case Stmt::ObjCAtSynchronizedStmtClass:
  131. EmitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(*S));
  132. break;
  133. case Stmt::ObjCForCollectionStmtClass:
  134. EmitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(*S));
  135. break;
  136. case Stmt::ObjCAutoreleasePoolStmtClass:
  137. EmitObjCAutoreleasePoolStmt(cast<ObjCAutoreleasePoolStmt>(*S));
  138. break;
  139. case Stmt::CXXTryStmtClass:
  140. EmitCXXTryStmt(cast<CXXTryStmt>(*S));
  141. break;
  142. case Stmt::CXXForRangeStmtClass:
  143. EmitCXXForRangeStmt(cast<CXXForRangeStmt>(*S));
  144. case Stmt::SEHTryStmtClass:
  145. // FIXME Not yet implemented
  146. break;
  147. }
  148. }
  149. bool CodeGenFunction::EmitSimpleStmt(const Stmt *S) {
  150. switch (S->getStmtClass()) {
  151. default: return false;
  152. case Stmt::NullStmtClass: break;
  153. case Stmt::CompoundStmtClass: EmitCompoundStmt(cast<CompoundStmt>(*S)); break;
  154. case Stmt::DeclStmtClass: EmitDeclStmt(cast<DeclStmt>(*S)); break;
  155. case Stmt::LabelStmtClass: EmitLabelStmt(cast<LabelStmt>(*S)); break;
  156. case Stmt::GotoStmtClass: EmitGotoStmt(cast<GotoStmt>(*S)); break;
  157. case Stmt::BreakStmtClass: EmitBreakStmt(cast<BreakStmt>(*S)); break;
  158. case Stmt::ContinueStmtClass: EmitContinueStmt(cast<ContinueStmt>(*S)); break;
  159. case Stmt::DefaultStmtClass: EmitDefaultStmt(cast<DefaultStmt>(*S)); break;
  160. case Stmt::CaseStmtClass: EmitCaseStmt(cast<CaseStmt>(*S)); break;
  161. }
  162. return true;
  163. }
/// EmitCompoundStmt - Emit a compound statement {..} node.  If GetLast is
/// true, this captures the expression result of the last sub-statement and
/// returns it (for use by the statement expression extension).
RValue CodeGenFunction::EmitCompoundStmt(const CompoundStmt &S, bool GetLast,
                                         AggValueSlot AggSlot) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),S.getLBracLoc(),
                             "LLVM IR generation of compound statement ('{}')");

  // Open a lexical block in the debug info, if we are emitting any.
  CGDebugInfo *DI = getDebugInfo();
  if (DI)
    DI->EmitLexicalBlockStart(Builder, S.getLBracLoc());

  // Keep track of the current cleanup stack depth.
  RunCleanupsScope Scope(*this);

  // Emit every sub-statement.  When GetLast is set, body_end()-GetLast stops
  // one short so the final statement can be handled as a value below.
  for (CompoundStmt::const_body_iterator I = S.body_begin(),
       E = S.body_end()-GetLast; I != E; ++I)
    EmitStmt(*I);

  if (DI)
    DI->EmitLexicalBlockEnd(Builder, S.getRBracLoc());

  RValue RV;
  if (!GetLast)
    RV = RValue::get(0);
  else {
    // We have to special case labels here.  They are statements, but when put
    // at the end of a statement expression, they yield the value of their
    // subexpression.  Handle this by walking through all labels we encounter,
    // emitting them before we evaluate the subexpr.
    const Stmt *LastStmt = S.body_back();
    while (const LabelStmt *LS = dyn_cast<LabelStmt>(LastStmt)) {
      EmitLabel(LS->getDecl());
      LastStmt = LS->getSubStmt();
    }

    // Emitting the labels may have left us without an insertion point.
    EnsureInsertPoint();

    RV = EmitAnyExpr(cast<Expr>(LastStmt), AggSlot);
  }

  return RV;
}
  199. void CodeGenFunction::SimplifyForwardingBlocks(llvm::BasicBlock *BB) {
  200. llvm::BranchInst *BI = dyn_cast<llvm::BranchInst>(BB->getTerminator());
  201. // If there is a cleanup stack, then we it isn't worth trying to
  202. // simplify this block (we would need to remove it from the scope map
  203. // and cleanup entry).
  204. if (!EHStack.empty())
  205. return;
  206. // Can only simplify direct branches.
  207. if (!BI || !BI->isUnconditional())
  208. return;
  209. BB->replaceAllUsesWith(BI->getSuccessor(0));
  210. BI->eraseFromParent();
  211. BB->eraseFromParent();
  212. }
  213. void CodeGenFunction::EmitBlock(llvm::BasicBlock *BB, bool IsFinished) {
  214. llvm::BasicBlock *CurBB = Builder.GetInsertBlock();
  215. // Fall out of the current block (if necessary).
  216. EmitBranch(BB);
  217. if (IsFinished && BB->use_empty()) {
  218. delete BB;
  219. return;
  220. }
  221. // Place the block after the current block, if possible, or else at
  222. // the end of the function.
  223. if (CurBB && CurBB->getParent())
  224. CurFn->getBasicBlockList().insertAfter(CurBB, BB);
  225. else
  226. CurFn->getBasicBlockList().push_back(BB);
  227. Builder.SetInsertPoint(BB);
  228. }
  229. void CodeGenFunction::EmitBranch(llvm::BasicBlock *Target) {
  230. // Emit a branch from the current block to the target one if this
  231. // was a real block. If this was just a fall-through block after a
  232. // terminator, don't emit it.
  233. llvm::BasicBlock *CurBB = Builder.GetInsertBlock();
  234. if (!CurBB || CurBB->getTerminator()) {
  235. // If there is no insert point or the previous block is already
  236. // terminated, don't touch it.
  237. } else {
  238. // Otherwise, create a fall-through branch.
  239. Builder.CreateBr(Target);
  240. }
  241. Builder.ClearInsertionPoint();
  242. }
  243. void CodeGenFunction::EmitBlockAfterUses(llvm::BasicBlock *block) {
  244. bool inserted = false;
  245. for (llvm::BasicBlock::use_iterator
  246. i = block->use_begin(), e = block->use_end(); i != e; ++i) {
  247. if (llvm::Instruction *insn = dyn_cast<llvm::Instruction>(*i)) {
  248. CurFn->getBasicBlockList().insertAfter(insn->getParent(), block);
  249. inserted = true;
  250. break;
  251. }
  252. }
  253. if (!inserted)
  254. CurFn->getBasicBlockList().push_back(block);
  255. Builder.SetInsertPoint(block);
  256. }
  257. CodeGenFunction::JumpDest
  258. CodeGenFunction::getJumpDestForLabel(const LabelDecl *D) {
  259. JumpDest &Dest = LabelMap[D];
  260. if (Dest.isValid()) return Dest;
  261. // Create, but don't insert, the new block.
  262. Dest = JumpDest(createBasicBlock(D->getName()),
  263. EHScopeStack::stable_iterator::invalid(),
  264. NextCleanupDestIndex++);
  265. return Dest;
  266. }
/// EmitLabel - Emit the block for the given label declaration, resolving any
/// branch fixups from forward gotos that targeted it.
void CodeGenFunction::EmitLabel(const LabelDecl *D) {
  JumpDest &Dest = LabelMap[D];

  // If we didn't need a forward reference to this label, just go
  // ahead and create a destination at the current scope.
  if (!Dest.isValid()) {
    Dest = getJumpDestInCurrentScope(D->getName());

  // Otherwise, we need to give this label a target depth and remove
  // it from the branch-fixups list.
  } else {
    assert(!Dest.getScopeDepth().isValid() && "already emitted label!");
    // Rebuild the destination with the now-known EH scope depth; the forward
    // reference was created with an invalid depth (see getJumpDestForLabel).
    Dest = JumpDest(Dest.getBlock(),
                    EHStack.stable_begin(),
                    Dest.getDestIndex());

    ResolveBranchFixups(Dest.getBlock());
  }

  EmitBlock(Dest.getBlock());
}
  284. void CodeGenFunction::EmitLabelStmt(const LabelStmt &S) {
  285. EmitLabel(S.getDecl());
  286. EmitStmt(S.getSubStmt());
  287. }
  288. void CodeGenFunction::EmitGotoStmt(const GotoStmt &S) {
  289. // If this code is reachable then emit a stop point (if generating
  290. // debug info). We have to do this ourselves because we are on the
  291. // "simple" statement path.
  292. if (HaveInsertPoint())
  293. EmitStopPoint(&S);
  294. EmitBranchThroughCleanup(getJumpDestForLabel(S.getLabel()));
  295. }
  296. void CodeGenFunction::EmitIndirectGotoStmt(const IndirectGotoStmt &S) {
  297. if (const LabelDecl *Target = S.getConstantTarget()) {
  298. EmitBranchThroughCleanup(getJumpDestForLabel(Target));
  299. return;
  300. }
  301. // Ensure that we have an i8* for our PHI node.
  302. llvm::Value *V = Builder.CreateBitCast(EmitScalarExpr(S.getTarget()),
  303. Int8PtrTy, "addr");
  304. llvm::BasicBlock *CurBB = Builder.GetInsertBlock();
  305. // Get the basic block for the indirect goto.
  306. llvm::BasicBlock *IndGotoBB = GetIndirectGotoBlock();
  307. // The first instruction in the block has to be the PHI for the switch dest,
  308. // add an entry for this branch.
  309. cast<llvm::PHINode>(IndGotoBB->begin())->addIncoming(V, CurBB);
  310. EmitBranch(IndGotoBB);
  311. }
/// EmitIfStmt - Emit an if/else statement, eliding statically-dead arms when
/// the condition constant-folds and the skipped arm contains no labels.
void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  // C99 6.8.4.1: The first substatement is executed if the expression compares
  // unequal to 0.  The condition must be a scalar type.
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitAutoVarDecl(*S.getConditionVariable());

  // If the condition constant folds and can be elided, try to avoid emitting
  // the condition and the dead arm of the if/else.
  bool CondConstant;
  if (ConstantFoldsToSimpleInteger(S.getCond(), CondConstant)) {
    // Figure out which block (then or else) is executed.
    const Stmt *Executed = S.getThen();
    const Stmt *Skipped = S.getElse();
    if (!CondConstant) // Condition false?
      std::swap(Executed, Skipped);

    // If the skipped block has no labels in it, just emit the executed block.
    // This avoids emitting dead code and simplifies the CFG substantially.
    if (!ContainsLabel(Skipped)) {
      if (Executed) {
        RunCleanupsScope ExecutedScope(*this);
        EmitStmt(Executed);
      }
      return;
    }
  }

  // Otherwise, the condition did not fold, or we couldn't elide it.  Just
  // emit the conditional branch.
  llvm::BasicBlock *ThenBlock = createBasicBlock("if.then");
  llvm::BasicBlock *ContBlock = createBasicBlock("if.end");
  llvm::BasicBlock *ElseBlock = ContBlock;
  if (S.getElse())
    ElseBlock = createBasicBlock("if.else");
  EmitBranchOnBoolExpr(S.getCond(), ThenBlock, ElseBlock);

  // Emit the 'then' code.
  EmitBlock(ThenBlock);
  {
    RunCleanupsScope ThenScope(*this);
    EmitStmt(S.getThen());
  }
  EmitBranch(ContBlock);

  // Emit the 'else' code if present.
  if (const Stmt *Else = S.getElse()) {
    // There is no need to emit line number for unconditional branch.
    if (getDebugInfo())
      Builder.SetCurrentDebugLocation(llvm::DebugLoc());
    EmitBlock(ElseBlock);
    {
      RunCleanupsScope ElseScope(*this);
      EmitStmt(Else);
    }
    // There is no need to emit line number for unconditional branch.
    if (getDebugInfo())
      Builder.SetCurrentDebugLocation(llvm::DebugLoc());
    EmitBranch(ContBlock);
  }

  // Emit the continuation block for code after the if.
  EmitBlock(ContBlock, true);
}
/// EmitWhileStmt - Emit a while loop, skipping the exit branch for the common
/// "while (1)" case.
void CodeGenFunction::EmitWhileStmt(const WhileStmt &S) {
  // Emit the header for the loop, which will also become
  // the continue target.
  JumpDest LoopHeader = getJumpDestInCurrentScope("while.cond");
  EmitBlock(LoopHeader.getBlock());

  // Create an exit block for when the condition fails, which will
  // also become the break target.
  JumpDest LoopExit = getJumpDestInCurrentScope("while.end");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopHeader));

  // C++ [stmt.while]p2:
  //   When the condition of a while statement is a declaration, the
  //   scope of the variable that is declared extends from its point
  //   of declaration (3.3.2) to the end of the while statement.
  //   [...]
  //   The object created in a condition is destroyed and created
  //   with each iteration of the loop.
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitAutoVarDecl(*S.getConditionVariable());

  // Evaluate the conditional in the while header.  C99 6.8.5.1: The
  // evaluation of the controlling expression takes place before each
  // execution of the loop body.
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());

  // while(1) is common, avoid extra exit blocks.  Be sure
  // to correctly handle break/continue though.
  bool EmitBoolCondBranch = true;
  if (llvm::ConstantInt *C = dyn_cast<llvm::ConstantInt>(BoolCondVal))
    if (C->isOne())
      EmitBoolCondBranch = false;

  // As long as the condition is true, go to the loop body.
  llvm::BasicBlock *LoopBody = createBasicBlock("while.body");
  if (EmitBoolCondBranch) {
    llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
    // If the condition variable requires cleanups, exit through a staging
    // block so they run before the real loop exit.
    if (ConditionScope.requiresCleanups())
      ExitBlock = createBasicBlock("while.exit");

    Builder.CreateCondBr(BoolCondVal, LoopBody, ExitBlock);

    if (ExitBlock != LoopExit.getBlock()) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }
  }

  // Emit the loop body.  We have to emit this in a cleanup scope
  // because it might be a singleton DeclStmt.
  {
    RunCleanupsScope BodyScope(*this);
    EmitBlock(LoopBody);
    EmitStmt(S.getBody());
  }

  BreakContinueStack.pop_back();

  // Immediately force cleanup.
  ConditionScope.ForceCleanup();

  // Branch to the loop header again.
  EmitBranch(LoopHeader.getBlock());

  // Emit the exit block.
  EmitBlock(LoopExit.getBlock(), true);

  // The LoopHeader typically is just a branch if we skipped emitting
  // a branch, try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopHeader.getBlock());
}
  431. void CodeGenFunction::EmitDoStmt(const DoStmt &S) {
  432. JumpDest LoopExit = getJumpDestInCurrentScope("do.end");
  433. JumpDest LoopCond = getJumpDestInCurrentScope("do.cond");
  434. // Store the blocks to use for break and continue.
  435. BreakContinueStack.push_back(BreakContinue(LoopExit, LoopCond));
  436. // Emit the body of the loop.
  437. llvm::BasicBlock *LoopBody = createBasicBlock("do.body");
  438. EmitBlock(LoopBody);
  439. {
  440. RunCleanupsScope BodyScope(*this);
  441. EmitStmt(S.getBody());
  442. }
  443. BreakContinueStack.pop_back();
  444. EmitBlock(LoopCond.getBlock());
  445. // C99 6.8.5.2: "The evaluation of the controlling expression takes place
  446. // after each execution of the loop body."
  447. // Evaluate the conditional in the while header.
  448. // C99 6.8.5p2/p4: The first substatement is executed if the expression
  449. // compares unequal to 0. The condition must be a scalar type.
  450. llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());
  451. // "do {} while (0)" is common in macros, avoid extra blocks. Be sure
  452. // to correctly handle break/continue though.
  453. bool EmitBoolCondBranch = true;
  454. if (llvm::ConstantInt *C = dyn_cast<llvm::ConstantInt>(BoolCondVal))
  455. if (C->isZero())
  456. EmitBoolCondBranch = false;
  457. // As long as the condition is true, iterate the loop.
  458. if (EmitBoolCondBranch)
  459. Builder.CreateCondBr(BoolCondVal, LoopBody, LoopExit.getBlock());
  460. // Emit the exit block.
  461. EmitBlock(LoopExit.getBlock());
  462. // The DoCond block typically is just a branch if we skipped
  463. // emitting a branch, try to erase it.
  464. if (!EmitBoolCondBranch)
  465. SimplifyForwardingBlocks(LoopCond.getBlock());
  466. }
/// EmitForStmt - Emit a C/C++ for loop, including the init, condition
/// (possibly with a condition variable), body, and increment, with the
/// correct cleanup-scope nesting for each.
void CodeGenFunction::EmitForStmt(const ForStmt &S) {
  JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

  RunCleanupsScope ForScope(*this);

  CGDebugInfo *DI = getDebugInfo();
  if (DI)
    DI->EmitLexicalBlockStart(Builder, S.getSourceRange().getBegin());

  // Evaluate the first part before the loop.
  if (S.getInit())
    EmitStmt(S.getInit());

  // Start the loop with a block that tests the condition.
  // If there's an increment, the continue scope will be overwritten
  // later.
  JumpDest Continue = getJumpDestInCurrentScope("for.cond");
  llvm::BasicBlock *CondBlock = Continue.getBlock();
  EmitBlock(CondBlock);

  // Create a cleanup scope for the condition variable cleanups.
  RunCleanupsScope ConditionScope(*this);

  llvm::Value *BoolCondVal = 0;
  if (S.getCond()) {
    // If the for statement has a condition scope, emit the local variable
    // declaration.
    llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
    if (S.getConditionVariable()) {
      EmitAutoVarDecl(*S.getConditionVariable());
    }

    // If there are any cleanups between here and the loop-exit scope,
    // create a block to stage a loop exit along.
    if (ForScope.requiresCleanups())
      ExitBlock = createBasicBlock("for.cond.cleanup");

    // As long as the condition is true, iterate the loop.
    llvm::BasicBlock *ForBody = createBasicBlock("for.body");

    // C99 6.8.5p2/p4: The first substatement is executed if the expression
    // compares unequal to 0.  The condition must be a scalar type.
    BoolCondVal = EvaluateExprAsBool(S.getCond());
    Builder.CreateCondBr(BoolCondVal, ForBody, ExitBlock);

    if (ExitBlock != LoopExit.getBlock()) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }

    EmitBlock(ForBody);
  } else {
    // Treat it as a non-zero constant.  Don't even create a new block for the
    // body, just fall into it.
  }

  // If the for loop doesn't have an increment we can just use the
  // condition as the continue block.  Otherwise we'll need to create
  // a block for it (in the current scope, i.e. in the scope of the
  // condition), and that we will become our continue block.
  if (S.getInc())
    Continue = getJumpDestInCurrentScope("for.inc");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  {
    // Create a separate cleanup scope for the body, in case it is not
    // a compound statement.
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getBody());
  }

  // If there is an increment, emit it next.
  if (S.getInc()) {
    EmitBlock(Continue.getBlock());
    EmitStmt(S.getInc());
  }

  BreakContinueStack.pop_back();

  // Run condition-variable cleanups before looping back to the condition.
  ConditionScope.ForceCleanup();
  EmitBranch(CondBlock);

  ForScope.ForceCleanup();

  if (DI)
    DI->EmitLexicalBlockEnd(Builder, S.getSourceRange().getEnd());

  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock(), true);
}
/// EmitCXXForRangeStmt - Emit a C++11 range-based for statement, which the
/// AST has already desugared into range/begin-end declarations, a condition,
/// an increment, and a loop-variable declaration.
void CodeGenFunction::EmitCXXForRangeStmt(const CXXForRangeStmt &S) {
  JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

  RunCleanupsScope ForScope(*this);

  CGDebugInfo *DI = getDebugInfo();
  if (DI)
    DI->EmitLexicalBlockStart(Builder, S.getSourceRange().getBegin());

  // Evaluate the first pieces before the loop.
  EmitStmt(S.getRangeStmt());
  EmitStmt(S.getBeginEndStmt());

  // Start the loop with a block that tests the condition.
  // If there's an increment, the continue scope will be overwritten
  // later.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  EmitBlock(CondBlock);

  // If there are any cleanups between here and the loop-exit scope,
  // create a block to stage a loop exit along.
  llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
  if (ForScope.requiresCleanups())
    ExitBlock = createBasicBlock("for.cond.cleanup");

  // The loop body, consisting of the specified body and the loop variable.
  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // The body is executed if the expression, contextually converted
  // to bool, is true.
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());
  Builder.CreateCondBr(BoolCondVal, ForBody, ExitBlock);

  if (ExitBlock != LoopExit.getBlock()) {
    EmitBlock(ExitBlock);
    EmitBranchThroughCleanup(LoopExit);
  }

  EmitBlock(ForBody);

  // Create a block for the increment. In case of a 'continue', we jump there.
  JumpDest Continue = getJumpDestInCurrentScope("for.inc");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  {
    // Create a separate cleanup scope for the loop variable and body.
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getLoopVarStmt());
    EmitStmt(S.getBody());
  }

  // If there is an increment, emit it next.
  EmitBlock(Continue.getBlock());
  EmitStmt(S.getInc());

  BreakContinueStack.pop_back();

  EmitBranch(CondBlock);

  ForScope.ForceCleanup();

  if (DI)
    DI->EmitLexicalBlockEnd(Builder, S.getSourceRange().getEnd());

  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock(), true);
}
  590. void CodeGenFunction::EmitReturnOfRValue(RValue RV, QualType Ty) {
  591. if (RV.isScalar()) {
  592. Builder.CreateStore(RV.getScalarVal(), ReturnValue);
  593. } else if (RV.isAggregate()) {
  594. EmitAggregateCopy(ReturnValue, RV.getAggregateAddr(), Ty);
  595. } else {
  596. StoreComplexToAddr(RV.getComplexVal(), ReturnValue, false);
  597. }
  598. EmitBranchThroughCleanup(ReturnBlock);
  599. }
/// EmitReturnStmt - Note that due to GCC extensions, this can have an operand
/// if the function returns void, or may be missing one if the function returns
/// non-void.  Fun stuff :).
void CodeGenFunction::EmitReturnStmt(const ReturnStmt &S) {
  // Emit the result value, even if unused, to evaluate the side effects.
  const Expr *RV = S.getRetValue();

  // FIXME: Clean this up by using an LValue for ReturnTemp,
  // EmitStoreThroughLValue, and EmitAnyExpr.
  if (S.getNRVOCandidate() && S.getNRVOCandidate()->isNRVOVariable() &&
      !Target.useGlobalsForAutomaticVariables()) {
    // Apply the named return value optimization for this return statement,
    // which means doing nothing: the appropriate result has already been
    // constructed into the NRVO variable.

    // If there is an NRVO flag for this variable, set it to 1 to indicate
    // that the cleanup code should not destroy the variable.
    if (llvm::Value *NRVOFlag = NRVOFlags[S.getNRVOCandidate()])
      Builder.CreateStore(Builder.getTrue(), NRVOFlag);
  } else if (!ReturnValue) {
    // No return slot was set up; make sure not to return anything, but
    // evaluate the expression for side effects.
    if (RV)
      EmitAnyExpr(RV);
  } else if (RV == 0) {
    // A bare 'return' with a return slot (GCC extension in a non-void
    // function): do nothing (return value is left uninitialized).
  } else if (FnRetTy->isReferenceType()) {
    // If this function returns a reference, take the address of the expression
    // rather than the value.
    RValue Result = EmitReferenceBindingToExpr(RV, /*InitializedDecl=*/0);
    Builder.CreateStore(Result.getScalarVal(), ReturnValue);
  } else if (!hasAggregateLLVMType(RV->getType())) {
    // Scalar result: evaluate and store it directly into the return slot.
    Builder.CreateStore(EmitScalarExpr(RV), ReturnValue);
  } else if (RV->getType()->isAnyComplexType()) {
    // Complex result: store both components into the return slot.
    EmitComplexExprIntoAddr(RV, ReturnValue, false);
  } else {
    // Aggregate result: evaluate the expression directly into the return slot.
    CharUnits Alignment = getContext().getTypeAlignInChars(RV->getType());
    EmitAggExpr(RV, AggValueSlot::forAddr(ReturnValue, Alignment, Qualifiers(),
                                          AggValueSlot::IsDestructed,
                                          AggValueSlot::DoesNotNeedGCBarriers,
                                          AggValueSlot::IsNotAliased));
  }

  // Run cleanups between here and the function exit, then branch to the
  // shared return block.
  EmitBranchThroughCleanup(ReturnBlock);
}
  642. void CodeGenFunction::EmitDeclStmt(const DeclStmt &S) {
  643. // As long as debug info is modeled with instructions, we have to ensure we
  644. // have a place to insert here and write the stop point here.
  645. if (getDebugInfo() && HaveInsertPoint())
  646. EmitStopPoint(&S);
  647. for (DeclStmt::const_decl_iterator I = S.decl_begin(), E = S.decl_end();
  648. I != E; ++I)
  649. EmitDecl(**I);
  650. }
  651. void CodeGenFunction::EmitBreakStmt(const BreakStmt &S) {
  652. assert(!BreakContinueStack.empty() && "break stmt not in a loop or switch!");
  653. // If this code is reachable then emit a stop point (if generating
  654. // debug info). We have to do this ourselves because we are on the
  655. // "simple" statement path.
  656. if (HaveInsertPoint())
  657. EmitStopPoint(&S);
  658. JumpDest Block = BreakContinueStack.back().BreakBlock;
  659. EmitBranchThroughCleanup(Block);
  660. }
  661. void CodeGenFunction::EmitContinueStmt(const ContinueStmt &S) {
  662. assert(!BreakContinueStack.empty() && "continue stmt not in a loop!");
  663. // If this code is reachable then emit a stop point (if generating
  664. // debug info). We have to do this ourselves because we are on the
  665. // "simple" statement path.
  666. if (HaveInsertPoint())
  667. EmitStopPoint(&S);
  668. JumpDest Block = BreakContinueStack.back().ContinueBlock;
  669. EmitBranchThroughCleanup(Block);
  670. }
/// EmitCaseStmtRange - If case statement range is not too big then
/// add multiple cases to switch instruction, one for each value within
/// the range. If range is too big then emit "if" condition check.
void CodeGenFunction::EmitCaseStmtRange(const CaseStmt &S) {
  assert(S.getRHS() && "Expected RHS value in CaseStmt");

  llvm::APSInt LHS = S.getLHS()->EvaluateKnownConstInt(getContext());
  llvm::APSInt RHS = S.getRHS()->EvaluateKnownConstInt(getContext());

  // Emit the code for this case. We do this first to make sure it is
  // properly chained from our predecessor before generating the
  // switch machinery to enter this block.
  EmitBlock(createBasicBlock("sw.bb"));
  llvm::BasicBlock *CaseDest = Builder.GetInsertBlock();
  EmitStmt(S.getSubStmt());

  // If range is empty, do nothing.
  if (LHS.isSigned() ? RHS.slt(LHS) : RHS.ult(LHS))
    return;

  llvm::APInt Range = RHS - LHS;
  // FIXME: parameters such as this should not be hardcoded.
  if (Range.ult(llvm::APInt(Range.getBitWidth(), 64))) {
    // Range is small enough to add multiple switch instruction cases.
    for (unsigned i = 0, e = Range.getZExtValue() + 1; i != e; ++i) {
      SwitchInsn->addCase(Builder.getInt(LHS), CaseDest);
      LHS++;
    }
    return;
  }

  // The range is too big. Emit "if" condition into a new block,
  // making sure to save and restore the current insertion point.
  llvm::BasicBlock *RestoreBB = Builder.GetInsertBlock();

  // Push this test onto the chain of range checks (which terminates
  // in the default basic block). The switch's default will be changed
  // to the top of this chain after switch emission is complete.
  llvm::BasicBlock *FalseDest = CaseRangeBlock;
  CaseRangeBlock = createBasicBlock("sw.caserange");

  CurFn->getBasicBlockList().push_back(CaseRangeBlock);
  Builder.SetInsertPoint(CaseRangeBlock);

  // Emit range check: the condition is in range iff
  // (unsigned)(cond - LHS) <= (RHS - LHS); on failure fall through to the
  // next range check (or the default) in the chain.
  llvm::Value *Diff =
    Builder.CreateSub(SwitchInsn->getCondition(), Builder.getInt(LHS));
  llvm::Value *Cond =
    Builder.CreateICmpULE(Diff, Builder.getInt(Range), "inbounds");
  Builder.CreateCondBr(Cond, CaseDest, FalseDest);

  // Restore the appropriate insertion point.
  if (RestoreBB)
    Builder.SetInsertPoint(RestoreBB);
  else
    Builder.ClearInsertionPoint();
}
  719. void CodeGenFunction::EmitCaseStmt(const CaseStmt &S) {
  720. // If there is no enclosing switch instance that we're aware of, then this
  721. // case statement and its block can be elided. This situation only happens
  722. // when we've constant-folded the switch, are emitting the constant case,
  723. // and part of the constant case includes another case statement. For
  724. // instance: switch (4) { case 4: do { case 5: } while (1); }
  725. if (!SwitchInsn) {
  726. EmitStmt(S.getSubStmt());
  727. return;
  728. }
  729. // Handle case ranges.
  730. if (S.getRHS()) {
  731. EmitCaseStmtRange(S);
  732. return;
  733. }
  734. llvm::ConstantInt *CaseVal =
  735. Builder.getInt(S.getLHS()->EvaluateKnownConstInt(getContext()));
  736. // If the body of the case is just a 'break', and if there was no fallthrough,
  737. // try to not emit an empty block.
  738. if (isa<BreakStmt>(S.getSubStmt())) {
  739. JumpDest Block = BreakContinueStack.back().BreakBlock;
  740. // Only do this optimization if there are no cleanups that need emitting.
  741. if (isObviouslyBranchWithoutCleanups(Block)) {
  742. SwitchInsn->addCase(CaseVal, Block.getBlock());
  743. // If there was a fallthrough into this case, make sure to redirect it to
  744. // the end of the switch as well.
  745. if (Builder.GetInsertBlock()) {
  746. Builder.CreateBr(Block.getBlock());
  747. Builder.ClearInsertionPoint();
  748. }
  749. return;
  750. }
  751. }
  752. EmitBlock(createBasicBlock("sw.bb"));
  753. llvm::BasicBlock *CaseDest = Builder.GetInsertBlock();
  754. SwitchInsn->addCase(CaseVal, CaseDest);
  755. // Recursively emitting the statement is acceptable, but is not wonderful for
  756. // code where we have many case statements nested together, i.e.:
  757. // case 1:
  758. // case 2:
  759. // case 3: etc.
  760. // Handling this recursively will create a new block for each case statement
  761. // that falls through to the next case which is IR intensive. It also causes
  762. // deep recursion which can run into stack depth limitations. Handle
  763. // sequential non-range case statements specially.
  764. const CaseStmt *CurCase = &S;
  765. const CaseStmt *NextCase = dyn_cast<CaseStmt>(S.getSubStmt());
  766. // Otherwise, iteratively add consecutive cases to this switch stmt.
  767. while (NextCase && NextCase->getRHS() == 0) {
  768. CurCase = NextCase;
  769. llvm::ConstantInt *CaseVal =
  770. Builder.getInt(CurCase->getLHS()->EvaluateKnownConstInt(getContext()));
  771. SwitchInsn->addCase(CaseVal, CaseDest);
  772. NextCase = dyn_cast<CaseStmt>(CurCase->getSubStmt());
  773. }
  774. // Normal default recursion for non-cases.
  775. EmitStmt(CurCase->getSubStmt());
  776. }
  777. void CodeGenFunction::EmitDefaultStmt(const DefaultStmt &S) {
  778. llvm::BasicBlock *DefaultBlock = SwitchInsn->getDefaultDest();
  779. assert(DefaultBlock->empty() &&
  780. "EmitDefaultStmt: Default block already defined?");
  781. EmitBlock(DefaultBlock);
  782. EmitStmt(S.getSubStmt());
  783. }
/// CollectStatementsForCase - Given the body of a 'switch' statement and a
/// constant value that is being switched on, see if we can dead code eliminate
/// the body of the switch to a simple series of statements to emit.  Basically,
/// on a switch (5) we want to find these statements:
///    case 5:
///      printf(...);    <--
///      ++i;            <--
///      break;
///
/// and add them to the ResultStmts vector.  If it is unsafe to do this
/// transformation (for example, one of the elided statements contains a label
/// that might be jumped to), return CSFC_Failure.  If we handled it and 'S'
/// should include statements after it (e.g. the printf() line is a substmt of
/// the case) then return CSFC_FallThrough.  If we handled it and found a break
/// statement, then return CSFC_Success.
///
/// If Case is non-null, then we are looking for the specified case, checking
/// that nothing we jump over contains labels.  If Case is null, then we found
/// the case and are looking for the break.
///
/// If the recursive walk actually finds our Case, then we set FoundCase to
/// true.
///
enum CSFC_Result { CSFC_Failure, CSFC_FallThrough, CSFC_Success };
static CSFC_Result CollectStatementsForCase(const Stmt *S,
                                            const SwitchCase *Case,
                                            bool &FoundCase,
                              SmallVectorImpl<const Stmt*> &ResultStmts) {
  // If this is a null statement, just succeed.
  if (S == 0)
    return Case ? CSFC_Success : CSFC_FallThrough;

  // If this is the switchcase (case 4: or default) that we're looking for, then
  // we're in business.  Just add the substatement.
  if (const SwitchCase *SC = dyn_cast<SwitchCase>(S)) {
    if (S == Case) {
      FoundCase = true;
      // Pass Case=0 from here on: we now collect live statements.
      return CollectStatementsForCase(SC->getSubStmt(), 0, FoundCase,
                                      ResultStmts);
    }

    // Otherwise, this is some other case or default statement, just ignore it.
    return CollectStatementsForCase(SC->getSubStmt(), Case, FoundCase,
                                    ResultStmts);
  }

  // If we are in the live part of the code and we found our break statement,
  // return a success!
  if (Case == 0 && isa<BreakStmt>(S))
    return CSFC_Success;

  // If this is a compound statement, then it might contain the SwitchCase, the
  // break, or neither.
  if (const CompoundStmt *CS = dyn_cast<CompoundStmt>(S)) {
    // Handle this as two cases: we might be looking for the SwitchCase (if so
    // the skipped statements must be skippable) or we might already have it.
    CompoundStmt::const_body_iterator I = CS->body_begin(), E = CS->body_end();
    if (Case) {
      // Keep track of whether we see a skipped declaration.  The code could be
      // using the declaration even if it is skipped, so we can't optimize out
      // the decl if the kept statements might refer to it.
      bool HadSkippedDecl = false;

      // If we're looking for the case, just see if we can skip each of the
      // substatements.
      for (; Case && I != E; ++I) {
        HadSkippedDecl |= isa<DeclStmt>(*I);

        switch (CollectStatementsForCase(*I, Case, FoundCase, ResultStmts)) {
        case CSFC_Failure: return CSFC_Failure;
        case CSFC_Success:
          // A successful result means that either 1) that the statement doesn't
          // have the case and is skippable, or 2) does contain the case value
          // and also contains the break to exit the switch.  In the later case,
          // we just verify the rest of the statements are elidable.
          if (FoundCase) {
            // If we found the case and skipped declarations, we can't do the
            // optimization.
            if (HadSkippedDecl)
              return CSFC_Failure;

            // Everything after the case+break must be elidable (label-free).
            for (++I; I != E; ++I)
              if (CodeGenFunction::ContainsLabel(*I, true))
                return CSFC_Failure;
            return CSFC_Success;
          }
          break;
        case CSFC_FallThrough:
          // If we have a fallthrough condition, then we must have found the
          // case started to include statements.  Consider the rest of the
          // statements in the compound statement as candidates for inclusion.
          assert(FoundCase && "Didn't find case but returned fallthrough?");
          // We recursively found Case, so we're not looking for it anymore.
          Case = 0;

          // If we found the case and skipped declarations, we can't do the
          // optimization.
          if (HadSkippedDecl)
            return CSFC_Failure;
          break;
        }
      }
    }

    // If we have statements in our range, then we know that the statements are
    // live and need to be added to the set of statements we're tracking.
    for (; I != E; ++I) {
      switch (CollectStatementsForCase(*I, 0, FoundCase, ResultStmts)) {
      case CSFC_Failure: return CSFC_Failure;
      case CSFC_FallThrough:
        // A fallthrough result means that the statement was simple and just
        // included in ResultStmt, keep adding them afterwards.
        break;
      case CSFC_Success:
        // A successful result means that we found the break statement and
        // stopped statement inclusion.  We just ensure that any leftover stmts
        // are skippable and return success ourselves.
        for (++I; I != E; ++I)
          if (CodeGenFunction::ContainsLabel(*I, true))
            return CSFC_Failure;
        return CSFC_Success;
      }
    }

    return Case ? CSFC_Success : CSFC_FallThrough;
  }

  // Okay, this is some other statement that we don't handle explicitly, like a
  // for statement or increment etc.  If we are skipping over this statement,
  // just verify it doesn't have labels, which would make it invalid to elide.
  if (Case) {
    if (CodeGenFunction::ContainsLabel(S, true))
      return CSFC_Failure;
    return CSFC_Success;
  }

  // Otherwise, we want to include this statement.  Everything is cool with that
  // so long as it doesn't contain a break out of the switch we're in.
  if (CodeGenFunction::containsBreak(S)) return CSFC_Failure;

  // Otherwise, everything is great.  Include the statement and tell the caller
  // that we fall through and include the next statement as well.
  ResultStmts.push_back(S);
  return CSFC_FallThrough;
}
  916. /// FindCaseStatementsForValue - Find the case statement being jumped to and
  917. /// then invoke CollectStatementsForCase to find the list of statements to emit
  918. /// for a switch on constant. See the comment above CollectStatementsForCase
  919. /// for more details.
  920. static bool FindCaseStatementsForValue(const SwitchStmt &S,
  921. const llvm::APInt &ConstantCondValue,
  922. SmallVectorImpl<const Stmt*> &ResultStmts,
  923. ASTContext &C) {
  924. // First step, find the switch case that is being branched to. We can do this
  925. // efficiently by scanning the SwitchCase list.
  926. const SwitchCase *Case = S.getSwitchCaseList();
  927. const DefaultStmt *DefaultCase = 0;
  928. for (; Case; Case = Case->getNextSwitchCase()) {
  929. // It's either a default or case. Just remember the default statement in
  930. // case we're not jumping to any numbered cases.
  931. if (const DefaultStmt *DS = dyn_cast<DefaultStmt>(Case)) {
  932. DefaultCase = DS;
  933. continue;
  934. }
  935. // Check to see if this case is the one we're looking for.
  936. const CaseStmt *CS = cast<CaseStmt>(Case);
  937. // Don't handle case ranges yet.
  938. if (CS->getRHS()) return false;
  939. // If we found our case, remember it as 'case'.
  940. if (CS->getLHS()->EvaluateKnownConstInt(C) == ConstantCondValue)
  941. break;
  942. }
  943. // If we didn't find a matching case, we use a default if it exists, or we
  944. // elide the whole switch body!
  945. if (Case == 0) {
  946. // It is safe to elide the body of the switch if it doesn't contain labels
  947. // etc. If it is safe, return successfully with an empty ResultStmts list.
  948. if (DefaultCase == 0)
  949. return !CodeGenFunction::ContainsLabel(&S);
  950. Case = DefaultCase;
  951. }
  952. // Ok, we know which case is being jumped to, try to collect all the
  953. // statements that follow it. This can fail for a variety of reasons. Also,
  954. // check to see that the recursive walk actually found our case statement.
  955. // Insane cases like this can fail to find it in the recursive walk since we
  956. // don't handle every stmt kind:
  957. // switch (4) {
  958. // while (1) {
  959. // case 4: ...
  960. bool FoundCase = false;
  961. return CollectStatementsForCase(S.getBody(), Case, FoundCase,
  962. ResultStmts) != CSFC_Failure &&
  963. FoundCase;
  964. }
void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
  // All 'break's inside the switch branch here.
  JumpDest SwitchExit = getJumpDestInCurrentScope("sw.epilog");

  // Scope for the condition variable (if any) and its cleanups.
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitAutoVarDecl(*S.getConditionVariable());

  // Handle nested switch statements.
  llvm::SwitchInst *SavedSwitchInsn = SwitchInsn;
  llvm::BasicBlock *SavedCRBlock = CaseRangeBlock;

  // See if we can constant fold the condition of the switch and therefore only
  // emit the live case statement (if any) of the switch.
  llvm::APInt ConstantCondValue;
  if (ConstantFoldsToSimpleInteger(S.getCond(), ConstantCondValue)) {
    SmallVector<const Stmt*, 4> CaseStmts;
    if (FindCaseStatementsForValue(S, ConstantCondValue, CaseStmts,
                                   getContext())) {
      RunCleanupsScope ExecutedScope(*this);

      // At this point, we are no longer "within" a switch instance, so
      // we can temporarily enforce this to ensure that any embedded case
      // statements are not emitted.
      SwitchInsn = 0;

      // Okay, we can dead code eliminate everything except this case.  Emit the
      // specified series of statements and we're good.
      for (unsigned i = 0, e = CaseStmts.size(); i != e; ++i)
        EmitStmt(CaseStmts[i]);

      // Now we want to restore the saved switch instance so that nested
      // switches continue to function properly
      SwitchInsn = SavedSwitchInsn;

      return;
    }
  }

  llvm::Value *CondV = EmitScalarExpr(S.getCond());

  // Create basic block to hold stuff that comes after switch
  // statement. We also need to create a default block now so that
  // explicit case ranges tests can have a place to jump to on
  // failure.
  llvm::BasicBlock *DefaultBlock = createBasicBlock("sw.default");
  SwitchInsn = Builder.CreateSwitch(CondV, DefaultBlock);
  CaseRangeBlock = DefaultBlock;

  // Clear the insertion point to indicate we are in unreachable code.
  Builder.ClearInsertionPoint();

  // All break statements jump to NextBlock. If BreakContinueStack is non empty
  // then reuse last ContinueBlock.
  JumpDest OuterContinue;
  if (!BreakContinueStack.empty())
    OuterContinue = BreakContinueStack.back().ContinueBlock;

  BreakContinueStack.push_back(BreakContinue(SwitchExit, OuterContinue));

  // Emit switch body.
  EmitStmt(S.getBody());

  BreakContinueStack.pop_back();

  // Update the default block in case explicit case range tests have
  // been chained on top.
  SwitchInsn->setDefaultDest(CaseRangeBlock);

  // If a default was never emitted:
  if (!DefaultBlock->getParent()) {
    // If we have cleanups, emit the default block so that there's a
    // place to jump through the cleanups from.
    if (ConditionScope.requiresCleanups()) {
      EmitBlock(DefaultBlock);

    // Otherwise, just forward the default block to the switch end.
    } else {
      DefaultBlock->replaceAllUsesWith(SwitchExit.getBlock());
      delete DefaultBlock;
    }
  }

  ConditionScope.ForceCleanup();

  // Emit continuation.
  EmitBlock(SwitchExit.getBlock(), true);

  // Restore the enclosing switch's state for nested switches.
  SwitchInsn = SavedSwitchInsn;
  CaseRangeBlock = SavedCRBlock;
}
/// SimplifyConstraint - Translate a GCC inline-asm constraint string into the
/// form the LLVM backend expects: target-specific letters are converted via
/// the TargetInfo, alternatives are separated with '|', and symbolic operand
/// names ("[foo]") are resolved to operand indices (which requires OutCons).
static std::string
SimplifyConstraint(const char *Constraint, const TargetInfo &Target,
                 SmallVectorImpl<TargetInfo::ConstraintInfo> *OutCons=0) {
  std::string Result;

  while (*Constraint) {
    switch (*Constraint) {
    default:
      // Let the target translate any character it handles specially.
      // NOTE(review): presumably convertConstraint may consume more than one
      // character for multi-char constraints — confirm against TargetInfo.
      Result += Target.convertConstraint(Constraint);
      break;
    // Ignore these
    case '*':
    case '?':
    case '!':
    case '=': // Will see this and the following in mult-alt constraints.
    case '+':
      break;
    case ',':
      // Alternative separator: LLVM spells it '|'.
      Result += "|";
      break;
    case 'g':
      // 'g' is GCC shorthand for immediate, memory, or register.
      Result += "imr";
      break;
    case '[': {
      // Symbolic operand name; resolve it to a numeric operand index.
      assert(OutCons &&
             "Must pass output names to constraints with a symbolic name");
      unsigned Index;
      bool result = Target.resolveSymbolicName(Constraint,
                                               &(*OutCons)[0],
                                               OutCons->size(), Index);
      assert(result && "Could not resolve symbolic name"); (void)result;
      Result += llvm::utostr(Index);
      break;
    }
    }

    Constraint++;
  }

  return Result;
}
  1073. /// AddVariableConstraints - Look at AsmExpr and if it is a variable declared
  1074. /// as using a particular register add that as a constraint that will be used
  1075. /// in this asm stmt.
  1076. static std::string
  1077. AddVariableConstraints(const std::string &Constraint, const Expr &AsmExpr,
  1078. const TargetInfo &Target, CodeGenModule &CGM,
  1079. const AsmStmt &Stmt) {
  1080. const DeclRefExpr *AsmDeclRef = dyn_cast<DeclRefExpr>(&AsmExpr);
  1081. if (!AsmDeclRef)
  1082. return Constraint;
  1083. const ValueDecl &Value = *AsmDeclRef->getDecl();
  1084. const VarDecl *Variable = dyn_cast<VarDecl>(&Value);
  1085. if (!Variable)
  1086. return Constraint;
  1087. AsmLabelAttr *Attr = Variable->getAttr<AsmLabelAttr>();
  1088. if (!Attr)
  1089. return Constraint;
  1090. StringRef Register = Attr->getLabel();
  1091. assert(Target.isValidGCCRegisterName(Register));
  1092. // We're using validateOutputConstraint here because we only care if
  1093. // this is a register constraint.
  1094. TargetInfo::ConstraintInfo Info(Constraint, "");
  1095. if (Target.validateOutputConstraint(Info) &&
  1096. !Info.allowsRegister()) {
  1097. CGM.ErrorUnsupported(&Stmt, "__asm__");
  1098. return Constraint;
  1099. }
  1100. // Canonicalize the register here before returning it.
  1101. Register = Target.getNormalizedGCCRegisterName(Register);
  1102. return "{" + Register.str() + "}";
  1103. }
  1104. llvm::Value*
  1105. CodeGenFunction::EmitAsmInputLValue(const AsmStmt &S,
  1106. const TargetInfo::ConstraintInfo &Info,
  1107. LValue InputValue, QualType InputType,
  1108. std::string &ConstraintStr) {
  1109. llvm::Value *Arg;
  1110. if (Info.allowsRegister() || !Info.allowsMemory()) {
  1111. if (!CodeGenFunction::hasAggregateLLVMType(InputType)) {
  1112. Arg = EmitLoadOfLValue(InputValue).getScalarVal();
  1113. } else {
  1114. llvm::Type *Ty = ConvertType(InputType);
  1115. uint64_t Size = CGM.getTargetData().getTypeSizeInBits(Ty);
  1116. if (Size <= 64 && llvm::isPowerOf2_64(Size)) {
  1117. Ty = llvm::IntegerType::get(getLLVMContext(), Size);
  1118. Ty = llvm::PointerType::getUnqual(Ty);
  1119. Arg = Builder.CreateLoad(Builder.CreateBitCast(InputValue.getAddress(),
  1120. Ty));
  1121. } else {
  1122. Arg = InputValue.getAddress();
  1123. ConstraintStr += '*';
  1124. }
  1125. }
  1126. } else {
  1127. Arg = InputValue.getAddress();
  1128. ConstraintStr += '*';
  1129. }
  1130. return Arg;
  1131. }
  1132. llvm::Value* CodeGenFunction::EmitAsmInput(const AsmStmt &S,
  1133. const TargetInfo::ConstraintInfo &Info,
  1134. const Expr *InputExpr,
  1135. std::string &ConstraintStr) {
  1136. if (Info.allowsRegister() || !Info.allowsMemory())
  1137. if (!CodeGenFunction::hasAggregateLLVMType(InputExpr->getType()))
  1138. return EmitScalarExpr(InputExpr);
  1139. InputExpr = InputExpr->IgnoreParenNoopCasts(getContext());
  1140. LValue Dest = EmitLValue(InputExpr);
  1141. return EmitAsmInputLValue(S, Info, Dest, InputExpr->getType(), ConstraintStr);
  1142. }
  1143. /// getAsmSrcLocInfo - Return the !srcloc metadata node to attach to an inline
  1144. /// asm call instruction. The !srcloc MDNode contains a list of constant
  1145. /// integers which are the source locations of the start of each line in the
  1146. /// asm.
  1147. static llvm::MDNode *getAsmSrcLocInfo(const StringLiteral *Str,
  1148. CodeGenFunction &CGF) {
  1149. SmallVector<llvm::Value *, 8> Locs;
  1150. // Add the location of the first line to the MDNode.
  1151. Locs.push_back(llvm::ConstantInt::get(CGF.Int32Ty,
  1152. Str->getLocStart().getRawEncoding()));
  1153. StringRef StrVal = Str->getString();
  1154. if (!StrVal.empty()) {
  1155. const SourceManager &SM = CGF.CGM.getContext().getSourceManager();
  1156. const LangOptions &LangOpts = CGF.CGM.getLangOptions();
  1157. // Add the location of the start of each subsequent line of the asm to the
  1158. // MDNode.
  1159. for (unsigned i = 0, e = StrVal.size()-1; i != e; ++i) {
  1160. if (StrVal[i] != '\n') continue;
  1161. SourceLocation LineLoc = Str->getLocationOfByte(i+1, SM, LangOpts,
  1162. CGF.Target);
  1163. Locs.push_back(llvm::ConstantInt::get(CGF.Int32Ty,
  1164. LineLoc.getRawEncoding()));
  1165. }
  1166. }
  1167. return llvm::MDNode::get(CGF.getLLVMContext(), Locs);
  1168. }
  1169. void CodeGenFunction::EmitAsmStmt(const AsmStmt &S) {
  1170. // Analyze the asm string to decompose it into its pieces. We know that Sema
  1171. // has already done this, so it is guaranteed to be successful.
  1172. SmallVector<AsmStmt::AsmStringPiece, 4> Pieces;
  1173. unsigned DiagOffs;
  1174. S.AnalyzeAsmString(Pieces, getContext(), DiagOffs);
  1175. // Assemble the pieces into the final asm string.
  1176. std::string AsmString;
  1177. for (unsigned i = 0, e = Pieces.size(); i != e; ++i) {
  1178. if (Pieces[i].isString())
  1179. AsmString += Pieces[i].getString();
  1180. else if (Pieces[i].getModifier() == '\0')
  1181. AsmString += '$' + llvm::utostr(Pieces[i].getOperandNo());
  1182. else
  1183. AsmString += "${" + llvm::utostr(Pieces[i].getOperandNo()) + ':' +
  1184. Pieces[i].getModifier() + '}';
  1185. }
  1186. // Get all the output and input constraints together.
  1187. SmallVector<TargetInfo::ConstraintInfo, 4> OutputConstraintInfos;
  1188. SmallVector<TargetInfo::ConstraintInfo, 4> InputConstraintInfos;
  1189. for (unsigned i = 0, e = S.getNumOutputs(); i != e; i++) {
  1190. TargetInfo::ConstraintInfo Info(S.getOutputConstraint(i),
  1191. S.getOutputName(i));
  1192. bool IsValid = Target.validateOutputConstraint(Info); (void)IsValid;
  1193. assert(IsValid && "Failed to parse output constraint");
  1194. OutputConstraintInfos.push_back(Info);
  1195. }
  1196. for (unsigned i = 0, e = S.getNumInputs(); i != e; i++) {
  1197. TargetInfo::ConstraintInfo Info(S.getInputConstraint(i),
  1198. S.getInputName(i));
  1199. bool IsValid = Target.validateInputConstraint(OutputConstraintInfos.data(),
  1200. S.getNumOutputs(), Info);
  1201. assert(IsValid && "Failed to parse input constraint"); (void)IsValid;
  1202. InputConstraintInfos.push_back(Info);
  1203. }
  1204. std::string Constraints;
  1205. std::vector<LValue> ResultRegDests;
  1206. std::vector<QualType> ResultRegQualTys;
  1207. std::vector<llvm::Type *> ResultRegTypes;
  1208. std::vector<llvm::Type *> ResultTruncRegTypes;
  1209. std::vector<llvm::Type*> ArgTypes;
  1210. std::vector<llvm::Value*> Args;
  1211. // Keep track of inout constraints.
  1212. std::string InOutConstraints;
  1213. std::vector<llvm::Value*> InOutArgs;
  1214. std::vector<llvm::Type*> InOutArgTypes;
  1215. for (unsigned i = 0, e = S.getNumOutputs(); i != e; i++) {
  1216. TargetInfo::ConstraintInfo &Info = OutputConstraintInfos[i];
  1217. // Simplify the output constraint.
  1218. std::string OutputConstraint(S.getOutputConstraint(i));
  1219. OutputConstraint = SimplifyConstraint(OutputConstraint.c_str() + 1, Target);
  1220. const Expr *OutExpr = S.getOutputExpr(i);
  1221. OutExpr = OutExpr->IgnoreParenNoopCasts(getContext());
  1222. OutputConstraint = AddVariableConstraints(OutputConstraint, *OutExpr,
  1223. Target, CGM, S);
  1224. LValue Dest = EmitLValue(OutExpr);
  1225. if (!Constraints.empty())
  1226. Constraints += ',';
  1227. // If this is a register output, then make the inline asm return it
  1228. // by-value. If this is a memory result, return the value by-reference.
  1229. if (!Info.allowsMemory() && !hasAggregateLLVMType(OutExpr->getType())) {
  1230. Constraints += "=" + OutputConstraint;
  1231. ResultRegQualTys.push_back(OutExpr->getType());
  1232. ResultRegDests.push_back(Dest);
  1233. ResultRegTypes.push_back(ConvertTypeForMem(OutExpr->getType()));
  1234. ResultTruncRegTypes.push_back(ResultRegTypes.back());
  1235. // If this output is tied to an input, and if the input is larger, then
  1236. // we need to set the actual result type of the inline asm node to be the
  1237. // same as the input type.
  1238. if (Info.hasMatchingInput()) {
  1239. unsigned InputNo;
  1240. for (InputNo = 0; InputNo != S.getNumInputs(); ++InputNo) {
  1241. TargetInfo::ConstraintInfo &Input = InputConstraintInfos[InputNo];
  1242. if (Input.hasTiedOperand() && Input.getTiedOperand() == i)
  1243. break;
  1244. }
  1245. assert(InputNo != S.getNumInputs() && "Didn't find matching input!");
  1246. QualType InputTy = S.getInputExpr(InputNo)->getType();
  1247. QualType OutputType = OutExpr->getType();
  1248. uint64_t InputSize = getContext().getTypeSize(InputTy);
  1249. if (getContext().getTypeSize(OutputType) < InputSize) {
  1250. // Form the asm to return the value as a larger integer or fp type.
  1251. ResultRegTypes.back() = ConvertType(InputTy);
  1252. }
  1253. }
  1254. if (llvm::Type* AdjTy =
  1255. getTargetHooks().adjustInlineAsmType(*this, OutputConstraint,
  1256. ResultRegTypes.back()))
  1257. ResultRegTypes.back() = AdjTy;
  1258. } else {
  1259. ArgTypes.push_back(Dest.getAddress()->getType());
  1260. Args.push_back(Dest.getAddress());
  1261. Constraints += "=*";
  1262. Constraints += OutputConstraint;
  1263. }
  1264. if (Info.isReadWrite()) {
  1265. InOutConstraints += ',';
  1266. const Expr *InputExpr = S.getOutputExpr(i);
  1267. llvm::Value *Arg = EmitAsmInputLValue(S, Info, Dest, InputExpr->getType(),
  1268. InOutConstraints);
  1269. if (Info.allowsRegister())
  1270. InOutConstraints += llvm::utostr(i);
  1271. else
  1272. InOutConstraints += OutputConstraint;
  1273. InOutArgTypes.push_back(Arg->getType());
  1274. InOutArgs.push_back(Arg);
  1275. }
  1276. }
  1277. unsigned NumConstraints = S.getNumOutputs() + S.getNumInputs();
  1278. for (unsigned i = 0, e = S.getNumInputs(); i != e; i++) {
  1279. const Expr *InputExpr = S.getInputExpr(i);
  1280. TargetInfo::ConstraintInfo &Info = InputConstraintInfos[i];
  1281. if (!Constraints.empty())
  1282. Constraints += ',';
  1283. // Simplify the input constraint.
  1284. std::string InputConstraint(S.getInputConstraint(i));
  1285. InputConstraint = SimplifyConstraint(InputConstraint.c_str(), Target,
  1286. &OutputConstraintInfos);
  1287. InputConstraint =
  1288. AddVariableConstraints(InputConstraint,
  1289. *InputExpr->IgnoreParenNoopCasts(getContext()),
  1290. Target, CGM, S);
  1291. llvm::Value *Arg = EmitAsmInput(S, Info, InputExpr, Constraints);
  1292. // If this input argument is tied to a larger output result, extend the
  1293. // input to be the same size as the output. The LLVM backend wants to see
  1294. // the input and output of a matching constraint be the same size. Note
  1295. // that GCC does not define what the top bits are here. We use zext because
  1296. // that is usually cheaper, but LLVM IR should really get an anyext someday.
  1297. if (Info.hasTiedOperand()) {
  1298. unsigned Output = Info.getTiedOperand();
  1299. QualType OutputType = S.getOutputExpr(Output)->getType();
  1300. QualType InputTy = InputExpr->getType();
  1301. if (getContext().getTypeSize(OutputType) >
  1302. getContext().getTypeSize(InputTy)) {
  1303. // Use ptrtoint as appropriate so that we can do our extension.
  1304. if (isa<llvm::PointerType>(Arg->getType()))
  1305. Arg = Builder.CreatePtrToInt(Arg, IntPtrTy);
  1306. llvm::Type *OutputTy = ConvertType(OutputType);
  1307. if (isa<llvm::IntegerType>(OutputTy))
  1308. Arg = Builder.CreateZExt(Arg, OutputTy);
  1309. else if (isa<llvm::PointerType>(OutputTy))
  1310. Arg = Builder.CreateZExt(Arg, IntPtrTy);
  1311. else {
  1312. assert(OutputTy->isFloatingPointTy() && "Unexpected output type");
  1313. Arg = Builder.CreateFPExt(Arg, OutputTy);
  1314. }
  1315. }
  1316. }
  1317. if (llvm::Type* AdjTy =
  1318. getTargetHooks().adjustInlineAsmType(*this, InputConstraint,
  1319. Arg->getType()))
  1320. Arg = Builder.CreateBitCast(Arg, AdjTy);
  1321. ArgTypes.push_back(Arg->getType());
  1322. Args.push_back(Arg);
  1323. Constraints += InputConstraint;
  1324. }
  1325. // Append the "input" part of inout constraints last.
  1326. for (unsigned i = 0, e = InOutArgs.size(); i != e; i++) {
  1327. ArgTypes.push_back(InOutArgTypes[i]);
  1328. Args.push_back(InOutArgs[i]);
  1329. }
  1330. Constraints += InOutConstraints;
  1331. // Clobbers
  1332. for (unsigned i = 0, e = S.getNumClobbers(); i != e; i++) {
  1333. StringRef Clobber = S.getClobber(i)->getString();
  1334. if (Clobber != "memory" && Clobber != "cc")
  1335. Clobber = Target.getNormalizedGCCRegisterName(Clobber);
  1336. if (i != 0 || NumConstraints != 0)
  1337. Constraints += ',';
  1338. Constraints += "~{";
  1339. Constraints += Clobber;
  1340. Constraints += '}';
  1341. }
  1342. // Add machine specific clobbers
  1343. std::string MachineClobbers = Target.getClobbers();
  1344. if (!MachineClobbers.empty()) {
  1345. if (!Constraints.empty())
  1346. Constraints += ',';
  1347. Constraints += MachineClobbers;
  1348. }
  1349. llvm::Type *ResultType;
  1350. if (ResultRegTypes.empty())
  1351. ResultType = VoidTy;
  1352. else if (ResultRegTypes.size() == 1)
  1353. ResultType = ResultRegTypes[0];
  1354. else
  1355. ResultType = llvm::StructType::get(getLLVMContext(), ResultRegTypes);
  1356. llvm::FunctionType *FTy =
  1357. llvm::FunctionType::get(ResultType, ArgTypes, false);
  1358. llvm::InlineAsm *IA =
  1359. llvm::InlineAsm::get(FTy, AsmString, Constraints,
  1360. S.isVolatile() || S.getNumOutputs() == 0);
  1361. llvm::CallInst *Result = Builder.CreateCall(IA, Args);
  1362. Result->addAttribute(~0, llvm::Attribute::NoUnwind);
  1363. // Slap the source location of the inline asm into a !srcloc metadata on the
  1364. // call.
  1365. Result->setMetadata("srcloc", getAsmSrcLocInfo(S.getAsmString(), *this));
  1366. // Extract all of the register value results from the asm.
  1367. std::vector<llvm::Value*> RegResults;
  1368. if (ResultRegTypes.size() == 1) {
  1369. RegResults.push_back(Result);
  1370. } else {
  1371. for (unsigned i = 0, e = ResultRegTypes.size(); i != e; ++i) {
  1372. llvm::Value *Tmp = Builder.CreateExtractValue(Result, i, "asmresult");
  1373. RegResults.push_back(Tmp);
  1374. }
  1375. }
  1376. for (unsigned i = 0, e = RegResults.size(); i != e; ++i) {
  1377. llvm::Value *Tmp = RegResults[i];
  1378. // If the result type of the LLVM IR asm doesn't match the result type of
  1379. // the expression, do the conversion.
  1380. if (ResultRegTypes[i] != ResultTruncRegTypes[i]) {
  1381. llvm::Type *TruncTy = ResultTruncRegTypes[i];
  1382. // Truncate the integer result to the right size, note that TruncTy can be
  1383. // a pointer.
  1384. if (TruncTy->isFloatingPointTy())
  1385. Tmp = Builder.CreateFPTrunc(Tmp, TruncTy);
  1386. else if (TruncTy->isPointerTy() && Tmp->getType()->isIntegerTy()) {
  1387. uint64_t ResSize = CGM.getTargetData().getTypeSizeInBits(TruncTy);
  1388. Tmp = Builder.CreateTrunc(Tmp,
  1389. llvm::IntegerType::get(getLLVMContext(), (unsigned)ResSize));
  1390. Tmp = Builder.CreateIntToPtr(Tmp, TruncTy);
  1391. } else if (Tmp->getType()->isPointerTy() && TruncTy->isIntegerTy()) {
  1392. uint64_t TmpSize =CGM.getTargetData().getTypeSizeInBits(Tmp->getType());
  1393. Tmp = Builder.CreatePtrToInt(Tmp,
  1394. llvm::IntegerType::get(getLLVMContext(), (unsigned)TmpSize));
  1395. Tmp = Builder.CreateTrunc(Tmp, TruncTy);
  1396. } else if (TruncTy->isIntegerTy()) {
  1397. Tmp = Builder.CreateTrunc(Tmp, TruncTy);
  1398. } else if (TruncTy->isVectorTy()) {
  1399. Tmp = Builder.CreateBitCast(Tmp, TruncTy);
  1400. }
  1401. }
  1402. EmitStoreThroughLValue(RValue::get(Tmp), ResultRegDests[i]);
  1403. }
  1404. }