//===--- CGStmt.cpp - Emit LLVM Code from Statements ----------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Stmt nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGDebugInfo.h"
#include "CodeGenModule.h"
#include "TargetInfo.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Sema/LoopHint.h"
#include "clang/Sema/SemaDiagnostic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"

using namespace clang;
using namespace CodeGen;
//===----------------------------------------------------------------------===//
//                              Statement Emission
//===----------------------------------------------------------------------===//

void CodeGenFunction::EmitStopPoint(const Stmt *S) {
  if (CGDebugInfo *DI = getDebugInfo()) {
    SourceLocation Loc;
    Loc = S->getBeginLoc();
    DI->EmitLocation(Builder, Loc);

    LastStopPoint = Loc;
  }
}
void CodeGenFunction::EmitStmt(const Stmt *S, ArrayRef<const Attr *> Attrs) {
  assert(S && "Null statement?");
  PGO.setCurrentStmt(S);

  // These statements have their own debug info handling.
  if (EmitSimpleStmt(S))
    return;

  // Check if we are generating unreachable code.
  if (!HaveInsertPoint()) {
    // If so, and the statement doesn't contain a label, then we do not need to
    // generate actual code. This is safe because (1) the current point is
    // unreachable, so we don't need to execute the code, and (2) we've already
    // handled the statements which update internal data structures (like the
    // local variable map) which could be used by subsequent statements.
    if (!ContainsLabel(S)) {
      // Verify that any decl statements were handled as simple; they may be in
      // scope of subsequent reachable statements.
      assert(!isa<DeclStmt>(*S) && "Unexpected DeclStmt!");
      return;
    }

    // Otherwise, make a new block to hold the code.
    EnsureInsertPoint();
  }

  // Generate a stoppoint if we are emitting debug info.
  EmitStopPoint(S);

  // Ignore all OpenMP directives except for simd if OpenMP with Simd is
  // enabled.
  if (getLangOpts().OpenMP && getLangOpts().OpenMPSimd) {
    if (const auto *D = dyn_cast<OMPExecutableDirective>(S)) {
      EmitSimpleOMPExecutableDirective(*D);
      return;
    }
  }
  switch (S->getStmtClass()) {
  case Stmt::NoStmtClass:
  case Stmt::CXXCatchStmtClass:
  case Stmt::SEHExceptStmtClass:
  case Stmt::SEHFinallyStmtClass:
  case Stmt::MSDependentExistsStmtClass:
    llvm_unreachable("invalid statement class to emit generically");
  case Stmt::NullStmtClass:
  case Stmt::CompoundStmtClass:
  case Stmt::DeclStmtClass:
  case Stmt::LabelStmtClass:
  case Stmt::AttributedStmtClass:
  case Stmt::GotoStmtClass:
  case Stmt::BreakStmtClass:
  case Stmt::ContinueStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::CaseStmtClass:
  case Stmt::SEHLeaveStmtClass:
    llvm_unreachable("should have emitted these statements as simple");

#define STMT(Type, Base)
#define ABSTRACT_STMT(Op)
#define EXPR(Type, Base) \
  case Stmt::Type##Class:
#include "clang/AST/StmtNodes.inc"
  {
    // Remember the block we came in on.
    llvm::BasicBlock *incoming = Builder.GetInsertBlock();
    assert(incoming && "expression emission must have an insertion point");

    EmitIgnoredExpr(cast<Expr>(S));

    llvm::BasicBlock *outgoing = Builder.GetInsertBlock();
    assert(outgoing && "expression emission cleared block!");

    // The expression emitters assume (reasonably!) that the insertion
    // point is always set. To maintain that, the call-emission code
    // for noreturn functions has to enter a new block with no
    // predecessors. We want to kill that block and mark the current
    // insertion point unreachable in the common case of a call like
    // "exit();". Since expression emission doesn't otherwise create
    // blocks with no predecessors, we can just test for that.
    // However, we must be careful not to do this to our incoming
    // block, because *statement* emission does sometimes create
    // reachable blocks which will have no predecessors until later in
    // the function. This occurs with, e.g., labels that are not
    // reachable by fallthrough.
    if (incoming != outgoing && outgoing->use_empty()) {
      outgoing->eraseFromParent();
      Builder.ClearInsertionPoint();
    }
    break;
  }

  case Stmt::IndirectGotoStmtClass:
    EmitIndirectGotoStmt(cast<IndirectGotoStmt>(*S)); break;

  case Stmt::IfStmtClass:     EmitIfStmt(cast<IfStmt>(*S));              break;
  case Stmt::WhileStmtClass:  EmitWhileStmt(cast<WhileStmt>(*S), Attrs); break;
  case Stmt::DoStmtClass:     EmitDoStmt(cast<DoStmt>(*S), Attrs);       break;
  case Stmt::ForStmtClass:    EmitForStmt(cast<ForStmt>(*S), Attrs);     break;
  case Stmt::ReturnStmtClass: EmitReturnStmt(cast<ReturnStmt>(*S));      break;
  case Stmt::SwitchStmtClass: EmitSwitchStmt(cast<SwitchStmt>(*S));      break;
  case Stmt::GCCAsmStmtClass: // Intentional fall-through.
  case Stmt::MSAsmStmtClass:  EmitAsmStmt(cast<AsmStmt>(*S));            break;
  case Stmt::CoroutineBodyStmtClass:
    EmitCoroutineBody(cast<CoroutineBodyStmt>(*S));
    break;
  case Stmt::CoreturnStmtClass:
    EmitCoreturnStmt(cast<CoreturnStmt>(*S));
    break;
  case Stmt::CapturedStmtClass: {
    const CapturedStmt *CS = cast<CapturedStmt>(S);
    EmitCapturedStmt(*CS, CS->getCapturedRegionKind());
    break;
  }
  case Stmt::ObjCAtTryStmtClass:
    EmitObjCAtTryStmt(cast<ObjCAtTryStmt>(*S));
    break;
  case Stmt::ObjCAtCatchStmtClass:
    llvm_unreachable(
        "@catch statements should be handled by EmitObjCAtTryStmt");
  case Stmt::ObjCAtFinallyStmtClass:
    llvm_unreachable(
        "@finally statements should be handled by EmitObjCAtTryStmt");
  case Stmt::ObjCAtThrowStmtClass:
    EmitObjCAtThrowStmt(cast<ObjCAtThrowStmt>(*S));
    break;
  case Stmt::ObjCAtSynchronizedStmtClass:
    EmitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(*S));
    break;
  case Stmt::ObjCForCollectionStmtClass:
    EmitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(*S));
    break;
  case Stmt::ObjCAutoreleasePoolStmtClass:
    EmitObjCAutoreleasePoolStmt(cast<ObjCAutoreleasePoolStmt>(*S));
    break;

  case Stmt::CXXTryStmtClass:
    EmitCXXTryStmt(cast<CXXTryStmt>(*S));
    break;
  case Stmt::CXXForRangeStmtClass:
    EmitCXXForRangeStmt(cast<CXXForRangeStmt>(*S), Attrs);
    break;
  case Stmt::SEHTryStmtClass:
    EmitSEHTryStmt(cast<SEHTryStmt>(*S));
    break;
  case Stmt::OMPParallelDirectiveClass:
    EmitOMPParallelDirective(cast<OMPParallelDirective>(*S));
    break;
  case Stmt::OMPSimdDirectiveClass:
    EmitOMPSimdDirective(cast<OMPSimdDirective>(*S));
    break;
  case Stmt::OMPForDirectiveClass:
    EmitOMPForDirective(cast<OMPForDirective>(*S));
    break;
  case Stmt::OMPForSimdDirectiveClass:
    EmitOMPForSimdDirective(cast<OMPForSimdDirective>(*S));
    break;
  case Stmt::OMPSectionsDirectiveClass:
    EmitOMPSectionsDirective(cast<OMPSectionsDirective>(*S));
    break;
  case Stmt::OMPSectionDirectiveClass:
    EmitOMPSectionDirective(cast<OMPSectionDirective>(*S));
    break;
  case Stmt::OMPSingleDirectiveClass:
    EmitOMPSingleDirective(cast<OMPSingleDirective>(*S));
    break;
  case Stmt::OMPMasterDirectiveClass:
    EmitOMPMasterDirective(cast<OMPMasterDirective>(*S));
    break;
  case Stmt::OMPCriticalDirectiveClass:
    EmitOMPCriticalDirective(cast<OMPCriticalDirective>(*S));
    break;
  case Stmt::OMPParallelForDirectiveClass:
    EmitOMPParallelForDirective(cast<OMPParallelForDirective>(*S));
    break;
  case Stmt::OMPParallelForSimdDirectiveClass:
    EmitOMPParallelForSimdDirective(cast<OMPParallelForSimdDirective>(*S));
    break;
  case Stmt::OMPParallelSectionsDirectiveClass:
    EmitOMPParallelSectionsDirective(cast<OMPParallelSectionsDirective>(*S));
    break;
  case Stmt::OMPTaskDirectiveClass:
    EmitOMPTaskDirective(cast<OMPTaskDirective>(*S));
    break;
  case Stmt::OMPTaskyieldDirectiveClass:
    EmitOMPTaskyieldDirective(cast<OMPTaskyieldDirective>(*S));
    break;
  case Stmt::OMPBarrierDirectiveClass:
    EmitOMPBarrierDirective(cast<OMPBarrierDirective>(*S));
    break;
  case Stmt::OMPTaskwaitDirectiveClass:
    EmitOMPTaskwaitDirective(cast<OMPTaskwaitDirective>(*S));
    break;
  case Stmt::OMPTaskgroupDirectiveClass:
    EmitOMPTaskgroupDirective(cast<OMPTaskgroupDirective>(*S));
    break;
  case Stmt::OMPFlushDirectiveClass:
    EmitOMPFlushDirective(cast<OMPFlushDirective>(*S));
    break;
  case Stmt::OMPOrderedDirectiveClass:
    EmitOMPOrderedDirective(cast<OMPOrderedDirective>(*S));
    break;
  case Stmt::OMPAtomicDirectiveClass:
    EmitOMPAtomicDirective(cast<OMPAtomicDirective>(*S));
    break;
  case Stmt::OMPTargetDirectiveClass:
    EmitOMPTargetDirective(cast<OMPTargetDirective>(*S));
    break;
  case Stmt::OMPTeamsDirectiveClass:
    EmitOMPTeamsDirective(cast<OMPTeamsDirective>(*S));
    break;
  case Stmt::OMPCancellationPointDirectiveClass:
    EmitOMPCancellationPointDirective(cast<OMPCancellationPointDirective>(*S));
    break;
  case Stmt::OMPCancelDirectiveClass:
    EmitOMPCancelDirective(cast<OMPCancelDirective>(*S));
    break;
  case Stmt::OMPTargetDataDirectiveClass:
    EmitOMPTargetDataDirective(cast<OMPTargetDataDirective>(*S));
    break;
  case Stmt::OMPTargetEnterDataDirectiveClass:
    EmitOMPTargetEnterDataDirective(cast<OMPTargetEnterDataDirective>(*S));
    break;
  case Stmt::OMPTargetExitDataDirectiveClass:
    EmitOMPTargetExitDataDirective(cast<OMPTargetExitDataDirective>(*S));
    break;
  case Stmt::OMPTargetParallelDirectiveClass:
    EmitOMPTargetParallelDirective(cast<OMPTargetParallelDirective>(*S));
    break;
  case Stmt::OMPTargetParallelForDirectiveClass:
    EmitOMPTargetParallelForDirective(cast<OMPTargetParallelForDirective>(*S));
    break;
  case Stmt::OMPTaskLoopDirectiveClass:
    EmitOMPTaskLoopDirective(cast<OMPTaskLoopDirective>(*S));
    break;
  case Stmt::OMPTaskLoopSimdDirectiveClass:
    EmitOMPTaskLoopSimdDirective(cast<OMPTaskLoopSimdDirective>(*S));
    break;
  case Stmt::OMPDistributeDirectiveClass:
    EmitOMPDistributeDirective(cast<OMPDistributeDirective>(*S));
    break;
  case Stmt::OMPTargetUpdateDirectiveClass:
    EmitOMPTargetUpdateDirective(cast<OMPTargetUpdateDirective>(*S));
    break;
  case Stmt::OMPDistributeParallelForDirectiveClass:
    EmitOMPDistributeParallelForDirective(
        cast<OMPDistributeParallelForDirective>(*S));
    break;
  case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    EmitOMPDistributeParallelForSimdDirective(
        cast<OMPDistributeParallelForSimdDirective>(*S));
    break;
  case Stmt::OMPDistributeSimdDirectiveClass:
    EmitOMPDistributeSimdDirective(cast<OMPDistributeSimdDirective>(*S));
    break;
  case Stmt::OMPTargetParallelForSimdDirectiveClass:
    EmitOMPTargetParallelForSimdDirective(
        cast<OMPTargetParallelForSimdDirective>(*S));
    break;
  case Stmt::OMPTargetSimdDirectiveClass:
    EmitOMPTargetSimdDirective(cast<OMPTargetSimdDirective>(*S));
    break;
  case Stmt::OMPTeamsDistributeDirectiveClass:
    EmitOMPTeamsDistributeDirective(cast<OMPTeamsDistributeDirective>(*S));
    break;
  case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    EmitOMPTeamsDistributeSimdDirective(
        cast<OMPTeamsDistributeSimdDirective>(*S));
    break;
  case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    EmitOMPTeamsDistributeParallelForSimdDirective(
        cast<OMPTeamsDistributeParallelForSimdDirective>(*S));
    break;
  case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
    EmitOMPTeamsDistributeParallelForDirective(
        cast<OMPTeamsDistributeParallelForDirective>(*S));
    break;
  case Stmt::OMPTargetTeamsDirectiveClass:
    EmitOMPTargetTeamsDirective(cast<OMPTargetTeamsDirective>(*S));
    break;
  case Stmt::OMPTargetTeamsDistributeDirectiveClass:
    EmitOMPTargetTeamsDistributeDirective(
        cast<OMPTargetTeamsDistributeDirective>(*S));
    break;
  case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
    EmitOMPTargetTeamsDistributeParallelForDirective(
        cast<OMPTargetTeamsDistributeParallelForDirective>(*S));
    break;
  case Stmt::OMPTargetTeamsDistributeParallelForSimdDirectiveClass:
    EmitOMPTargetTeamsDistributeParallelForSimdDirective(
        cast<OMPTargetTeamsDistributeParallelForSimdDirective>(*S));
    break;
  case Stmt::OMPTargetTeamsDistributeSimdDirectiveClass:
    EmitOMPTargetTeamsDistributeSimdDirective(
        cast<OMPTargetTeamsDistributeSimdDirective>(*S));
    break;
  }
}
bool CodeGenFunction::EmitSimpleStmt(const Stmt *S) {
  switch (S->getStmtClass()) {
  default: return false;
  case Stmt::NullStmtClass: break;
  case Stmt::CompoundStmtClass: EmitCompoundStmt(cast<CompoundStmt>(*S)); break;
  case Stmt::DeclStmtClass:     EmitDeclStmt(cast<DeclStmt>(*S));         break;
  case Stmt::LabelStmtClass:    EmitLabelStmt(cast<LabelStmt>(*S));       break;
  case Stmt::AttributedStmtClass:
    EmitAttributedStmt(cast<AttributedStmt>(*S)); break;
  case Stmt::GotoStmtClass:     EmitGotoStmt(cast<GotoStmt>(*S));         break;
  case Stmt::BreakStmtClass:    EmitBreakStmt(cast<BreakStmt>(*S));       break;
  case Stmt::ContinueStmtClass: EmitContinueStmt(cast<ContinueStmt>(*S)); break;
  case Stmt::DefaultStmtClass:  EmitDefaultStmt(cast<DefaultStmt>(*S));   break;
  case Stmt::CaseStmtClass:     EmitCaseStmt(cast<CaseStmt>(*S));         break;
  case Stmt::SEHLeaveStmtClass: EmitSEHLeaveStmt(cast<SEHLeaveStmt>(*S)); break;
  }

  return true;
}
/// EmitCompoundStmt - Emit a compound statement {..} node. If GetLast is true,
/// this captures the expression result of the last sub-statement and returns
/// it (for use by the statement expression extension).
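///
/// For illustration ('f' is a placeholder), a GNU statement expression:
///   int x = ({ int y = f(); y * 2; });  // 'x' receives the value of 'y * 2'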
Address CodeGenFunction::EmitCompoundStmt(const CompoundStmt &S, bool GetLast,
                                          AggValueSlot AggSlot) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),S.getLBracLoc(),
                                "LLVM IR generation of compound statement ('{}')");

  // Keep track of the current cleanup stack depth, including debug scopes.
  LexicalScope Scope(*this, S.getSourceRange());

  return EmitCompoundStmtWithoutScope(S, GetLast, AggSlot);
}

Address
CodeGenFunction::EmitCompoundStmtWithoutScope(const CompoundStmt &S,
                                              bool GetLast,
                                              AggValueSlot AggSlot) {
  for (CompoundStmt::const_body_iterator I = S.body_begin(),
       E = S.body_end()-GetLast; I != E; ++I)
    EmitStmt(*I);

  Address RetAlloca = Address::invalid();
  if (GetLast) {
    // We have to special case labels here. They are statements, but when put
    // at the end of a statement expression, they yield the value of their
    // subexpression. Handle this by walking through all labels we encounter,
    // emitting them before we evaluate the subexpr.
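    // For illustration (placeholder names): in ({ ...; done: last; }), the
    // label 'done' is emitted before 'last' is evaluated as the result.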
    const Stmt *LastStmt = S.body_back();
    while (const LabelStmt *LS = dyn_cast<LabelStmt>(LastStmt)) {
      EmitLabel(LS->getDecl());
      LastStmt = LS->getSubStmt();
    }

    EnsureInsertPoint();

    QualType ExprTy = cast<Expr>(LastStmt)->getType();
    if (hasAggregateEvaluationKind(ExprTy)) {
      EmitAggExpr(cast<Expr>(LastStmt), AggSlot);
    } else {
      // We can't return an RValue here because there might be cleanups at
      // the end of the StmtExpr. Because of that, we have to emit the result
      // here into a temporary alloca.
      RetAlloca = CreateMemTemp(ExprTy);
      EmitAnyExprToMem(cast<Expr>(LastStmt), RetAlloca, Qualifiers(),
                       /*IsInit*/false);
    }
  }

  return RetAlloca;
}
void CodeGenFunction::SimplifyForwardingBlocks(llvm::BasicBlock *BB) {
  llvm::BranchInst *BI = dyn_cast<llvm::BranchInst>(BB->getTerminator());

  // If there is a cleanup stack, then it isn't worth trying to
  // simplify this block (we would need to remove it from the scope map
  // and cleanup entry).
  if (!EHStack.empty())
    return;

  // Can only simplify direct branches.
  if (!BI || !BI->isUnconditional())
    return;

  // Can only simplify empty blocks.
  if (BI->getIterator() != BB->begin())
    return;

  BB->replaceAllUsesWith(BI->getSuccessor(0));
  BI->eraseFromParent();
  BB->eraseFromParent();
}
void CodeGenFunction::EmitBlock(llvm::BasicBlock *BB, bool IsFinished) {
  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();

  // Fall out of the current block (if necessary).
  EmitBranch(BB);

  if (IsFinished && BB->use_empty()) {
    delete BB;
    return;
  }

  // Place the block after the current block, if possible, or else at
  // the end of the function.
  if (CurBB && CurBB->getParent())
    CurFn->getBasicBlockList().insertAfter(CurBB->getIterator(), BB);
  else
    CurFn->getBasicBlockList().push_back(BB);
  Builder.SetInsertPoint(BB);
}

void CodeGenFunction::EmitBranch(llvm::BasicBlock *Target) {
  // Emit a branch from the current block to the target one if this
  // was a real block. If this was just a fall-through block after a
  // terminator, don't emit it.
  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();

  if (!CurBB || CurBB->getTerminator()) {
    // If there is no insert point or the previous block is already
    // terminated, don't touch it.
  } else {
    // Otherwise, create a fall-through branch.
    Builder.CreateBr(Target);
  }

  Builder.ClearInsertionPoint();
}

void CodeGenFunction::EmitBlockAfterUses(llvm::BasicBlock *block) {
  bool inserted = false;
  for (llvm::User *u : block->users()) {
    if (llvm::Instruction *insn = dyn_cast<llvm::Instruction>(u)) {
      CurFn->getBasicBlockList().insertAfter(insn->getParent()->getIterator(),
                                             block);
      inserted = true;
      break;
    }
  }

  if (!inserted)
    CurFn->getBasicBlockList().push_back(block);

  Builder.SetInsertPoint(block);
}
CodeGenFunction::JumpDest
CodeGenFunction::getJumpDestForLabel(const LabelDecl *D) {
  JumpDest &Dest = LabelMap[D];
  if (Dest.isValid()) return Dest;

  // Create, but don't insert, the new block.
  Dest = JumpDest(createBasicBlock(D->getName()),
                  EHScopeStack::stable_iterator::invalid(),
                  NextCleanupDestIndex++);
  return Dest;
}

void CodeGenFunction::EmitLabel(const LabelDecl *D) {
  // Add this label to the current lexical scope if we're within any
  // normal cleanups. Jumps "in" to this label --- when permitted by
  // the language --- may need to be routed around such cleanups.
  if (EHStack.hasNormalCleanups() && CurLexicalScope)
    CurLexicalScope->addLabel(D);

  JumpDest &Dest = LabelMap[D];

  // If we didn't need a forward reference to this label, just go
  // ahead and create a destination at the current scope.
  if (!Dest.isValid()) {
    Dest = getJumpDestInCurrentScope(D->getName());

  // Otherwise, we need to give this label a target depth and remove
  // it from the branch-fixups list.
  } else {
    assert(!Dest.getScopeDepth().isValid() && "already emitted label!");
    Dest.setScopeDepth(EHStack.stable_begin());
    ResolveBranchFixups(Dest.getBlock());
  }

  EmitBlock(Dest.getBlock());
  incrementProfileCounter(D->getStmt());
}

/// Change the cleanup scope of the labels in this lexical scope to
/// match the scope of the enclosing context.
void CodeGenFunction::LexicalScope::rescopeLabels() {
  assert(!Labels.empty());
  EHScopeStack::stable_iterator innermostScope
    = CGF.EHStack.getInnermostNormalCleanup();

  // Change the scope depth of all the labels.
  for (SmallVectorImpl<const LabelDecl*>::const_iterator
         i = Labels.begin(), e = Labels.end(); i != e; ++i) {
    assert(CGF.LabelMap.count(*i));
    JumpDest &dest = CGF.LabelMap.find(*i)->second;
    assert(dest.getScopeDepth().isValid());
    assert(innermostScope.encloses(dest.getScopeDepth()));
    dest.setScopeDepth(innermostScope);
  }

  // Reparent the labels if the new scope also has cleanups.
  if (innermostScope != EHScopeStack::stable_end() && ParentScope) {
    ParentScope->Labels.append(Labels.begin(), Labels.end());
  }
}

void CodeGenFunction::EmitLabelStmt(const LabelStmt &S) {
  EmitLabel(S.getDecl());
  EmitStmt(S.getSubStmt());
}

void CodeGenFunction::EmitAttributedStmt(const AttributedStmt &S) {
  EmitStmt(S.getSubStmt(), S.getAttrs());
}
void CodeGenFunction::EmitGotoStmt(const GotoStmt &S) {
  // If this code is reachable then emit a stop point (if generating
  // debug info). We have to do this ourselves because we are on the
  // "simple" statement path.
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  EmitBranchThroughCleanup(getJumpDestForLabel(S.getLabel()));
}

void CodeGenFunction::EmitIndirectGotoStmt(const IndirectGotoStmt &S) {
  if (const LabelDecl *Target = S.getConstantTarget()) {
    EmitBranchThroughCleanup(getJumpDestForLabel(Target));
    return;
  }
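  // For illustration (placeholder names), an indirect goto uses the GNU
  // computed-goto extension:
  //   void *targets[] = { &&lbl1, &&lbl2 };
  //   goto *targets[i];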
  // Ensure that we have an i8* for our PHI node.
  llvm::Value *V = Builder.CreateBitCast(EmitScalarExpr(S.getTarget()),
                                         Int8PtrTy, "addr");
  llvm::BasicBlock *CurBB = Builder.GetInsertBlock();

  // Get the basic block for the indirect goto.
  llvm::BasicBlock *IndGotoBB = GetIndirectGotoBlock();

  // The first instruction in the block has to be the PHI for the switch dest,
  // add an entry for this branch.
  cast<llvm::PHINode>(IndGotoBB->begin())->addIncoming(V, CurBB);

  EmitBranch(IndGotoBB);
}
void CodeGenFunction::EmitIfStmt(const IfStmt &S) {
  // C99 6.8.4.1: The first substatement is executed if the expression compares
  // unequal to 0. The condition must be a scalar type.
  LexicalScope ConditionScope(*this, S.getCond()->getSourceRange());

  if (S.getInit())
    EmitStmt(S.getInit());

  if (S.getConditionVariable())
    EmitDecl(*S.getConditionVariable());

  // If the condition constant folds and can be elided, try to avoid emitting
  // the condition and the dead arm of the if/else.
  bool CondConstant;
  if (ConstantFoldsToSimpleInteger(S.getCond(), CondConstant,
                                   S.isConstexpr())) {
    // Figure out which block (then or else) is executed.
    const Stmt *Executed = S.getThen();
    const Stmt *Skipped  = S.getElse();
    if (!CondConstant)  // Condition false?
      std::swap(Executed, Skipped);

    // If the skipped block has no labels in it, just emit the executed block.
    // This avoids emitting dead code and simplifies the CFG substantially.
    if (S.isConstexpr() || !ContainsLabel(Skipped)) {
      if (CondConstant)
        incrementProfileCounter(&S);
      if (Executed) {
        RunCleanupsScope ExecutedScope(*this);
        EmitStmt(Executed);
      }
      return;
    }
  }

  // Otherwise, the condition did not fold, or we couldn't elide it. Just
  // emit the conditional branch.
  llvm::BasicBlock *ThenBlock = createBasicBlock("if.then");
  llvm::BasicBlock *ContBlock = createBasicBlock("if.end");
  llvm::BasicBlock *ElseBlock = ContBlock;
  if (S.getElse())
    ElseBlock = createBasicBlock("if.else");

  EmitBranchOnBoolExpr(S.getCond(), ThenBlock, ElseBlock,
                       getProfileCount(S.getThen()));

  // Emit the 'then' code.
  EmitBlock(ThenBlock);
  incrementProfileCounter(&S);
  {
    RunCleanupsScope ThenScope(*this);
    EmitStmt(S.getThen());
  }
  EmitBranch(ContBlock);

  // Emit the 'else' code if present.
  if (const Stmt *Else = S.getElse()) {
    {
      // There is no need to emit a line number for an unconditional branch.
      auto NL = ApplyDebugLocation::CreateEmpty(*this);
      EmitBlock(ElseBlock);
    }
    {
      RunCleanupsScope ElseScope(*this);
      EmitStmt(Else);
    }
    {
      // There is no need to emit a line number for an unconditional branch.
      auto NL = ApplyDebugLocation::CreateEmpty(*this);
      EmitBranch(ContBlock);
    }
  }

  // Emit the continuation block for code after the if.
  EmitBlock(ContBlock, true);
}
void CodeGenFunction::EmitWhileStmt(const WhileStmt &S,
                                    ArrayRef<const Attr *> WhileAttrs) {
  // Emit the header for the loop, which will also become
  // the continue target.
  JumpDest LoopHeader = getJumpDestInCurrentScope("while.cond");
  EmitBlock(LoopHeader.getBlock());

  const SourceRange &R = S.getSourceRange();
  LoopStack.push(LoopHeader.getBlock(), CGM.getContext(), WhileAttrs,
                 SourceLocToDebugLoc(R.getBegin()),
                 SourceLocToDebugLoc(R.getEnd()));

  // Create an exit block for when the condition fails, which will
  // also become the break target.
  JumpDest LoopExit = getJumpDestInCurrentScope("while.end");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopHeader));

  // C++ [stmt.while]p2:
  //   When the condition of a while statement is a declaration, the
  //   scope of the variable that is declared extends from its point
  //   of declaration (3.3.2) to the end of the while statement.
  //   [...]
  //   The object created in a condition is destroyed and created
  //   with each iteration of the loop.
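  // For illustration ('T' and 'next' are placeholders):
  //   while (T t = next()) { ... }  // 't' is created and destroyed on
  //                                 // every iteration of the loop.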
  RunCleanupsScope ConditionScope(*this);

  if (S.getConditionVariable())
    EmitDecl(*S.getConditionVariable());

  // Evaluate the conditional in the while header. C99 6.8.5.1: The
  // evaluation of the controlling expression takes place before each
  // execution of the loop body.
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());

  // while(1) is common, avoid extra exit blocks. Be sure
  // to correctly handle break/continue though.
  bool EmitBoolCondBranch = true;
  if (llvm::ConstantInt *C = dyn_cast<llvm::ConstantInt>(BoolCondVal))
    if (C->isOne())
      EmitBoolCondBranch = false;

  // As long as the condition is true, go to the loop body.
  llvm::BasicBlock *LoopBody = createBasicBlock("while.body");
  if (EmitBoolCondBranch) {
    llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
    if (ConditionScope.requiresCleanups())
      ExitBlock = createBasicBlock("while.exit");
    Builder.CreateCondBr(
        BoolCondVal, LoopBody, ExitBlock,
        createProfileWeightsForLoop(S.getCond(), getProfileCount(S.getBody())));

    if (ExitBlock != LoopExit.getBlock()) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }
  }

  // Emit the loop body. We have to emit this in a cleanup scope
  // because it might be a singleton DeclStmt.
  {
    RunCleanupsScope BodyScope(*this);
    EmitBlock(LoopBody);
    incrementProfileCounter(&S);
    EmitStmt(S.getBody());
  }

  BreakContinueStack.pop_back();

  // Immediately force cleanup.
  ConditionScope.ForceCleanup();

  EmitStopPoint(&S);
  // Branch to the loop header again.
  EmitBranch(LoopHeader.getBlock());

  LoopStack.pop();

  // Emit the exit block.
  EmitBlock(LoopExit.getBlock(), true);

  // If we skipped emitting the condition branch, the loop header is typically
  // just a forwarding branch; try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopHeader.getBlock());
}
void CodeGenFunction::EmitDoStmt(const DoStmt &S,
                                 ArrayRef<const Attr *> DoAttrs) {
  JumpDest LoopExit = getJumpDestInCurrentScope("do.end");
  JumpDest LoopCond = getJumpDestInCurrentScope("do.cond");

  uint64_t ParentCount = getCurrentProfileCount();

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, LoopCond));

  // Emit the body of the loop.
  llvm::BasicBlock *LoopBody = createBasicBlock("do.body");

  EmitBlockWithFallThrough(LoopBody, &S);
  {
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getBody());
  }

  EmitBlock(LoopCond.getBlock());

  const SourceRange &R = S.getSourceRange();
  LoopStack.push(LoopBody, CGM.getContext(), DoAttrs,
                 SourceLocToDebugLoc(R.getBegin()),
                 SourceLocToDebugLoc(R.getEnd()));

  // C99 6.8.5.2: "The evaluation of the controlling expression takes place
  // after each execution of the loop body."

  // Evaluate the conditional in the do..while header.
  // C99 6.8.5p2/p4: The first substatement is executed if the expression
  // compares unequal to 0. The condition must be a scalar type.
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());

  BreakContinueStack.pop_back();

  // "do {} while (0)" is common in macros, avoid extra blocks. Be sure
  // to correctly handle break/continue though.
  bool EmitBoolCondBranch = true;
  if (llvm::ConstantInt *C = dyn_cast<llvm::ConstantInt>(BoolCondVal))
    if (C->isZero())
      EmitBoolCondBranch = false;

  // As long as the condition is true, iterate the loop.
  if (EmitBoolCondBranch) {
    uint64_t BackedgeCount = getProfileCount(S.getBody()) - ParentCount;
    Builder.CreateCondBr(
        BoolCondVal, LoopBody, LoopExit.getBlock(),
        createProfileWeightsForLoop(S.getCond(), BackedgeCount));
  }

  LoopStack.pop();

  // Emit the exit block.
  EmitBlock(LoopExit.getBlock());

  // If we skipped emitting the condition branch, the do.cond block is
  // typically just a forwarding branch; try to erase it.
  if (!EmitBoolCondBranch)
    SimplifyForwardingBlocks(LoopCond.getBlock());
}
void CodeGenFunction::EmitForStmt(const ForStmt &S,
                                  ArrayRef<const Attr *> ForAttrs) {
  JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

  LexicalScope ForScope(*this, S.getSourceRange());

  // Evaluate the first part before the loop.
  if (S.getInit())
    EmitStmt(S.getInit());

  // Start the loop with a block that tests the condition.
  // If there's an increment, the continue scope will be overwritten
  // later.
  JumpDest Continue = getJumpDestInCurrentScope("for.cond");
  llvm::BasicBlock *CondBlock = Continue.getBlock();
  EmitBlock(CondBlock);

  const SourceRange &R = S.getSourceRange();
  LoopStack.push(CondBlock, CGM.getContext(), ForAttrs,
                 SourceLocToDebugLoc(R.getBegin()),
                 SourceLocToDebugLoc(R.getEnd()));

  // If the for loop doesn't have an increment we can just use the
  // condition as the continue block. Otherwise we'll need to create
  // a block for it (in the current scope, i.e. in the scope of the
  // condition), and that block will become our continue block.
  if (S.getInc())
    Continue = getJumpDestInCurrentScope("for.inc");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  // Create a cleanup scope for the condition variable cleanups.
  LexicalScope ConditionScope(*this, S.getSourceRange());

  if (S.getCond()) {
    // If the for statement has a condition scope, emit the local variable
    // declaration.
    if (S.getConditionVariable()) {
      EmitDecl(*S.getConditionVariable());
    }

    llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
    // If there are any cleanups between here and the loop-exit scope,
    // create a block to stage a loop exit along.
    if (ForScope.requiresCleanups())
      ExitBlock = createBasicBlock("for.cond.cleanup");

    // As long as the condition is true, iterate the loop.
    llvm::BasicBlock *ForBody = createBasicBlock("for.body");

    // C99 6.8.5p2/p4: The first substatement is executed if the expression
    // compares unequal to 0. The condition must be a scalar type.
    llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());
    Builder.CreateCondBr(
        BoolCondVal, ForBody, ExitBlock,
        createProfileWeightsForLoop(S.getCond(), getProfileCount(S.getBody())));

    if (ExitBlock != LoopExit.getBlock()) {
      EmitBlock(ExitBlock);
      EmitBranchThroughCleanup(LoopExit);
    }

    EmitBlock(ForBody);
  } else {
    // Treat it as a non-zero constant. Don't even create a new block for the
    // body, just fall into it.
  }
  incrementProfileCounter(&S);

  {
    // Create a separate cleanup scope for the body, in case it is not
    // a compound statement.
    RunCleanupsScope BodyScope(*this);
    EmitStmt(S.getBody());
  }

  // If there is an increment, emit it next.
  if (S.getInc()) {
    EmitBlock(Continue.getBlock());
    EmitStmt(S.getInc());
  }

  BreakContinueStack.pop_back();

  ConditionScope.ForceCleanup();

  EmitStopPoint(&S);
  EmitBranch(CondBlock);

  ForScope.ForceCleanup();

  LoopStack.pop();

  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock(), true);
}
void
CodeGenFunction::EmitCXXForRangeStmt(const CXXForRangeStmt &S,
                                     ArrayRef<const Attr *> ForAttrs) {
  JumpDest LoopExit = getJumpDestInCurrentScope("for.end");

  LexicalScope ForScope(*this, S.getSourceRange());

  // Evaluate the first pieces before the loop.
  if (S.getInit())
    EmitStmt(S.getInit());
  EmitStmt(S.getRangeStmt());
  EmitStmt(S.getBeginStmt());
  EmitStmt(S.getEndStmt());

  // Start the loop with a block that tests the condition.
  // If there's an increment, the continue scope will be overwritten
  // later.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  EmitBlock(CondBlock);

  const SourceRange &R = S.getSourceRange();
  LoopStack.push(CondBlock, CGM.getContext(), ForAttrs,
                 SourceLocToDebugLoc(R.getBegin()),
                 SourceLocToDebugLoc(R.getEnd()));

  // If there are any cleanups between here and the loop-exit scope,
  // create a block to stage a loop exit along.
  llvm::BasicBlock *ExitBlock = LoopExit.getBlock();
  if (ForScope.requiresCleanups())
    ExitBlock = createBasicBlock("for.cond.cleanup");

  // The loop body, consisting of the specified body and the loop variable.
  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // The body is executed if the expression, contextually converted
  // to bool, is true.
  llvm::Value *BoolCondVal = EvaluateExprAsBool(S.getCond());
  Builder.CreateCondBr(
      BoolCondVal, ForBody, ExitBlock,
      createProfileWeightsForLoop(S.getCond(), getProfileCount(S.getBody())));

  if (ExitBlock != LoopExit.getBlock()) {
    EmitBlock(ExitBlock);
    EmitBranchThroughCleanup(LoopExit);
  }

  EmitBlock(ForBody);
  incrementProfileCounter(&S);

  // Create a block for the increment. In case of a 'continue', we jump there.
  JumpDest Continue = getJumpDestInCurrentScope("for.inc");

  // Store the blocks to use for break and continue.
  BreakContinueStack.push_back(BreakContinue(LoopExit, Continue));

  {
    // Create a separate cleanup scope for the loop variable and body.
    LexicalScope BodyScope(*this, S.getSourceRange());
    EmitStmt(S.getLoopVarStmt());
    EmitStmt(S.getBody());
  }

  EmitStopPoint(&S);
  // If there is an increment, emit it next.
  EmitBlock(Continue.getBlock());
  EmitStmt(S.getInc());

  BreakContinueStack.pop_back();

  EmitBranch(CondBlock);

  ForScope.ForceCleanup();

  LoopStack.pop();

  // Emit the fall-through block.
  EmitBlock(LoopExit.getBlock(), true);
}
void CodeGenFunction::EmitReturnOfRValue(RValue RV, QualType Ty) {
  if (RV.isScalar()) {
    Builder.CreateStore(RV.getScalarVal(), ReturnValue);
  } else if (RV.isAggregate()) {
    LValue Dest = MakeAddrLValue(ReturnValue, Ty);
    LValue Src = MakeAddrLValue(RV.getAggregateAddress(), Ty);
    EmitAggregateCopy(Dest, Src, Ty, overlapForReturnValue());
  } else {
    EmitStoreOfComplex(RV.getComplexVal(), MakeAddrLValue(ReturnValue, Ty),
                       /*init*/ true);
  }
  EmitBranchThroughCleanup(ReturnBlock);
}
/// EmitReturnStmt - Note that due to GCC extensions, this can have an operand
/// if the function returns void, or may be missing one if the function returns
/// non-void. Fun stuff :).
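///
/// For illustration (GCC extension, placeholder names):
///   void f() { return 1; }  // operand despite a void return type
///   int g() { return; }     // no operand despite a non-void return type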
void CodeGenFunction::EmitReturnStmt(const ReturnStmt &S) {
  if (requiresReturnValueCheck()) {
    llvm::Constant *SLoc = EmitCheckSourceLocation(S.getBeginLoc());
    auto *SLocPtr =
        new llvm::GlobalVariable(CGM.getModule(), SLoc->getType(), false,
                                 llvm::GlobalVariable::PrivateLinkage, SLoc);
    SLocPtr->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
    CGM.getSanitizerMetadata()->disableSanitizerForGlobal(SLocPtr);
    assert(ReturnLocation.isValid() && "No valid return location");
    Builder.CreateStore(Builder.CreateBitCast(SLocPtr, Int8PtrTy),
                        ReturnLocation);
  }

  // Returning from an outlined SEH helper is UB, and we already warn on it.
  if (IsOutlinedSEHHelper) {
    Builder.CreateUnreachable();
    Builder.ClearInsertionPoint();
  }

  // Emit the result value, even if unused, to evaluate the side effects.
  const Expr *RV = S.getRetValue();

  // Treat block literals in a return expression as if they appeared
  // in their own scope. This permits a small, easily-implemented
  // exception to our over-conservative rules about not jumping to
  // statements following block literals with non-trivial cleanups.
  RunCleanupsScope cleanupScope(*this);
  if (const FullExpr *fe = dyn_cast_or_null<FullExpr>(RV)) {
    enterFullExpression(fe);
    RV = fe->getSubExpr();
  }

  // FIXME: Clean this up by using an LValue for ReturnTemp,
  // EmitStoreThroughLValue, and EmitAnyExpr.
  if (getLangOpts().ElideConstructors &&
      S.getNRVOCandidate() && S.getNRVOCandidate()->isNRVOVariable()) {
    // Apply the named return value optimization for this return statement,
    // which means doing nothing: the appropriate result has already been
    // constructed into the NRVO variable.

    // If there is an NRVO flag for this variable, set it to 1 to indicate
    // that the cleanup code should not destroy the variable.
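    // For illustration (placeholder names), a typical NRVO case:
    //   X f() { X x; /*...*/ return x; }  // 'x' is built in the return slot,
    //                                     // so there is nothing left to copy.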
    if (llvm::Value *NRVOFlag = NRVOFlags[S.getNRVOCandidate()])
      Builder.CreateFlagStore(Builder.getTrue(), NRVOFlag);
  } else if (!ReturnValue.isValid() || (RV && RV->getType()->isVoidType())) {
    // Make sure not to return anything, but evaluate the expression
    // for side effects.
    if (RV)
      EmitAnyExpr(RV);
  } else if (!RV) {
    // Do nothing (return value is left uninitialized)
  } else if (FnRetTy->isReferenceType()) {
    // If this function returns a reference, take the address of the expression
    // rather than the value.
    RValue Result = EmitReferenceBindingToExpr(RV);
    Builder.CreateStore(Result.getScalarVal(), ReturnValue);
  } else {
    switch (getEvaluationKind(RV->getType())) {
    case TEK_Scalar:
      Builder.CreateStore(EmitScalarExpr(RV), ReturnValue);
      break;
    case TEK_Complex:
      EmitComplexExprIntoLValue(RV, MakeAddrLValue(ReturnValue, RV->getType()),
                                /*isInit*/ true);
      break;
    case TEK_Aggregate:
      EmitAggExpr(RV, AggValueSlot::forAddr(
                          ReturnValue, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased,
                          overlapForReturnValue()));
      break;
    }
  }

  ++NumReturnExprs;
  if (!RV || RV->isEvaluatable(getContext()))
    ++NumSimpleReturnExprs;

  cleanupScope.ForceCleanup();
  EmitBranchThroughCleanup(ReturnBlock);
}
void CodeGenFunction::EmitDeclStmt(const DeclStmt &S) {
  // As long as debug info is modeled with instructions, we have to ensure we
  // have a place to insert here and write the stop point here.
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  for (const auto *I : S.decls())
    EmitDecl(*I);
}

void CodeGenFunction::EmitBreakStmt(const BreakStmt &S) {
  assert(!BreakContinueStack.empty() && "break stmt not in a loop or switch!");

  // If this code is reachable then emit a stop point (if generating
  // debug info). We have to do this ourselves because we are on the
  // "simple" statement path.
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  EmitBranchThroughCleanup(BreakContinueStack.back().BreakBlock);
}

void CodeGenFunction::EmitContinueStmt(const ContinueStmt &S) {
  assert(!BreakContinueStack.empty() && "continue stmt not in a loop!");

  // If this code is reachable then emit a stop point (if generating
  // debug info). We have to do this ourselves because we are on the
  // "simple" statement path.
  if (HaveInsertPoint())
    EmitStopPoint(&S);

  EmitBranchThroughCleanup(BreakContinueStack.back().ContinueBlock);
}
/// EmitCaseStmtRange - If the case-statement range is not too big, add
/// multiple cases to the switch instruction, one for each value within
/// the range. If the range is too big, emit an "if" condition check instead.
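///
/// For illustration (GNU case-range extension):
///   switch (x) { case 1 ... 5: f(); break; }  // becomes five switch cases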
void CodeGenFunction::EmitCaseStmtRange(const CaseStmt &S) {
  assert(S.getRHS() && "Expected RHS value in CaseStmt");

  llvm::APSInt LHS = S.getLHS()->EvaluateKnownConstInt(getContext());
  llvm::APSInt RHS = S.getRHS()->EvaluateKnownConstInt(getContext());

  // Emit the code for this case. We do this first to make sure it is
  // properly chained from our predecessor before generating the
  // switch machinery to enter this block.
  llvm::BasicBlock *CaseDest = createBasicBlock("sw.bb");
  EmitBlockWithFallThrough(CaseDest, &S);
  EmitStmt(S.getSubStmt());

  // If range is empty, do nothing.
  if (LHS.isSigned() ? RHS.slt(LHS) : RHS.ult(LHS))
    return;

  llvm::APInt Range = RHS - LHS;
  // FIXME: parameters such as this should not be hardcoded.
  if (Range.ult(llvm::APInt(Range.getBitWidth(), 64))) {
    // Range is small enough to add multiple switch instruction cases.
    uint64_t Total = getProfileCount(&S);
    unsigned NCases = Range.getZExtValue() + 1;
    // We only have one region counter for the entire set of cases here, so we
    // need to divide the weights evenly between the generated cases, ensuring
    // that the total weight is preserved. E.g., a weight of 5 over three cases
    // will be distributed as weights of 2, 2, and 1.
    uint64_t Weight = Total / NCases, Rem = Total % NCases;
    for (unsigned I = 0; I != NCases; ++I) {
      if (SwitchWeights)
        SwitchWeights->push_back(Weight + (Rem ? 1 : 0));
      if (Rem)
        Rem--;
      SwitchInsn->addCase(Builder.getInt(LHS), CaseDest);
      ++LHS;
    }
    return;
  }

  // The range is too big. Emit "if" condition into a new block,
  // making sure to save and restore the current insertion point.
  llvm::BasicBlock *RestoreBB = Builder.GetInsertBlock();

  // Push this test onto the chain of range checks (which terminates
  // in the default basic block). The switch's default will be changed
  // to the top of this chain after switch emission is complete.
  llvm::BasicBlock *FalseDest = CaseRangeBlock;
  CaseRangeBlock = createBasicBlock("sw.caserange");

  CurFn->getBasicBlockList().push_back(CaseRangeBlock);
  Builder.SetInsertPoint(CaseRangeBlock);

  // Emit range check.
  llvm::Value *Diff =
      Builder.CreateSub(SwitchInsn->getCondition(), Builder.getInt(LHS));
  llvm::Value *Cond =
      Builder.CreateICmpULE(Diff, Builder.getInt(Range), "inbounds");

  llvm::MDNode *Weights = nullptr;
  if (SwitchWeights) {
    uint64_t ThisCount = getProfileCount(&S);
    uint64_t DefaultCount = (*SwitchWeights)[0];
    Weights = createProfileWeights(ThisCount, DefaultCount);

    // Since we're chaining the switch default through each large case range,
    // we need to update the weight for the default, i.e. the first case, to
    // include this case.
    (*SwitchWeights)[0] += ThisCount;
  }
  Builder.CreateCondBr(Cond, CaseDest, FalseDest, Weights);

  // Restore the appropriate insertion point.
  if (RestoreBB)
    Builder.SetInsertPoint(RestoreBB);
  else
    Builder.ClearInsertionPoint();
}
void CodeGenFunction::EmitCaseStmt(const CaseStmt &S) {
  // If there is no enclosing switch instance that we're aware of, then this
  // case statement and its block can be elided. This situation only happens
  // when we've constant-folded the switch, are emitting the constant case,
  // and part of the constant case includes another case statement. For
  // instance: switch (4) { case 4: do { case 5: } while (1); }
  if (!SwitchInsn) {
    EmitStmt(S.getSubStmt());
    return;
  }

  // Handle case ranges.
  if (S.getRHS()) {
    EmitCaseStmtRange(S);
    return;
  }

  llvm::ConstantInt *CaseVal =
      Builder.getInt(S.getLHS()->EvaluateKnownConstInt(getContext()));

  // If the body of the case is just a 'break', try to not emit an empty block.
  // If we're profiling or we're not optimizing, leave the block in for better
  // debug and coverage analysis.
  if (!CGM.getCodeGenOpts().hasProfileClangInstr() &&
      CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      isa<BreakStmt>(S.getSubStmt())) {
    JumpDest Block = BreakContinueStack.back().BreakBlock;

    // Only do this optimization if there are no cleanups that need emitting.
    if (isObviouslyBranchWithoutCleanups(Block)) {
      if (SwitchWeights)
        SwitchWeights->push_back(getProfileCount(&S));
      SwitchInsn->addCase(CaseVal, Block.getBlock());

      // If there was a fallthrough into this case, make sure to redirect it to
      // the end of the switch as well.
      if (Builder.GetInsertBlock()) {
        Builder.CreateBr(Block.getBlock());
        Builder.ClearInsertionPoint();
      }
      return;
    }
  }

  llvm::BasicBlock *CaseDest = createBasicBlock("sw.bb");
  EmitBlockWithFallThrough(CaseDest, &S);
  if (SwitchWeights)
    SwitchWeights->push_back(getProfileCount(&S));
  SwitchInsn->addCase(CaseVal, CaseDest);

  // Recursively emitting the statement is acceptable, but is not wonderful for
  // code where we have many case statements nested together, i.e.:
  //  case 1:
  //    case 2:
  //      case 3: etc.
  // Handling this recursively will create a new block for each case statement
  // that falls through to the next case which is IR intensive. It also causes
  // deep recursion which can run into stack depth limitations. Handle
  // sequential non-range case statements specially.
  const CaseStmt *CurCase = &S;
  const CaseStmt *NextCase = dyn_cast<CaseStmt>(S.getSubStmt());

  // Otherwise, iteratively add consecutive cases to this switch stmt.
  while (NextCase && NextCase->getRHS() == nullptr) {
    CurCase = NextCase;
    llvm::ConstantInt *CaseVal =
        Builder.getInt(CurCase->getLHS()->EvaluateKnownConstInt(getContext()));

    if (SwitchWeights)
      SwitchWeights->push_back(getProfileCount(NextCase));
    if (CGM.getCodeGenOpts().hasProfileClangInstr()) {
      CaseDest = createBasicBlock("sw.bb");
      EmitBlockWithFallThrough(CaseDest, &S);
    }

    SwitchInsn->addCase(CaseVal, CaseDest);
    NextCase = dyn_cast<CaseStmt>(CurCase->getSubStmt());
  }

  // Normal default recursion for non-cases.
  EmitStmt(CurCase->getSubStmt());
}
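/// EmitDefaultStmt - Emit the 'default' label of a switch, binding it to the
/// default destination of the enclosing SwitchInst.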
void CodeGenFunction::EmitDefaultStmt(const DefaultStmt &S) {
  // If there is no enclosing switch instance that we're aware of, then this
  // default statement can be elided. This situation only happens when we've
  // constant-folded the switch.
  if (!SwitchInsn) {
    EmitStmt(S.getSubStmt());
    return;
  }

  llvm::BasicBlock *DefaultBlock = SwitchInsn->getDefaultDest();
  assert(DefaultBlock->empty() &&
         "EmitDefaultStmt: Default block already defined?");

  EmitBlockWithFallThrough(DefaultBlock, &S);

  EmitStmt(S.getSubStmt());
}
/// CollectStatementsForCase - Given the body of a 'switch' statement and a
/// constant value that is being switched on, see if we can dead code eliminate
/// the body of the switch to a simple series of statements to emit. Basically,
/// on a switch (5) we want to find these statements:
///    case 5:
///      printf(...);    <--
///      ++i;            <--
///      break;
///
/// and add them to the ResultStmts vector. If it is unsafe to do this
/// transformation (for example, one of the elided statements contains a label
/// that might be jumped to), return CSFC_Failure. If we handled it and 'S'
/// should include statements after it (e.g. the printf() line is a substmt of
/// the case) then return CSFC_FallThrough. If we handled it and found a break
/// statement, then return CSFC_Success.
///
/// If Case is non-null, then we are looking for the specified case, checking
/// that nothing we jump over contains labels. If Case is null, then we found
/// the case and are looking for the break.
///
/// If the recursive walk actually finds our Case, then we set FoundCase to
/// true.
///
enum CSFC_Result { CSFC_Failure, CSFC_FallThrough, CSFC_Success };
static CSFC_Result CollectStatementsForCase(const Stmt *S,
                                            const SwitchCase *Case,
                                            bool &FoundCase,
                              SmallVectorImpl<const Stmt*> &ResultStmts) {
  // If this is a null statement, just succeed.
  if (!S)
    return Case ? CSFC_Success : CSFC_FallThrough;

  // If this is the switchcase (case 4: or default) that we're looking for,
  // then we're in business. Just add the substatement.
  if (const SwitchCase *SC = dyn_cast<SwitchCase>(S)) {
    if (S == Case) {
      FoundCase = true;
      return CollectStatementsForCase(SC->getSubStmt(), nullptr, FoundCase,
                                      ResultStmts);
    }

    // Otherwise, this is some other case or default statement, just ignore it.
    return CollectStatementsForCase(SC->getSubStmt(), Case, FoundCase,
                                    ResultStmts);
  }

  // If we are in the live part of the code and we found our break statement,
  // return a success!
  if (!Case && isa<BreakStmt>(S))
    return CSFC_Success;
  // If this is a compound statement, then it might contain the SwitchCase,
  // the break, or neither.
  if (const CompoundStmt *CS = dyn_cast<CompoundStmt>(S)) {
    // Handle this as two cases: we might be looking for the SwitchCase (if so
    // the skipped statements must be skippable) or we might already have it.
    CompoundStmt::const_body_iterator I = CS->body_begin(), E = CS->body_end();
    bool StartedInLiveCode = FoundCase;
    unsigned StartSize = ResultStmts.size();

    // If we've not found the case yet, scan through looking for it.
    if (Case) {
      // Keep track of whether we see a skipped declaration. The code could be
      // using the declaration even if it is skipped, so we can't optimize out
      // the decl if the kept statements might refer to it.
      bool HadSkippedDecl = false;

      // If we're looking for the case, just see if we can skip each of the
      // substatements.
      for (; Case && I != E; ++I) {
        HadSkippedDecl |= CodeGenFunction::mightAddDeclToScope(*I);

        switch (CollectStatementsForCase(*I, Case, FoundCase, ResultStmts)) {
        case CSFC_Failure: return CSFC_Failure;
        case CSFC_Success:
          // A successful result means that either 1) the statement doesn't
          // have the case and is skippable, or 2) it does contain the case
          // value and also contains the break to exit the switch. In the
          // latter case, we just verify the rest of the statements are
          // elidable.
          if (FoundCase) {
            // If we found the case and skipped declarations, we can't do the
            // optimization.
            if (HadSkippedDecl)
              return CSFC_Failure;

            for (++I; I != E; ++I)
              if (CodeGenFunction::ContainsLabel(*I, true))
                return CSFC_Failure;
            return CSFC_Success;
          }
          break;
        case CSFC_FallThrough:
          // If we have a fallthrough condition, then we must have found the
          // case and started to include statements. Consider the rest of the
          // statements in the compound statement as candidates for inclusion.
          assert(FoundCase && "Didn't find case but returned fallthrough?");
          // We recursively found Case, so we're not looking for it anymore.
          Case = nullptr;

          // If we found the case and skipped declarations, we can't do the
          // optimization.
          if (HadSkippedDecl)
            return CSFC_Failure;
          break;
        }
      }

      if (!FoundCase)
        return CSFC_Success;

      assert(!HadSkippedDecl && "fallthrough after skipping decl");
    }
    // If we have statements in our range, then we know that the statements are
    // live and need to be added to the set of statements we're tracking.
    bool AnyDecls = false;
    for (; I != E; ++I) {
      AnyDecls |= CodeGenFunction::mightAddDeclToScope(*I);

      switch (CollectStatementsForCase(*I, nullptr, FoundCase, ResultStmts)) {
      case CSFC_Failure: return CSFC_Failure;
      case CSFC_FallThrough:
        // A fallthrough result means that the statement was simple and just
        // included in ResultStmts; keep adding statements afterwards.
        break;
      case CSFC_Success:
        // A successful result means that we found the break statement and
        // stopped statement inclusion. We just ensure that any leftover stmts
        // are skippable and return success ourselves.
        for (++I; I != E; ++I)
          if (CodeGenFunction::ContainsLabel(*I, true))
            return CSFC_Failure;
        return CSFC_Success;
      }
    }

    // If we're about to fall out of a scope without hitting a 'break;', we
    // can't perform the optimization if there were any decls in that scope
    // (we'd lose their end-of-lifetime).
    if (AnyDecls) {
      // If the entire compound statement was live, there's one more thing we
      // can try before giving up: emit the whole thing as a single statement.
      // We can do that unless the statement contains a 'break;'.
      // FIXME: Such a break must be at the end of a construct within this one.
      // We could emit this by just ignoring the BreakStmts entirely.
      if (StartedInLiveCode && !CodeGenFunction::containsBreak(S)) {
        ResultStmts.resize(StartSize);
        ResultStmts.push_back(S);
      } else {
        return CSFC_Failure;
      }
    }

    return CSFC_FallThrough;
  }
  // Okay, this is some other statement that we don't handle explicitly, like a
  // for statement or increment etc. If we are skipping over this statement,
  // just verify it doesn't have labels, which would make it invalid to elide.
  if (Case) {
    if (CodeGenFunction::ContainsLabel(S, true))
      return CSFC_Failure;
    return CSFC_Success;
  }

  // Otherwise, we want to include this statement. Everything is cool with that
  // so long as it doesn't contain a break out of the switch we're in.
  if (CodeGenFunction::containsBreak(S)) return CSFC_Failure;

  // Otherwise, everything is great. Include the statement and tell the caller
  // that we fall through and include the next statement as well.
  ResultStmts.push_back(S);
  return CSFC_FallThrough;
}
/// FindCaseStatementsForValue - Find the case statement being jumped to and
/// then invoke CollectStatementsForCase to find the list of statements to emit
/// for a switch on constant. See the comment above CollectStatementsForCase
/// for more details.
static bool FindCaseStatementsForValue(const SwitchStmt &S,
                                       const llvm::APSInt &ConstantCondValue,
                                SmallVectorImpl<const Stmt*> &ResultStmts,
                                       ASTContext &C,
                                       const SwitchCase *&ResultCase) {
  // First step, find the switch case that is being branched to. We can do this
  // efficiently by scanning the SwitchCase list.
  const SwitchCase *Case = S.getSwitchCaseList();
  const DefaultStmt *DefaultCase = nullptr;

  for (; Case; Case = Case->getNextSwitchCase()) {
    // It's either a default or case. Just remember the default statement in
    // case we're not jumping to any numbered cases.
    if (const DefaultStmt *DS = dyn_cast<DefaultStmt>(Case)) {
      DefaultCase = DS;
      continue;
    }

    // Check to see if this case is the one we're looking for.
    const CaseStmt *CS = cast<CaseStmt>(Case);
    // Don't handle case ranges yet.
    if (CS->getRHS()) return false;

    // If we found our case, remember it as 'case'.
    if (CS->getLHS()->EvaluateKnownConstInt(C) == ConstantCondValue)
      break;
  }

  // If we didn't find a matching case, we use a default if it exists, or we
  // elide the whole switch body!
  if (!Case) {
    // It is safe to elide the body of the switch if it doesn't contain labels
    // etc. If it is safe, return successfully with an empty ResultStmts list.
    if (!DefaultCase)
      return !CodeGenFunction::ContainsLabel(&S);
    Case = DefaultCase;
  }

  // Ok, we know which case is being jumped to, try to collect all the
  // statements that follow it. This can fail for a variety of reasons. Also,
  // check to see that the recursive walk actually found our case statement.
  // Insane cases like this can fail to find it in the recursive walk since we
  // don't handle every stmt kind:
  // switch (4) {
  //   while (1) {
  //     case 4: ...
  bool FoundCase = false;
  ResultCase = Case;
  return CollectStatementsForCase(S.getBody(), Case, FoundCase,
                                  ResultStmts) != CSFC_Failure &&
         FoundCase;
}
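/// EmitSwitchStmt - Emit a 'switch' statement, either by constant-folding the
/// condition down to the single live case or by emitting a SwitchInst with
/// one destination per case label.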
void CodeGenFunction::EmitSwitchStmt(const SwitchStmt &S) {
  // Handle nested switch statements.
  llvm::SwitchInst *SavedSwitchInsn = SwitchInsn;
  SmallVector<uint64_t, 16> *SavedSwitchWeights = SwitchWeights;
  llvm::BasicBlock *SavedCRBlock = CaseRangeBlock;

  // See if we can constant fold the condition of the switch and therefore only
  // emit the live case statement (if any) of the switch.
  llvm::APSInt ConstantCondValue;
  if (ConstantFoldsToSimpleInteger(S.getCond(), ConstantCondValue)) {
    SmallVector<const Stmt*, 4> CaseStmts;
    const SwitchCase *Case = nullptr;
    if (FindCaseStatementsForValue(S, ConstantCondValue, CaseStmts,
                                   getContext(), Case)) {
      if (Case)
        incrementProfileCounter(Case);
      RunCleanupsScope ExecutedScope(*this);

      if (S.getInit())
        EmitStmt(S.getInit());

      // Emit the condition variable if needed inside the entire cleanup scope
      // used by this special case for constant folded switches.
      if (S.getConditionVariable())
        EmitDecl(*S.getConditionVariable());

      // At this point, we are no longer "within" a switch instance, so
      // we can temporarily enforce this to ensure that any embedded case
      // statements are not emitted.
      SwitchInsn = nullptr;

      // Okay, we can dead code eliminate everything except this case. Emit the
      // specified series of statements and we're good.
      for (unsigned i = 0, e = CaseStmts.size(); i != e; ++i)
        EmitStmt(CaseStmts[i]);
      incrementProfileCounter(&S);

      // Now we want to restore the saved switch instance so that nested
      // switches continue to function properly.
      SwitchInsn = SavedSwitchInsn;

      return;
    }
  }
  JumpDest SwitchExit = getJumpDestInCurrentScope("sw.epilog");

  RunCleanupsScope ConditionScope(*this);

  if (S.getInit())
    EmitStmt(S.getInit());

  if (S.getConditionVariable())
    EmitDecl(*S.getConditionVariable());
  llvm::Value *CondV = EmitScalarExpr(S.getCond());

  // Create basic block to hold stuff that comes after switch
  // statement. We also need to create a default block now so that
  // explicit case ranges tests can have a place to jump to on
  // failure.
  llvm::BasicBlock *DefaultBlock = createBasicBlock("sw.default");
  SwitchInsn = Builder.CreateSwitch(CondV, DefaultBlock);
  if (PGO.haveRegionCounts()) {
    // Walk the SwitchCase list to find how many there are.
    uint64_t DefaultCount = 0;
    unsigned NumCases = 0;
    for (const SwitchCase *Case = S.getSwitchCaseList();
         Case;
         Case = Case->getNextSwitchCase()) {
      if (isa<DefaultStmt>(Case))
        DefaultCount = getProfileCount(Case);
      NumCases += 1;
    }
    SwitchWeights = new SmallVector<uint64_t, 16>();
    SwitchWeights->reserve(NumCases);
    // The default needs to be first. We store the edge count, so we already
    // know the right weight.
    SwitchWeights->push_back(DefaultCount);
  }
  CaseRangeBlock = DefaultBlock;

  // Clear the insertion point to indicate we are in unreachable code.
  Builder.ClearInsertionPoint();
  // All break statements jump to SwitchExit. If BreakContinueStack is
  // non-empty, then reuse the last ContinueBlock.
  JumpDest OuterContinue;
  if (!BreakContinueStack.empty())
    OuterContinue = BreakContinueStack.back().ContinueBlock;

  BreakContinueStack.push_back(BreakContinue(SwitchExit, OuterContinue));
  // Emit switch body.
  EmitStmt(S.getBody());

  BreakContinueStack.pop_back();

  // Update the default block in case explicit case range tests have
  // been chained on top.
  SwitchInsn->setDefaultDest(CaseRangeBlock);

  // If a default was never emitted:
  if (!DefaultBlock->getParent()) {
    // If we have cleanups, emit the default block so that there's a
    // place to jump through the cleanups from.
    if (ConditionScope.requiresCleanups()) {
      EmitBlock(DefaultBlock);

    // Otherwise, just forward the default block to the switch end.
    } else {
      DefaultBlock->replaceAllUsesWith(SwitchExit.getBlock());
      delete DefaultBlock;
    }
  }

  ConditionScope.ForceCleanup();

  // Emit continuation.
  EmitBlock(SwitchExit.getBlock(), true);
  incrementProfileCounter(&S);

  // If the switch has a condition wrapped by __builtin_unpredictable,
  // create metadata that specifies that the switch is unpredictable.
  // Don't bother if not optimizing because that metadata would not be used.
  auto *Call = dyn_cast<CallExpr>(S.getCond());
  if (Call && CGM.getCodeGenOpts().OptimizationLevel != 0) {
    auto *FD = dyn_cast_or_null<FunctionDecl>(Call->getCalleeDecl());
    if (FD && FD->getBuiltinID() == Builtin::BI__builtin_unpredictable) {
      llvm::MDBuilder MDHelper(getLLVMContext());
      SwitchInsn->setMetadata(llvm::LLVMContext::MD_unpredictable,
                              MDHelper.createUnpredictable());
    }
  }

  if (SwitchWeights) {
    assert(SwitchWeights->size() == 1 + SwitchInsn->getNumCases() &&
           "switch weights do not match switch cases");
    // If there's only one jump destination there's no sense weighting it.
    if (SwitchWeights->size() > 1)
      SwitchInsn->setMetadata(llvm::LLVMContext::MD_prof,
                              createProfileWeights(*SwitchWeights));
    delete SwitchWeights;
  }
  SwitchInsn = SavedSwitchInsn;
  SwitchWeights = SavedSwitchWeights;
  CaseRangeBlock = SavedCRBlock;
}
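/// SimplifyConstraint - Translate a GCC inline-asm constraint string into the
/// form the LLVM backend expects: alternatives are separated by '|' instead
/// of ',', 'g' is expanded to "imr", modifiers LLVM does not model are
/// dropped, and symbolic operand names are resolved to operand numbers.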
static std::string
SimplifyConstraint(const char *Constraint, const TargetInfo &Target,
                 SmallVectorImpl<TargetInfo::ConstraintInfo> *OutCons=nullptr) {
  std::string Result;

  while (*Constraint) {
    switch (*Constraint) {
    default:
      Result += Target.convertConstraint(Constraint);
      break;
    // Ignore these
    case '*':
    case '?':
    case '!':
    case '=': // Will see this and the following in multi-alternative constraints.
    case '+':
      break;
    case '#': // Ignore the rest of the constraint alternative.
      while (Constraint[1] && Constraint[1] != ',')
        Constraint++;
      break;
    case '&':
    case '%':
      Result += *Constraint;
      while (Constraint[1] && Constraint[1] == *Constraint)
        Constraint++;
      break;
    case ',':
      Result += "|";
      break;
    case 'g':
      Result += "imr";
      break;
    case '[': {
      assert(OutCons &&
             "Must pass output names to constraints with a symbolic name");
      unsigned Index;
      bool result = Target.resolveSymbolicName(Constraint, *OutCons, Index);
      assert(result && "Could not resolve symbolic name"); (void)result;
      Result += llvm::utostr(Index);
      break;
    }
    }

    Constraint++;
  }

  return Result;
}
/// AddVariableConstraints - Look at AsmExpr and if it is a variable declared
/// as using a particular register, add that as a constraint that will be used
/// in this asm stmt.
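///
/// For example, given an illustrative declaration such as
///   register int Val asm("eax");
/// an output operand referring to Val is pinned to the constraint "{eax}"
/// (or "&{eax}" if it is early-clobber).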
static std::string
AddVariableConstraints(const std::string &Constraint, const Expr &AsmExpr,
                       const TargetInfo &Target, CodeGenModule &CGM,
                       const AsmStmt &Stmt, const bool EarlyClobber) {
  const DeclRefExpr *AsmDeclRef = dyn_cast<DeclRefExpr>(&AsmExpr);
  if (!AsmDeclRef)
    return Constraint;
  const ValueDecl &Value = *AsmDeclRef->getDecl();
  const VarDecl *Variable = dyn_cast<VarDecl>(&Value);
  if (!Variable)
    return Constraint;
  if (Variable->getStorageClass() != SC_Register)
    return Constraint;
  AsmLabelAttr *Attr = Variable->getAttr<AsmLabelAttr>();
  if (!Attr)
    return Constraint;
  StringRef Register = Attr->getLabel();
  assert(Target.isValidGCCRegisterName(Register));
  // We're using validateOutputConstraint here because we only care if
  // this is a register constraint.
  TargetInfo::ConstraintInfo Info(Constraint, "");
  if (Target.validateOutputConstraint(Info) &&
      !Info.allowsRegister()) {
    CGM.ErrorUnsupported(&Stmt, "__asm__");
    return Constraint;
  }
  // Canonicalize the register here before returning it.
  Register = Target.getNormalizedGCCRegisterName(Register);
  return (EarlyClobber ? "&{" : "{") + Register.str() + "}";
}
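/// EmitAsmInputLValue - Emit an inline-asm operand that is an lvalue. Scalar
/// register operands are loaded by value; a small power-of-two-sized
/// aggregate is loaded through an integer of the same width; anything else is
/// passed indirectly by pointer, with '*' appended to its constraint.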
llvm::Value*
CodeGenFunction::EmitAsmInputLValue(const TargetInfo::ConstraintInfo &Info,
                                    LValue InputValue, QualType InputType,
                                    std::string &ConstraintStr,
                                    SourceLocation Loc) {
  llvm::Value *Arg;
  if (Info.allowsRegister() || !Info.allowsMemory()) {
    if (CodeGenFunction::hasScalarEvaluationKind(InputType)) {
      Arg = EmitLoadOfLValue(InputValue, Loc).getScalarVal();
    } else {
      llvm::Type *Ty = ConvertType(InputType);
      uint64_t Size = CGM.getDataLayout().getTypeSizeInBits(Ty);
      if (Size <= 64 && llvm::isPowerOf2_64(Size)) {
        Ty = llvm::IntegerType::get(getLLVMContext(), Size);
        Ty = llvm::PointerType::getUnqual(Ty);

        Arg = Builder.CreateLoad(Builder.CreateBitCast(InputValue.getAddress(),
                                                       Ty));
      } else {
        Arg = InputValue.getPointer();
        ConstraintStr += '*';
      }
    }
  } else {
    Arg = InputValue.getPointer();
    ConstraintStr += '*';
  }

  return Arg;
}
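/// EmitAsmInput - Emit a single inline-asm input operand. Immediate-only
/// constraints are folded to constants when possible; scalar register
/// operands are emitted by value; everything else is emitted as an lvalue and
/// passed indirectly.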
llvm::Value*
CodeGenFunction::EmitAsmInput(const TargetInfo::ConstraintInfo &Info,
                              const Expr *InputExpr,
                              std::string &ConstraintStr) {
  // If this can't be a register or memory, i.e., has to be a constant
  // (immediate or symbolic), try to emit it as such.
  if (!Info.allowsRegister() && !Info.allowsMemory()) {
    llvm::APSInt Result;
    if (InputExpr->EvaluateAsInt(Result, getContext()))
      return llvm::ConstantInt::get(getLLVMContext(), Result);
    assert(!Info.requiresImmediateConstant() &&
           "Required-immediate inlineasm arg isn't constant?");
  }

  if (Info.allowsRegister() || !Info.allowsMemory())
    if (CodeGenFunction::hasScalarEvaluationKind(InputExpr->getType()))
      return EmitScalarExpr(InputExpr);
  if (InputExpr->getStmtClass() == Expr::CXXThisExprClass)
    return EmitScalarExpr(InputExpr);
  InputExpr = InputExpr->IgnoreParenNoopCasts(getContext());
  LValue Dest = EmitLValue(InputExpr);
  return EmitAsmInputLValue(Info, Dest, InputExpr->getType(), ConstraintStr,
                            InputExpr->getExprLoc());
}
/// getAsmSrcLocInfo - Return the !srcloc metadata node to attach to an inline
/// asm call instruction. The !srcloc MDNode contains a list of constant
/// integers which are the source locations of the start of each line in the
/// asm.
static llvm::MDNode *getAsmSrcLocInfo(const StringLiteral *Str,
                                      CodeGenFunction &CGF) {
  SmallVector<llvm::Metadata *, 8> Locs;
  // Add the location of the first line to the MDNode.
  Locs.push_back(llvm::ConstantAsMetadata::get(llvm::ConstantInt::get(
      CGF.Int32Ty, Str->getBeginLoc().getRawEncoding())));
  StringRef StrVal = Str->getString();
  if (!StrVal.empty()) {
    const SourceManager &SM = CGF.CGM.getContext().getSourceManager();
    const LangOptions &LangOpts = CGF.CGM.getLangOpts();
    unsigned StartToken = 0;
    unsigned ByteOffset = 0;

    // Add the location of the start of each subsequent line of the asm to the
    // MDNode.
    for (unsigned i = 0, e = StrVal.size() - 1; i != e; ++i) {
      if (StrVal[i] != '\n') continue;
      SourceLocation LineLoc = Str->getLocationOfByte(
          i + 1, SM, LangOpts, CGF.getTarget(), &StartToken, &ByteOffset);
      Locs.push_back(llvm::ConstantAsMetadata::get(
          llvm::ConstantInt::get(CGF.Int32Ty, LineLoc.getRawEncoding())));
    }
  }

  return llvm::MDNode::get(CGF.getLLVMContext(), Locs);
}
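/// EmitAsmStmt - Lower a GCC- or MS-style inline assembly statement to an
/// LLVM inline-asm call: validate and simplify the constraints, marshal the
/// output, input, in/out, and clobber operands, and store the register
/// results back through their lvalues.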
void CodeGenFunction::EmitAsmStmt(const AsmStmt &S) {
  // Assemble the final asm string.
  std::string AsmString = S.generateAsmString(getContext());

  // Get all the output and input constraints together.
  SmallVector<TargetInfo::ConstraintInfo, 4> OutputConstraintInfos;
  SmallVector<TargetInfo::ConstraintInfo, 4> InputConstraintInfos;

  for (unsigned i = 0, e = S.getNumOutputs(); i != e; i++) {
    StringRef Name;
    if (const GCCAsmStmt *GAS = dyn_cast<GCCAsmStmt>(&S))
      Name = GAS->getOutputName(i);
    TargetInfo::ConstraintInfo Info(S.getOutputConstraint(i), Name);
    bool IsValid = getTarget().validateOutputConstraint(Info); (void)IsValid;
    assert(IsValid && "Failed to parse output constraint");
    OutputConstraintInfos.push_back(Info);
  }

  for (unsigned i = 0, e = S.getNumInputs(); i != e; i++) {
    StringRef Name;
    if (const GCCAsmStmt *GAS = dyn_cast<GCCAsmStmt>(&S))
      Name = GAS->getInputName(i);
    TargetInfo::ConstraintInfo Info(S.getInputConstraint(i), Name);
    bool IsValid =
      getTarget().validateInputConstraint(OutputConstraintInfos, Info);
    assert(IsValid && "Failed to parse input constraint"); (void)IsValid;
    InputConstraintInfos.push_back(Info);
  }

  std::string Constraints;

  std::vector<LValue> ResultRegDests;
  std::vector<QualType> ResultRegQualTys;
  std::vector<llvm::Type *> ResultRegTypes;
  std::vector<llvm::Type *> ResultTruncRegTypes;
  std::vector<llvm::Type *> ArgTypes;
  std::vector<llvm::Value*> Args;

  // Keep track of inout constraints.
  std::string InOutConstraints;
  std::vector<llvm::Value*> InOutArgs;
  std::vector<llvm::Type*> InOutArgTypes;

  // An inline asm can be marked readonly if it meets the following conditions:
  //  - it doesn't have any side effects
  //  - it doesn't clobber memory
  //  - it doesn't return a value by-reference
  // It can be marked readnone if it doesn't have any input memory constraints
  // in addition to meeting the conditions listed above.
  bool ReadOnly = true, ReadNone = true;
  for (unsigned i = 0, e = S.getNumOutputs(); i != e; i++) {
    TargetInfo::ConstraintInfo &Info = OutputConstraintInfos[i];

    // Simplify the output constraint.
    std::string OutputConstraint(S.getOutputConstraint(i));
    OutputConstraint = SimplifyConstraint(OutputConstraint.c_str() + 1,
                                          getTarget(), &OutputConstraintInfos);

    const Expr *OutExpr = S.getOutputExpr(i);
    OutExpr = OutExpr->IgnoreParenNoopCasts(getContext());

    OutputConstraint = AddVariableConstraints(OutputConstraint, *OutExpr,
                                              getTarget(), CGM, S,
                                              Info.earlyClobber());

    LValue Dest = EmitLValue(OutExpr);
    if (!Constraints.empty())
      Constraints += ',';

    // If this is a register output, then make the inline asm return it
    // by-value. If this is a memory result, return the value by-reference.
    if (!Info.allowsMemory() && hasScalarEvaluationKind(OutExpr->getType())) {
      Constraints += "=" + OutputConstraint;
      ResultRegQualTys.push_back(OutExpr->getType());
      ResultRegDests.push_back(Dest);
      ResultRegTypes.push_back(ConvertTypeForMem(OutExpr->getType()));
      ResultTruncRegTypes.push_back(ResultRegTypes.back());

      // If this output is tied to an input, and if the input is larger, then
      // we need to set the actual result type of the inline asm node to be the
      // same as the input type.
      if (Info.hasMatchingInput()) {
        unsigned InputNo;
        for (InputNo = 0; InputNo != S.getNumInputs(); ++InputNo) {
          TargetInfo::ConstraintInfo &Input = InputConstraintInfos[InputNo];
          if (Input.hasTiedOperand() && Input.getTiedOperand() == i)
            break;
        }
        assert(InputNo != S.getNumInputs() && "Didn't find matching input!");

        QualType InputTy = S.getInputExpr(InputNo)->getType();
        QualType OutputType = OutExpr->getType();

        uint64_t InputSize = getContext().getTypeSize(InputTy);
        if (getContext().getTypeSize(OutputType) < InputSize) {
          // Form the asm to return the value as a larger integer or fp type.
          ResultRegTypes.back() = ConvertType(InputTy);
        }
      }
      if (llvm::Type* AdjTy =
            getTargetHooks().adjustInlineAsmType(*this, OutputConstraint,
                                                 ResultRegTypes.back()))
        ResultRegTypes.back() = AdjTy;
      else {
        CGM.getDiags().Report(S.getAsmLoc(),
                              diag::err_asm_invalid_type_in_input)
            << OutExpr->getType() << OutputConstraint;
      }

      // Update largest vector width for any vector types.
      if (auto *VT = dyn_cast<llvm::VectorType>(ResultRegTypes.back()))
        LargestVectorWidth = std::max(LargestVectorWidth,
                                      VT->getPrimitiveSizeInBits());
    } else {
      ArgTypes.push_back(Dest.getAddress().getType());
      Args.push_back(Dest.getPointer());
      Constraints += "=*";
      Constraints += OutputConstraint;
      ReadOnly = ReadNone = false;
    }

    if (Info.isReadWrite()) {
      InOutConstraints += ',';

      const Expr *InputExpr = S.getOutputExpr(i);
      llvm::Value *Arg = EmitAsmInputLValue(Info, Dest, InputExpr->getType(),
                                            InOutConstraints,
                                            InputExpr->getExprLoc());

      if (llvm::Type* AdjTy =
            getTargetHooks().adjustInlineAsmType(*this, OutputConstraint,
                                                 Arg->getType()))
        Arg = Builder.CreateBitCast(Arg, AdjTy);

      // Update largest vector width for any vector types.
      if (auto *VT = dyn_cast<llvm::VectorType>(Arg->getType()))
        LargestVectorWidth = std::max(LargestVectorWidth,
                                      VT->getPrimitiveSizeInBits());

      if (Info.allowsRegister())
        InOutConstraints += llvm::utostr(i);
      else
        InOutConstraints += OutputConstraint;

      InOutArgTypes.push_back(Arg->getType());
      InOutArgs.push_back(Arg);
    }
  }
  // If this is a Microsoft-style asm blob, store the return registers
  // (EAX:EDX) to the return value slot. Only do this when returning in
  // registers.
  if (isa<MSAsmStmt>(&S)) {
    const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
    if (RetAI.isDirect() || RetAI.isExtend()) {
      // Make a fake lvalue for the return value slot.
      LValue ReturnSlot = MakeAddrLValue(ReturnValue, FnRetTy);
      CGM.getTargetCodeGenInfo().addReturnRegisterOutputs(
          *this, ReturnSlot, Constraints, ResultRegTypes, ResultTruncRegTypes,
          ResultRegDests, AsmString, S.getNumOutputs());
      SawAsmBlock = true;
    }
  }
  for (unsigned i = 0, e = S.getNumInputs(); i != e; i++) {
    const Expr *InputExpr = S.getInputExpr(i);

    TargetInfo::ConstraintInfo &Info = InputConstraintInfos[i];

    if (Info.allowsMemory())
      ReadNone = false;

    if (!Constraints.empty())
      Constraints += ',';

    // Simplify the input constraint.
    std::string InputConstraint(S.getInputConstraint(i));
    InputConstraint = SimplifyConstraint(InputConstraint.c_str(), getTarget(),
                                         &OutputConstraintInfos);

    InputConstraint = AddVariableConstraints(
        InputConstraint, *InputExpr->IgnoreParenNoopCasts(getContext()),
        getTarget(), CGM, S, false /* No EarlyClobber */);

    llvm::Value *Arg = EmitAsmInput(Info, InputExpr, Constraints);

    // If this input argument is tied to a larger output result, extend the
    // input to be the same size as the output. The LLVM backend wants to see
    // the input and output of a matching constraint be the same size. Note
    // that GCC does not define what the top bits are here. We use zext because
    // that is usually cheaper, but LLVM IR should really get an anyext someday.
    if (Info.hasTiedOperand()) {
      unsigned Output = Info.getTiedOperand();
      QualType OutputType = S.getOutputExpr(Output)->getType();
      QualType InputTy = InputExpr->getType();

      if (getContext().getTypeSize(OutputType) >
          getContext().getTypeSize(InputTy)) {
        // Use ptrtoint as appropriate so that we can do our extension.
        if (isa<llvm::PointerType>(Arg->getType()))
          Arg = Builder.CreatePtrToInt(Arg, IntPtrTy);
        llvm::Type *OutputTy = ConvertType(OutputType);
        if (isa<llvm::IntegerType>(OutputTy))
          Arg = Builder.CreateZExt(Arg, OutputTy);
        else if (isa<llvm::PointerType>(OutputTy))
          Arg = Builder.CreateZExt(Arg, IntPtrTy);
        else {
          assert(OutputTy->isFloatingPointTy() && "Unexpected output type");
          Arg = Builder.CreateFPExt(Arg, OutputTy);
        }
      }
    }
    if (llvm::Type* AdjTy =
          getTargetHooks().adjustInlineAsmType(*this, InputConstraint,
                                               Arg->getType()))
      Arg = Builder.CreateBitCast(Arg, AdjTy);
    else
      CGM.getDiags().Report(S.getAsmLoc(), diag::err_asm_invalid_type_in_input)
          << InputExpr->getType() << InputConstraint;

    // Update largest vector width for any vector types.
    if (auto *VT = dyn_cast<llvm::VectorType>(Arg->getType()))
      LargestVectorWidth = std::max(LargestVectorWidth,
                                    VT->getPrimitiveSizeInBits());

    ArgTypes.push_back(Arg->getType());
    Args.push_back(Arg);
    Constraints += InputConstraint;
  }
  // Append the "input" part of inout constraints last.
  for (unsigned i = 0, e = InOutArgs.size(); i != e; i++) {
    ArgTypes.push_back(InOutArgTypes[i]);
    Args.push_back(InOutArgs[i]);
  }
  Constraints += InOutConstraints;

  // Clobbers
  for (unsigned i = 0, e = S.getNumClobbers(); i != e; i++) {
    StringRef Clobber = S.getClobber(i);

    if (Clobber == "memory")
      ReadOnly = ReadNone = false;
    else if (Clobber != "cc")
      Clobber = getTarget().getNormalizedGCCRegisterName(Clobber);

    if (!Constraints.empty())
      Constraints += ',';

    Constraints += "~{";
    Constraints += Clobber;
    Constraints += '}';
  }

  // Add machine specific clobbers
  std::string MachineClobbers = getTarget().getClobbers();
  if (!MachineClobbers.empty()) {
    if (!Constraints.empty())
      Constraints += ',';
    Constraints += MachineClobbers;
  }
  llvm::Type *ResultType;
  if (ResultRegTypes.empty())
    ResultType = VoidTy;
  else if (ResultRegTypes.size() == 1)
    ResultType = ResultRegTypes[0];
  else
    ResultType = llvm::StructType::get(getLLVMContext(), ResultRegTypes);

  llvm::FunctionType *FTy =
      llvm::FunctionType::get(ResultType, ArgTypes, false);

  bool HasSideEffect = S.isVolatile() || S.getNumOutputs() == 0;
  llvm::InlineAsm::AsmDialect AsmDialect = isa<MSAsmStmt>(&S) ?
    llvm::InlineAsm::AD_Intel : llvm::InlineAsm::AD_ATT;
  llvm::InlineAsm *IA =
      llvm::InlineAsm::get(FTy, AsmString, Constraints, HasSideEffect,
                           /* IsAlignStack */ false, AsmDialect);
  llvm::CallInst *Result =
      Builder.CreateCall(IA, Args, getBundlesForFunclet(IA));
  Result->addAttribute(llvm::AttributeList::FunctionIndex,
                       llvm::Attribute::NoUnwind);

  // Attach readnone and readonly attributes.
  if (!HasSideEffect) {
    if (ReadNone)
      Result->addAttribute(llvm::AttributeList::FunctionIndex,
                           llvm::Attribute::ReadNone);
    else if (ReadOnly)
      Result->addAttribute(llvm::AttributeList::FunctionIndex,
                           llvm::Attribute::ReadOnly);
  }
  // Slap the source location of the inline asm into a !srcloc metadata on the
  // call.
  if (const GCCAsmStmt *gccAsmStmt = dyn_cast<GCCAsmStmt>(&S)) {
    Result->setMetadata("srcloc", getAsmSrcLocInfo(gccAsmStmt->getAsmString(),
                                                   *this));
  } else {
    // At least put the line number on MS inline asm blobs.
    auto Loc = llvm::ConstantInt::get(Int32Ty, S.getAsmLoc().getRawEncoding());
    Result->setMetadata("srcloc",
                        llvm::MDNode::get(getLLVMContext(),
                                          llvm::ConstantAsMetadata::get(Loc)));
  }

  if (getLangOpts().assumeFunctionsAreConvergent()) {
    // Conservatively, mark all inline asm blocks in CUDA or OpenCL as
    // convergent (meaning, they may call an intrinsically convergent op, such
    // as bar.sync, and so can't have certain optimizations applied around
    // them).
    Result->addAttribute(llvm::AttributeList::FunctionIndex,
                         llvm::Attribute::Convergent);
  }
  // Extract all of the register value results from the asm.
  std::vector<llvm::Value*> RegResults;
  if (ResultRegTypes.size() == 1) {
    RegResults.push_back(Result);
  } else {
    for (unsigned i = 0, e = ResultRegTypes.size(); i != e; ++i) {
      llvm::Value *Tmp = Builder.CreateExtractValue(Result, i, "asmresult");
      RegResults.push_back(Tmp);
    }
  }

  assert(RegResults.size() == ResultRegTypes.size());
  assert(RegResults.size() == ResultTruncRegTypes.size());
  assert(RegResults.size() == ResultRegDests.size());
  for (unsigned i = 0, e = RegResults.size(); i != e; ++i) {
    llvm::Value *Tmp = RegResults[i];

    // If the result type of the LLVM IR asm doesn't match the result type of
    // the expression, do the conversion.
    if (ResultRegTypes[i] != ResultTruncRegTypes[i]) {
      llvm::Type *TruncTy = ResultTruncRegTypes[i];

      // Truncate the integer result to the right size; note that TruncTy can
      // be a pointer.
      if (TruncTy->isFloatingPointTy())
        Tmp = Builder.CreateFPTrunc(Tmp, TruncTy);
      else if (TruncTy->isPointerTy() && Tmp->getType()->isIntegerTy()) {
        uint64_t ResSize = CGM.getDataLayout().getTypeSizeInBits(TruncTy);
        Tmp = Builder.CreateTrunc(Tmp,
                   llvm::IntegerType::get(getLLVMContext(), (unsigned)ResSize));
        Tmp = Builder.CreateIntToPtr(Tmp, TruncTy);
      } else if (Tmp->getType()->isPointerTy() && TruncTy->isIntegerTy()) {
        uint64_t TmpSize = CGM.getDataLayout().getTypeSizeInBits(Tmp->getType());
        Tmp = Builder.CreatePtrToInt(Tmp,
                   llvm::IntegerType::get(getLLVMContext(), (unsigned)TmpSize));
        Tmp = Builder.CreateTrunc(Tmp, TruncTy);
      } else if (TruncTy->isIntegerTy()) {
        Tmp = Builder.CreateZExtOrTrunc(Tmp, TruncTy);
      } else if (TruncTy->isVectorTy()) {
        Tmp = Builder.CreateBitCast(Tmp, TruncTy);
      }
    }

    EmitStoreThroughLValue(RValue::get(Tmp), ResultRegDests[i]);
  }
}
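/// InitCapturedStruct - Allocate the record that holds the captures of a
/// CapturedStmt and initialize each of its fields, storing captured VLA sizes
/// directly and emitting ordinary captures via their initializers.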
LValue CodeGenFunction::InitCapturedStruct(const CapturedStmt &S) {
  const RecordDecl *RD = S.getCapturedRecordDecl();
  QualType RecordTy = getContext().getRecordType(RD);

  // Initialize the captured struct.
  LValue SlotLV =
    MakeAddrLValue(CreateMemTemp(RecordTy, "agg.captured"), RecordTy);

  RecordDecl::field_iterator CurField = RD->field_begin();
  for (CapturedStmt::const_capture_init_iterator I = S.capture_init_begin(),
                                                 E = S.capture_init_end();
       I != E; ++I, ++CurField) {
    LValue LV = EmitLValueForFieldInitialization(SlotLV, *CurField);
    if (CurField->hasCapturedVLAType()) {
      auto VAT = CurField->getCapturedVLAType();
      EmitStoreThroughLValue(RValue::get(VLASizeMap[VAT->getSizeExpr()]), LV);
    } else {
      EmitInitializerForField(*CurField, LV, *I);
    }
  }

  return SlotLV;
}
/// Generate an outlined function for the body of a CapturedStmt, store any
/// captured variables into the captured struct, and call the outlined
/// function.
llvm::Function *
CodeGenFunction::EmitCapturedStmt(const CapturedStmt &S, CapturedRegionKind K) {
  LValue CapStruct = InitCapturedStruct(S);

  // Emit the CapturedDecl
  CodeGenFunction CGF(CGM, true);
  CGCapturedStmtRAII CapInfoRAII(CGF, new CGCapturedStmtInfo(S, K));
  llvm::Function *F = CGF.GenerateCapturedStmtFunction(S);
  delete CGF.CapturedStmtInfo;

  // Emit call to the helper function.
  EmitCallOrInvoke(F, CapStruct.getPointer());

  return F;
}
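/// GenerateCapturedStmtArgument - Build the captured struct for \p S and
/// return its address, suitable for passing to an outlined captured-statement
/// helper function.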
Address CodeGenFunction::GenerateCapturedStmtArgument(const CapturedStmt &S) {
  LValue CapStruct = InitCapturedStruct(S);
  return CapStruct.getAddress();
}
/// Creates the outlined function for a CapturedStmt.
llvm::Function *
CodeGenFunction::GenerateCapturedStmtFunction(const CapturedStmt &S) {
  assert(CapturedStmtInfo &&
    "CapturedStmtInfo should be set when generating the captured function");
  const CapturedDecl *CD = S.getCapturedDecl();
  const RecordDecl *RD = S.getCapturedRecordDecl();
  SourceLocation Loc = S.getBeginLoc();
  assert(CD->hasBody() && "missing CapturedDecl body");

  // Build the argument list.
  ASTContext &Ctx = CGM.getContext();
  FunctionArgList Args;
  Args.append(CD->param_begin(), CD->param_end());

  // Create the function declaration.
  const CGFunctionInfo &FuncInfo =
    CGM.getTypes().arrangeBuiltinFunctionDeclaration(Ctx.VoidTy, Args);
  llvm::FunctionType *FuncLLVMTy = CGM.getTypes().GetFunctionType(FuncInfo);

  llvm::Function *F =
    llvm::Function::Create(FuncLLVMTy, llvm::GlobalValue::InternalLinkage,
                           CapturedStmtInfo->getHelperName(), &CGM.getModule());
  CGM.SetInternalFunctionAttributes(CD, F, FuncInfo);
  if (CD->isNothrow())
    F->addFnAttr(llvm::Attribute::NoUnwind);

  // Generate the function.
  StartFunction(CD, Ctx.VoidTy, F, FuncInfo, Args, CD->getLocation(),
                CD->getBody()->getBeginLoc());
  // Set the context parameter in CapturedStmtInfo.
  Address DeclPtr = GetAddrOfLocalVar(CD->getContextParam());
  CapturedStmtInfo->setContextValue(Builder.CreateLoad(DeclPtr));

  // Initialize variable-length arrays.
  LValue Base = MakeNaturalAlignAddrLValue(CapturedStmtInfo->getContextValue(),
                                           Ctx.getTagDeclType(RD));
  for (auto *FD : RD->fields()) {
    if (FD->hasCapturedVLAType()) {
      auto *ExprArg =
          EmitLoadOfLValue(EmitLValueForField(Base, FD), S.getBeginLoc())
              .getScalarVal();
      auto VAT = FD->getCapturedVLAType();
      VLASizeMap[VAT->getSizeExpr()] = ExprArg;
    }
  }

  // If 'this' is captured, load it into CXXThisValue.
  if (CapturedStmtInfo->isCXXThisExprCaptured()) {
    FieldDecl *FD = CapturedStmtInfo->getThisFieldDecl();
    LValue ThisLValue = EmitLValueForField(Base, FD);
    CXXThisValue = EmitLoadOfLValue(ThisLValue, Loc).getScalarVal();
  }

  PGO.assignRegionCounters(GlobalDecl(CD), F);
  CapturedStmtInfo->EmitBody(*this, CD->getBody());
  FinishFunction(CD->getBodyRBrace());

  return F;
}