CodeGenFunction.h

  1. //===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This is the internal per-function state used for llvm translation.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #ifndef LLVM_CLANG_LIB_CODEGEN_CODEGENFUNCTION_H
  14. #define LLVM_CLANG_LIB_CODEGEN_CODEGENFUNCTION_H
  15. #include "CGBuilder.h"
  16. #include "CGDebugInfo.h"
  17. #include "CGLoopInfo.h"
  18. #include "CGValue.h"
  19. #include "CodeGenModule.h"
  20. #include "CodeGenPGO.h"
  21. #include "EHScopeStack.h"
  22. #include "clang/AST/CharUnits.h"
  23. #include "clang/AST/ExprCXX.h"
  24. #include "clang/AST/ExprObjC.h"
  25. #include "clang/AST/ExprOpenMP.h"
  26. #include "clang/AST/Type.h"
  27. #include "clang/Basic/ABI.h"
  28. #include "clang/Basic/CapturedStmt.h"
  29. #include "clang/Basic/OpenMPKinds.h"
  30. #include "clang/Basic/TargetInfo.h"
  31. #include "clang/Frontend/CodeGenOptions.h"
  32. #include "llvm/ADT/ArrayRef.h"
  33. #include "llvm/ADT/DenseMap.h"
  34. #include "llvm/ADT/SmallVector.h"
  35. #include "llvm/IR/ValueHandle.h"
  36. #include "llvm/Support/Debug.h"
  37. #include "llvm/Transforms/Utils/SanitizerStats.h"
  38. namespace llvm {
  39. class BasicBlock;
  40. class LLVMContext;
  41. class MDNode;
  42. class Module;
  43. class SwitchInst;
  44. class Twine;
  45. class Value;
  46. class CallSite;
  47. }
  48. namespace clang {
  49. class ASTContext;
  50. class BlockDecl;
  51. class CXXDestructorDecl;
  52. class CXXForRangeStmt;
  53. class CXXTryStmt;
  54. class Decl;
  55. class LabelDecl;
  56. class EnumConstantDecl;
  57. class FunctionDecl;
  58. class FunctionProtoType;
  59. class LabelStmt;
  60. class ObjCContainerDecl;
  61. class ObjCInterfaceDecl;
  62. class ObjCIvarDecl;
  63. class ObjCMethodDecl;
  64. class ObjCImplementationDecl;
  65. class ObjCPropertyImplDecl;
  66. class TargetInfo;
  67. class TargetCodeGenInfo;
  68. class VarDecl;
  69. class ObjCForCollectionStmt;
  70. class ObjCAtTryStmt;
  71. class ObjCAtThrowStmt;
  72. class ObjCAtSynchronizedStmt;
  73. class ObjCAutoreleasePoolStmt;
  74. namespace CodeGen {
  75. class CodeGenTypes;
  76. class CGFunctionInfo;
  77. class CGRecordLayout;
  78. class CGBlockInfo;
  79. class CGCXXABI;
  80. class BlockByrefHelpers;
  81. class BlockByrefInfo;
  82. class BlockFlags;
  83. class BlockFieldFlags;
  84. /// The kind of evaluation to perform on values of a particular
  85. /// type. Basically, is the code in CGExprScalar, CGExprComplex, or
  86. /// CGExprAgg?
  87. ///
  88. /// TODO: should vectors maybe be split out into their own thing?
  89. enum TypeEvaluationKind {
  90. TEK_Scalar,
  91. TEK_Complex,
  92. TEK_Aggregate
  93. };
  94. /// CodeGenFunction - This class organizes the per-function state that is used
  95. /// while generating LLVM code.
  96. class CodeGenFunction : public CodeGenTypeCache {
  97. CodeGenFunction(const CodeGenFunction &) = delete;
  98. void operator=(const CodeGenFunction &) = delete;
  99. friend class CGCXXABI;
  100. public:
  101. /// A jump destination is an abstract label, branching to which may
  102. /// require a jump out through normal cleanups.
  103. struct JumpDest {
  104. JumpDest() : Block(nullptr), ScopeDepth(), Index(0) {}
  105. JumpDest(llvm::BasicBlock *Block,
  106. EHScopeStack::stable_iterator Depth,
  107. unsigned Index)
  108. : Block(Block), ScopeDepth(Depth), Index(Index) {}
  109. bool isValid() const { return Block != nullptr; }
  110. llvm::BasicBlock *getBlock() const { return Block; }
  111. EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
  112. unsigned getDestIndex() const { return Index; }
  113. // This should be used cautiously.
  114. void setScopeDepth(EHScopeStack::stable_iterator depth) {
  115. ScopeDepth = depth;
  116. }
  117. private:
  118. llvm::BasicBlock *Block;
  119. EHScopeStack::stable_iterator ScopeDepth;
  120. unsigned Index;
  121. };
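/// Illustrative sketch (assumed usage, not part of the class): a JumpDest is
/// normally obtained from getJumpDestInCurrentScope() and branched to with
/// EmitBranchThroughCleanup(), which emits any intervening cleanups. The
/// emitter function and block name below are hypothetical.
/// \code
///   void emitBreakLikeJump(CodeGenFunction &CGF) {
///     CodeGenFunction::JumpDest Dest =
///         CGF.getJumpDestInCurrentScope("jump.target");
///     // ... emit the region that may need to jump out ...
///     CGF.EmitBranchThroughCleanup(Dest); // runs cleanups, then branches
///   }
/// \endcode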
  122. CodeGenModule &CGM; // Per-module state.
  123. const TargetInfo &Target;
  124. typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy;
  125. LoopInfoStack LoopStack;
  126. CGBuilderTy Builder;
  127. /// \brief CGBuilder insert helper. This function is called after an
  128. /// instruction is created using Builder.
  129. void InsertHelper(llvm::Instruction *I, const llvm::Twine &Name,
  130. llvm::BasicBlock *BB,
  131. llvm::BasicBlock::iterator InsertPt) const;
  132. /// CurFuncDecl - Holds the Decl for the current outermost
  133. /// non-closure context.
  134. const Decl *CurFuncDecl;
  135. /// CurCodeDecl - This is the inner-most code context, which includes blocks.
  136. const Decl *CurCodeDecl;
  137. const CGFunctionInfo *CurFnInfo;
  138. QualType FnRetTy;
  139. llvm::Function *CurFn;
  140. /// CurGD - The GlobalDecl for the current function being compiled.
  141. GlobalDecl CurGD;
  142. /// PrologueCleanupDepth - The cleanup depth enclosing all the
  143. /// cleanups associated with the parameters.
  144. EHScopeStack::stable_iterator PrologueCleanupDepth;
  145. /// ReturnBlock - Unified return block.
  146. JumpDest ReturnBlock;
  147. /// ReturnValue - The temporary alloca to hold the return
  148. /// value. This is invalid iff the function has no return value.
  149. Address ReturnValue;
  150. /// AllocaInsertPt - This is an instruction in the entry block before which
  151. /// we prefer to insert allocas.
  152. llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;
  153. /// \brief API for captured statement code generation.
  154. class CGCapturedStmtInfo {
  155. public:
  156. explicit CGCapturedStmtInfo(CapturedRegionKind K = CR_Default)
  157. : Kind(K), ThisValue(nullptr), CXXThisFieldDecl(nullptr) {}
  158. explicit CGCapturedStmtInfo(const CapturedStmt &S,
  159. CapturedRegionKind K = CR_Default)
  160. : Kind(K), ThisValue(nullptr), CXXThisFieldDecl(nullptr) {
  161. RecordDecl::field_iterator Field =
  162. S.getCapturedRecordDecl()->field_begin();
  163. for (CapturedStmt::const_capture_iterator I = S.capture_begin(),
  164. E = S.capture_end();
  165. I != E; ++I, ++Field) {
  166. if (I->capturesThis())
  167. CXXThisFieldDecl = *Field;
  168. else if (I->capturesVariable())
  169. CaptureFields[I->getCapturedVar()] = *Field;
  170. }
  171. }
  172. virtual ~CGCapturedStmtInfo();
  173. CapturedRegionKind getKind() const { return Kind; }
  174. virtual void setContextValue(llvm::Value *V) { ThisValue = V; }
  175. /// \brief Retrieve the value of the context parameter.
  176. virtual llvm::Value *getContextValue() const { return ThisValue; }
  177. /// \brief Lookup the captured field decl for a variable.
  178. virtual const FieldDecl *lookup(const VarDecl *VD) const {
  179. return CaptureFields.lookup(VD);
  180. }
  181. bool isCXXThisExprCaptured() const { return getThisFieldDecl() != nullptr; }
  182. virtual FieldDecl *getThisFieldDecl() const { return CXXThisFieldDecl; }
  183. static bool classof(const CGCapturedStmtInfo *) {
  184. return true;
  185. }
  186. /// \brief Emit the captured statement body.
  187. virtual void EmitBody(CodeGenFunction &CGF, const Stmt *S) {
  188. CGF.incrementProfileCounter(S);
  189. CGF.EmitStmt(S);
  190. }
  191. /// \brief Get the name of the capture helper.
  192. virtual StringRef getHelperName() const { return "__captured_stmt"; }
  193. private:
  194. /// \brief The kind of captured statement being generated.
  195. CapturedRegionKind Kind;
  196. /// \brief Keep the map between VarDecl and FieldDecl.
  197. llvm::SmallDenseMap<const VarDecl *, FieldDecl *> CaptureFields;
  198. /// \brief The base address of the captured record, passed in as the first
  199. /// argument of the parallel region function.
  200. llvm::Value *ThisValue;
  201. /// \brief Captured 'this' type.
  202. FieldDecl *CXXThisFieldDecl;
  203. };
  204. CGCapturedStmtInfo *CapturedStmtInfo;
  205. /// \brief RAII for correct setting/restoring of CapturedStmtInfo.
  206. class CGCapturedStmtRAII {
  207. private:
  208. CodeGenFunction &CGF;
  209. CGCapturedStmtInfo *PrevCapturedStmtInfo;
  210. public:
  211. CGCapturedStmtRAII(CodeGenFunction &CGF,
  212. CGCapturedStmtInfo *NewCapturedStmtInfo)
  213. : CGF(CGF), PrevCapturedStmtInfo(CGF.CapturedStmtInfo) {
  214. CGF.CapturedStmtInfo = NewCapturedStmtInfo;
  215. }
  216. ~CGCapturedStmtRAII() { CGF.CapturedStmtInfo = PrevCapturedStmtInfo; }
  217. };
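/// Illustrative sketch (assumed usage): install a CGCapturedStmtInfo for the
/// duration of emitting a captured region; the previous value is restored
/// automatically. The surrounding function is hypothetical.
/// \code
///   void emitCapturedRegion(CodeGenFunction &CGF, const CapturedStmt &S) {
///     CodeGenFunction::CGCapturedStmtInfo Info(S, CR_Default);
///     CodeGenFunction::CGCapturedStmtRAII Raii(CGF, &Info);
///     // CGF.CapturedStmtInfo now points at Info until Raii is destroyed.
///   }
/// \endcode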
  218. /// \brief Sanitizers enabled for this function.
  219. SanitizerSet SanOpts;
  220. /// \brief True if CodeGen currently emits code implementing sanitizer checks.
  221. bool IsSanitizerScope;
  222. /// \brief RAII object to set/unset CodeGenFunction::IsSanitizerScope.
  223. class SanitizerScope {
  224. CodeGenFunction *CGF;
  225. public:
  226. SanitizerScope(CodeGenFunction *CGF);
  227. ~SanitizerScope();
  228. };
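/// Illustrative sketch (assumed usage): mark the instructions emitted for a
/// sanitizer check by wrapping them in a SanitizerScope. The elided check
/// emission stands in for real code.
/// \code
///   {
///     CodeGenFunction::SanitizerScope SanScope(&CGF);
///     // ... emit the compare-and-branch for the check ...
///   } // IsSanitizerScope is cleared again here
/// \endcode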
  229. /// In C++, whether we are generating code for a thunk. This controls whether we
  230. /// should emit cleanups.
  231. bool CurFuncIsThunk;
  232. /// In ARC, whether we should autorelease the return value.
  233. bool AutoreleaseResult;
  234. /// Whether we processed a Microsoft-style asm block during CodeGen. These can
  235. /// potentially set the return value.
  236. bool SawAsmBlock;
  237. /// True if the current function is an outlined SEH helper. This can be a
  238. /// finally block or filter expression.
  239. bool IsOutlinedSEHHelper;
  240. const CodeGen::CGBlockInfo *BlockInfo;
  241. llvm::Value *BlockPointer;
  242. llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
  243. FieldDecl *LambdaThisCaptureField;
  244. /// \brief A mapping from NRVO variables to the flags used to indicate
  245. /// when the NRVO has been applied to this variable.
  246. llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;
  247. EHScopeStack EHStack;
  248. llvm::SmallVector<char, 256> LifetimeExtendedCleanupStack;
  249. llvm::SmallVector<const JumpDest *, 2> SEHTryEpilogueStack;
  250. llvm::Instruction *CurrentFuncletPad = nullptr;
  251. /// Header for data within LifetimeExtendedCleanupStack.
  252. struct LifetimeExtendedCleanupHeader {
  253. /// The size of the following cleanup object.
  254. unsigned Size;
  255. /// The kind of cleanup to push: a value from the CleanupKind enumeration.
  256. CleanupKind Kind;
  257. size_t getSize() const { return Size; }
  258. CleanupKind getKind() const { return Kind; }
  259. };
  260. /// i32s containing the indexes of the cleanup destinations.
  261. llvm::AllocaInst *NormalCleanupDest;
  262. unsigned NextCleanupDestIndex;
  263. /// FirstBlockInfo - The head of a singly-linked-list of block layouts.
  264. CGBlockInfo *FirstBlockInfo;
  265. /// EHResumeBlock - Unified block containing a call to llvm.eh.resume.
  266. llvm::BasicBlock *EHResumeBlock;
  267. /// The exception slot. All landing pads write the current exception pointer
  268. /// into this alloca.
  269. llvm::Value *ExceptionSlot;
  270. /// The selector slot. Under the MandatoryCleanup model, all landing pads
  271. /// write the current selector value into this alloca.
  272. llvm::AllocaInst *EHSelectorSlot;
  273. /// A stack of exception code slots. Entering an __except block pushes a slot
  274. /// on the stack and leaving pops one. The __exception_code() intrinsic loads
  275. /// a value from the top of the stack.
  276. SmallVector<Address, 1> SEHCodeSlotStack;
  277. /// Value returned by __exception_info intrinsic.
  278. llvm::Value *SEHInfo = nullptr;
  279. /// Emits a landing pad for the current EH stack.
  280. llvm::BasicBlock *EmitLandingPad();
  281. llvm::BasicBlock *getInvokeDestImpl();
  282. template <class T>
  283. typename DominatingValue<T>::saved_type saveValueInCond(T value) {
  284. return DominatingValue<T>::save(*this, value);
  285. }
  286. public:
  287. /// ObjCEHValueStack - Stack of Objective-C exception values, used for
  288. /// rethrows.
  289. SmallVector<llvm::Value*, 8> ObjCEHValueStack;
  290. /// A class controlling the emission of a finally block.
  291. class FinallyInfo {
  292. /// Where the catchall's edge through the cleanup should go.
  293. JumpDest RethrowDest;
  294. /// A function to call to enter the catch.
  295. llvm::Constant *BeginCatchFn;
  296. /// An i1 variable indicating whether or not the @finally is
  297. /// running for an exception.
  298. llvm::AllocaInst *ForEHVar;
  299. /// An i8* variable into which the exception pointer to rethrow
  300. /// has been saved.
  301. llvm::AllocaInst *SavedExnVar;
  302. public:
  303. void enter(CodeGenFunction &CGF, const Stmt *Finally,
  304. llvm::Constant *beginCatchFn, llvm::Constant *endCatchFn,
  305. llvm::Constant *rethrowFn);
  306. void exit(CodeGenFunction &CGF);
  307. };
  308. /// Returns true inside SEH __try blocks.
  309. bool isSEHTryScope() const { return !SEHTryEpilogueStack.empty(); }
  310. /// Returns true while emitting a cleanuppad.
  311. bool isCleanupPadScope() const {
  312. return CurrentFuncletPad && isa<llvm::CleanupPadInst>(CurrentFuncletPad);
  313. }
  314. /// pushFullExprCleanup - Push a cleanup to be run at the end of the
  315. /// current full-expression. Safe against the possibility that
  316. /// we're currently inside a conditionally-evaluated expression.
  317. template <class T, class... As>
  318. void pushFullExprCleanup(CleanupKind kind, As... A) {
  319. // If we're not in a conditional branch, or if none of the
  320. // arguments requires saving, then use the unconditional cleanup.
  321. if (!isInConditionalBranch())
  322. return EHStack.pushCleanup<T>(kind, A...);
  323. // Stash values in a tuple so we can guarantee the order of saves.
  324. typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
  325. SavedTuple Saved{saveValueInCond(A)...};
  326. typedef EHScopeStack::ConditionalCleanup<T, As...> CleanupType;
  327. EHStack.pushCleanupTuple<CleanupType>(kind, Saved);
  328. initFullExprCleanup();
  329. }
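/// Illustrative sketch (assumed usage): push a cleanup that is also safe
/// inside conditionally-evaluated code. 'MyCleanup' is a hypothetical class
/// derived from EHScopeStack::Cleanup, and Ptr/Ty are hypothetical
/// constructor arguments.
/// \code
///   CGF.pushFullExprCleanup<MyCleanup>(NormalAndEHCleanup, Ptr, Ty);
/// \endcode
/// If the current emission point lies inside a conditional branch, the
/// arguments are saved first and the cleanup only runs when that branch was
/// actually taken.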
  330. /// \brief Queue a cleanup to be pushed after finishing the current
  331. /// full-expression.
  332. template <class T, class... As>
  333. void pushCleanupAfterFullExpr(CleanupKind Kind, As... A) {
  334. assert(!isInConditionalBranch() && "can't defer conditional cleanup");
  335. LifetimeExtendedCleanupHeader Header = { sizeof(T), Kind };
  336. size_t OldSize = LifetimeExtendedCleanupStack.size();
  337. LifetimeExtendedCleanupStack.resize(
  338. LifetimeExtendedCleanupStack.size() + sizeof(Header) + Header.Size);
  339. static_assert(sizeof(Header) % llvm::AlignOf<T>::Alignment == 0,
  340. "Cleanup will be allocated on misaligned address");
  341. char *Buffer = &LifetimeExtendedCleanupStack[OldSize];
  342. new (Buffer) LifetimeExtendedCleanupHeader(Header);
  343. new (Buffer + sizeof(Header)) T(A...);
  344. }
  345. /// Set up the last cleanup that was pushed as a conditional
  346. /// full-expression cleanup.
  347. void initFullExprCleanup();
  348. /// PushDestructorCleanup - Push a cleanup to call the
  349. /// complete-object destructor of an object of the given type at the
  350. /// given address. Does nothing if T is not a C++ class type with a
  351. /// non-trivial destructor.
  352. void PushDestructorCleanup(QualType T, Address Addr);
  353. /// PushDestructorCleanup - Push a cleanup to call the
  354. /// complete-object variant of the given destructor on the object at
  355. /// the given address.
  356. void PushDestructorCleanup(const CXXDestructorDecl *Dtor, Address Addr);
  357. /// PopCleanupBlock - Will pop the cleanup entry on the stack and
  358. /// process all branch fixups.
  359. void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
  360. /// DeactivateCleanupBlock - Deactivates the given cleanup block.
  361. /// The block cannot be reactivated. Pops it if it's the top of the
  362. /// stack.
  363. ///
  364. /// \param DominatingIP - An instruction which is known to
  365. /// dominate the current IP (if set) and which lies along
  366. /// all paths of execution between the current IP and
  367. /// the point at which the cleanup comes into scope.
  368. void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
  369. llvm::Instruction *DominatingIP);
  370. /// ActivateCleanupBlock - Activates an initially-inactive cleanup.
  371. /// Cannot be used to resurrect a deactivated cleanup.
  372. ///
  373. /// \param DominatingIP - An instruction which is known to
  374. /// dominate the current IP (if set) and which lies along
  375. /// all paths of execution between the current IP and
  376. /// the point at which the cleanup comes into scope.
  377. void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
  378. llvm::Instruction *DominatingIP);
  379. /// \brief Enters a new scope for capturing cleanups, all of which
  380. /// will be executed once the scope is exited.
  381. class RunCleanupsScope {
  382. EHScopeStack::stable_iterator CleanupStackDepth;
  383. size_t LifetimeExtendedCleanupStackSize;
  384. bool OldDidCallStackSave;
  385. protected:
  386. bool PerformCleanup;
  387. private:
  388. RunCleanupsScope(const RunCleanupsScope &) = delete;
  389. void operator=(const RunCleanupsScope &) = delete;
  390. protected:
  391. CodeGenFunction& CGF;
  392. public:
  393. /// \brief Enter a new cleanup scope.
  394. explicit RunCleanupsScope(CodeGenFunction &CGF)
  395. : PerformCleanup(true), CGF(CGF)
  396. {
  397. CleanupStackDepth = CGF.EHStack.stable_begin();
  398. LifetimeExtendedCleanupStackSize =
  399. CGF.LifetimeExtendedCleanupStack.size();
  400. OldDidCallStackSave = CGF.DidCallStackSave;
  401. CGF.DidCallStackSave = false;
  402. }
  403. /// \brief Exit this cleanup scope, emitting any accumulated
  404. /// cleanups.
  405. ~RunCleanupsScope() {
  406. if (PerformCleanup) {
  407. CGF.DidCallStackSave = OldDidCallStackSave;
  408. CGF.PopCleanupBlocks(CleanupStackDepth,
  409. LifetimeExtendedCleanupStackSize);
  410. }
  411. }
  412. /// \brief Determine whether this scope requires any cleanups.
  413. bool requiresCleanups() const {
  414. return CGF.EHStack.stable_begin() != CleanupStackDepth;
  415. }
  416. /// \brief Force the emission of cleanups now, instead of waiting
  417. /// until this object is destroyed.
  418. void ForceCleanup() {
  419. assert(PerformCleanup && "Already forced cleanup");
  420. CGF.DidCallStackSave = OldDidCallStackSave;
  421. CGF.PopCleanupBlocks(CleanupStackDepth,
  422. LifetimeExtendedCleanupStackSize);
  423. PerformCleanup = false;
  424. }
  425. };
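/// Illustrative sketch (assumed usage): bracket a region so that any cleanups
/// pushed while emitting it are popped and emitted when the scope ends. The
/// emitter function is hypothetical.
/// \code
///   void emitRegion(CodeGenFunction &CGF, const Stmt *Body) {
///     CodeGenFunction::RunCleanupsScope Scope(CGF);
///     CGF.EmitStmt(Body);   // may push cleanups onto CGF.EHStack
///   }                       // ~RunCleanupsScope emits them here
/// \endcode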
  426. class LexicalScope : public RunCleanupsScope {
  427. SourceRange Range;
  428. SmallVector<const LabelDecl*, 4> Labels;
  429. LexicalScope *ParentScope;
  430. LexicalScope(const LexicalScope &) = delete;
  431. void operator=(const LexicalScope &) = delete;
  432. public:
  433. /// \brief Enter a new cleanup scope.
  434. explicit LexicalScope(CodeGenFunction &CGF, SourceRange Range)
  435. : RunCleanupsScope(CGF), Range(Range), ParentScope(CGF.CurLexicalScope) {
  436. CGF.CurLexicalScope = this;
  437. if (CGDebugInfo *DI = CGF.getDebugInfo())
  438. DI->EmitLexicalBlockStart(CGF.Builder, Range.getBegin());
  439. }
  440. void addLabel(const LabelDecl *label) {
  441. assert(PerformCleanup && "adding label to dead scope?");
  442. Labels.push_back(label);
  443. }
  444. /// \brief Exit this cleanup scope, emitting any accumulated
  445. /// cleanups.
  446. ~LexicalScope() {
  447. if (CGDebugInfo *DI = CGF.getDebugInfo())
  448. DI->EmitLexicalBlockEnd(CGF.Builder, Range.getEnd());
  449. // If we should perform a cleanup, force them now. Note that
  450. // this ends the cleanup scope before rescoping any labels.
  451. if (PerformCleanup) {
  452. ApplyDebugLocation DL(CGF, Range.getEnd());
  453. ForceCleanup();
  454. }
  455. }
  456. /// \brief Force the emission of cleanups now, instead of waiting
  457. /// until this object is destroyed.
  458. void ForceCleanup() {
  459. CGF.CurLexicalScope = ParentScope;
  460. RunCleanupsScope::ForceCleanup();
  461. if (!Labels.empty())
  462. rescopeLabels();
  463. }
  464. void rescopeLabels();
  465. };
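/// Illustrative sketch (assumed usage): like RunCleanupsScope, but also opens
/// and closes a debug-info lexical block and rescopes any labels declared in
/// the range. The statement S is hypothetical.
/// \code
///   {
///     CodeGenFunction::LexicalScope Scope(CGF, S->getSourceRange());
///     CGF.EmitStmt(S);
///   } // cleanups emitted, debug lexical block ended, labels rescoped
/// \endcode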
  466. typedef llvm::DenseMap<const Decl *, Address> DeclMapTy;
  467. /// \brief The scope used to remap some variables as private in the OpenMP
  468. /// loop body (or other captured region emitted without outlining), and to
  469. /// restore old vars back on exit.
  470. class OMPPrivateScope : public RunCleanupsScope {
  471. DeclMapTy SavedLocals;
  472. DeclMapTy SavedPrivates;
  473. private:
  474. OMPPrivateScope(const OMPPrivateScope &) = delete;
  475. void operator=(const OMPPrivateScope &) = delete;
  476. public:
  477. /// \brief Enter a new OpenMP private scope.
  478. explicit OMPPrivateScope(CodeGenFunction &CGF) : RunCleanupsScope(CGF) {}
  479. /// \brief Registers the \a LocalVD variable as private and applies the \a
  480. /// PrivateGen function to it to generate the corresponding private variable.
  481. /// \a PrivateGen returns an address of the generated private variable.
  482. /// \return true if the variable is registered as private, false if it has
  483. /// been privatized already.
  484. bool
  485. addPrivate(const VarDecl *LocalVD,
  486. llvm::function_ref<Address()> PrivateGen) {
  487. assert(PerformCleanup && "adding private to dead scope");
  488. // Only save it once.
  489. if (SavedLocals.count(LocalVD)) return false;
  490. // Copy the existing local entry to SavedLocals.
  491. auto it = CGF.LocalDeclMap.find(LocalVD);
  492. if (it != CGF.LocalDeclMap.end()) {
  493. SavedLocals.insert({LocalVD, it->second});
  494. } else {
  495. SavedLocals.insert({LocalVD, Address::invalid()});
  496. }
  497. // Generate the private entry.
  498. Address Addr = PrivateGen();
  499. QualType VarTy = LocalVD->getType();
  500. if (VarTy->isReferenceType()) {
  501. Address Temp = CGF.CreateMemTemp(VarTy);
  502. CGF.Builder.CreateStore(Addr.getPointer(), Temp);
  503. Addr = Temp;
  504. }
  505. SavedPrivates.insert({LocalVD, Addr});
  506. return true;
  507. }
  508. /// \brief Privatizes local variables previously registered as private.
  509. /// Registration is separate from the actual privatization to allow
  510. /// initializers to use the values of the original variables, not the private ones.
  511. /// This is important, for example, if the private variable is a class
  512. /// variable initialized by a constructor that references other private
  513. /// variables. At initialization time, the original variables must be used,
  514. /// not the private copies.
  515. /// \return true if at least one variable was privatized, false otherwise.
  516. bool Privatize() {
  517. copyInto(SavedPrivates, CGF.LocalDeclMap);
  518. SavedPrivates.clear();
  519. return !SavedLocals.empty();
  520. }
  521. void ForceCleanup() {
  522. RunCleanupsScope::ForceCleanup();
  523. copyInto(SavedLocals, CGF.LocalDeclMap);
  524. SavedLocals.clear();
  525. }
  526. /// \brief Exit scope - all the mapped variables are restored.
  527. ~OMPPrivateScope() {
  528. if (PerformCleanup)
  529. ForceCleanup();
  530. }
  531. private:
  532. /// Copy all the entries in the source map over the corresponding
  533. /// entries in the destination, which must exist.
  534. static void copyInto(const DeclMapTy &src, DeclMapTy &dest) {
  535. for (auto &pair : src) {
  536. if (!pair.second.isValid()) {
  537. dest.erase(pair.first);
  538. continue;
  539. }
  540. auto it = dest.find(pair.first);
  541. if (it != dest.end()) {
  542. it->second = pair.second;
  543. } else {
  544. dest.insert(pair);
  545. }
  546. }
  547. }
  548. };
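/// Illustrative sketch (assumed usage): register a private copy for a
/// variable and then switch the local mapping over to it. VD is a
/// hypothetical VarDecl.
/// \code
///   CodeGenFunction::OMPPrivateScope PrivateScope(CGF);
///   PrivateScope.addPrivate(VD, [&CGF, VD]() -> Address {
///     return CGF.CreateMemTemp(VD->getType());
///   });
///   (void)PrivateScope.Privatize(); // uses of VD now see the private copy
/// \endcode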
  549. /// \brief Takes the old cleanup stack size and emits the cleanup blocks
  550. /// that have been added.
  551. void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);
  552. /// \brief Takes the old cleanup stack size and emits the cleanup blocks
  553. /// that have been added, then adds all lifetime-extended cleanups from
  554. /// the given position to the stack.
  555. void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize,
  556. size_t OldLifetimeExtendedStackSize);
  557. void ResolveBranchFixups(llvm::BasicBlock *Target);
  558. /// The given basic block lies in the current EH scope, but may be a
  559. /// target of a potentially scope-crossing jump; get a stable handle
  560. /// to which we can perform this jump later.
  561. JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
  562. return JumpDest(Target,
  563. EHStack.getInnermostNormalCleanup(),
  564. NextCleanupDestIndex++);
  565. }
  566. /// The given basic block lies in the current EH scope, but may be a
  567. /// target of a potentially scope-crossing jump; get a stable handle
  568. /// to which we can perform this jump later.
  569. JumpDest getJumpDestInCurrentScope(StringRef Name = StringRef()) {
  570. return getJumpDestInCurrentScope(createBasicBlock(Name));
  571. }
  572. /// EmitBranchThroughCleanup - Emit a branch from the current insert
  573. /// block through the normal cleanup handling code (if any) and then
  574. /// on to \arg Dest.
  575. void EmitBranchThroughCleanup(JumpDest Dest);
  576. /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
  577. /// specified destination obviously has no cleanups to run. 'false' is always
  578. /// a conservatively correct answer for this method.
  579. bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const;
  580. /// popCatchScope - Pops the catch scope at the top of the EHScope
  581. /// stack, emitting any required code (other than the catch handlers
  582. /// themselves).
  583. void popCatchScope();
  584. llvm::BasicBlock *getEHResumeBlock(bool isCleanup);
  585. llvm::BasicBlock *getEHDispatchBlock(EHScopeStack::stable_iterator scope);
  586. llvm::BasicBlock *getMSVCDispatchBlock(EHScopeStack::stable_iterator scope);
  587. /// An object to manage conditionally-evaluated expressions.
  588. class ConditionalEvaluation {
  589. llvm::BasicBlock *StartBB;
  590. public:
  591. ConditionalEvaluation(CodeGenFunction &CGF)
  592. : StartBB(CGF.Builder.GetInsertBlock()) {}
  593. void begin(CodeGenFunction &CGF) {
  594. assert(CGF.OutermostConditional != this);
  595. if (!CGF.OutermostConditional)
  596. CGF.OutermostConditional = this;
  597. }
  598. void end(CodeGenFunction &CGF) {
  599. assert(CGF.OutermostConditional != nullptr);
  600. if (CGF.OutermostConditional == this)
  601. CGF.OutermostConditional = nullptr;
  602. }
  603. /// Returns a block which will be executed prior to each
  604. /// evaluation of the conditional code.
  605. llvm::BasicBlock *getStartingBlock() const {
  606. return StartBB;
  607. }
  608. };
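/// Illustrative sketch (assumed usage): bracket the emission of code that
/// only runs on one arm of a conditional, so that any cleanups pushed inside
/// become conditional cleanups. The elided emission stands in for real code.
/// \code
///   CodeGenFunction::ConditionalEvaluation Eval(CGF);
///   // ... emit the branch and position the builder in the taken block ...
///   Eval.begin(CGF);
///   // ... emit the conditionally-evaluated operand ...
///   Eval.end(CGF);
/// \endcode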
  609. /// isInConditionalBranch - Return true if we're currently emitting
  610. /// one branch or the other of a conditional expression.
  611. bool isInConditionalBranch() const { return OutermostConditional != nullptr; }
  612. void setBeforeOutermostConditional(llvm::Value *value, Address addr) {
  613. assert(isInConditionalBranch());
  614. llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
  615. auto store = new llvm::StoreInst(value, addr.getPointer(), &block->back());
  616. store->setAlignment(addr.getAlignment().getQuantity());
  617. }
  618. /// An RAII object to record that we're evaluating a statement
  619. /// expression.
  620. class StmtExprEvaluation {
  621. CodeGenFunction &CGF;
  622. /// We have to save the outermost conditional: cleanups in a
  623. /// statement expression aren't conditional just because the
  624. /// StmtExpr is.
  625. ConditionalEvaluation *SavedOutermostConditional;
  626. public:
  627. StmtExprEvaluation(CodeGenFunction &CGF)
  628. : CGF(CGF), SavedOutermostConditional(CGF.OutermostConditional) {
  629. CGF.OutermostConditional = nullptr;
  630. }
  631. ~StmtExprEvaluation() {
  632. CGF.OutermostConditional = SavedOutermostConditional;
  633. CGF.EnsureInsertPoint();
  634. }
  635. };
  636. /// An object which temporarily prevents a value from being
  637. /// destroyed by aggressive peephole optimizations that assume that
  638. /// all uses of a value have been realized in the IR.
  639. class PeepholeProtection {
  640. llvm::Instruction *Inst;
  641. friend class CodeGenFunction;
  642. public:
  643. PeepholeProtection() : Inst(nullptr) {}
  644. };
  645. /// A non-RAII class containing all the information about a bound
  646. /// opaque value. OpaqueValueMapping, below, is a RAII wrapper for
  647. /// this which makes individual mappings very simple; using this
  648. /// class directly is useful when you have a variable number of
  649. /// opaque values or don't want the RAII functionality for some
  650. /// reason.
  651. class OpaqueValueMappingData {
  652. const OpaqueValueExpr *OpaqueValue;
  653. bool BoundLValue;
  654. CodeGenFunction::PeepholeProtection Protection;
  655. OpaqueValueMappingData(const OpaqueValueExpr *ov,
  656. bool boundLValue)
  657. : OpaqueValue(ov), BoundLValue(boundLValue) {}
  658. public:
  659. OpaqueValueMappingData() : OpaqueValue(nullptr) {}
  660. static bool shouldBindAsLValue(const Expr *expr) {
  661. // gl-values should be bound as l-values for obvious reasons.
  662. // Records should be bound as l-values because IR generation
  663. // always keeps them in memory. Expressions of function type
  664. // act exactly like l-values but are formally required to be
  665. // r-values in C.
  666. return expr->isGLValue() ||
  667. expr->getType()->isFunctionType() ||
  668. hasAggregateEvaluationKind(expr->getType());
  669. }
  670. static OpaqueValueMappingData bind(CodeGenFunction &CGF,
  671. const OpaqueValueExpr *ov,
  672. const Expr *e) {
  673. if (shouldBindAsLValue(ov))
  674. return bind(CGF, ov, CGF.EmitLValue(e));
  675. return bind(CGF, ov, CGF.EmitAnyExpr(e));
  676. }
  677. static OpaqueValueMappingData bind(CodeGenFunction &CGF,
  678. const OpaqueValueExpr *ov,
  679. const LValue &lv) {
  680. assert(shouldBindAsLValue(ov));
  681. CGF.OpaqueLValues.insert(std::make_pair(ov, lv));
  682. return OpaqueValueMappingData(ov, true);
  683. }
  684. static OpaqueValueMappingData bind(CodeGenFunction &CGF,
  685. const OpaqueValueExpr *ov,
  686. const RValue &rv) {
  687. assert(!shouldBindAsLValue(ov));
  688. CGF.OpaqueRValues.insert(std::make_pair(ov, rv));
  689. OpaqueValueMappingData data(ov, false);
  690. // Work around an extremely aggressive peephole optimization in
  691. // EmitScalarConversion which assumes that all other uses of a
  692. // value are extant.
  693. data.Protection = CGF.protectFromPeepholes(rv);
  694. return data;
  695. }
  696. bool isValid() const { return OpaqueValue != nullptr; }
  697. void clear() { OpaqueValue = nullptr; }
  698. void unbind(CodeGenFunction &CGF) {
  699. assert(OpaqueValue && "no data to unbind!");
  700. if (BoundLValue) {
  701. CGF.OpaqueLValues.erase(OpaqueValue);
  702. } else {
  703. CGF.OpaqueRValues.erase(OpaqueValue);
  704. CGF.unprotectFromPeepholes(Protection);
  705. }
  706. }
  707. };
  708. /// An RAII object to set (and then clear) a mapping for an OpaqueValueExpr.
  709. class OpaqueValueMapping {
  710. CodeGenFunction &CGF;
  711. OpaqueValueMappingData Data;
  712. public:
  713. static bool shouldBindAsLValue(const Expr *expr) {
  714. return OpaqueValueMappingData::shouldBindAsLValue(expr);
  715. }
  716. /// Build the opaque value mapping for the given conditional
  717. /// operator if it's the GNU ?: extension. This is a common
  718. /// enough pattern that the convenience operator is really
  719. /// helpful.
  720. ///
  721. OpaqueValueMapping(CodeGenFunction &CGF,
  722. const AbstractConditionalOperator *op) : CGF(CGF) {
  723. if (isa<ConditionalOperator>(op))
  724. // Leave Data empty.
  725. return;
  726. const BinaryConditionalOperator *e = cast<BinaryConditionalOperator>(op);
  727. Data = OpaqueValueMappingData::bind(CGF, e->getOpaqueValue(),
  728. e->getCommon());
  729. }
  730. OpaqueValueMapping(CodeGenFunction &CGF,
  731. const OpaqueValueExpr *opaqueValue,
  732. LValue lvalue)
  733. : CGF(CGF), Data(OpaqueValueMappingData::bind(CGF, opaqueValue, lvalue)) {
  734. }
  735. OpaqueValueMapping(CodeGenFunction &CGF,
  736. const OpaqueValueExpr *opaqueValue,
  737. RValue rvalue)
  738. : CGF(CGF), Data(OpaqueValueMappingData::bind(CGF, opaqueValue, rvalue)) {
  739. }
  740. void pop() {
  741. Data.unbind(CGF);
  742. Data.clear();
  743. }
  744. ~OpaqueValueMapping() {
  745. if (Data.isValid()) Data.unbind(CGF);
  746. }
  747. };
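/// Illustrative sketch (assumed usage): bind an OpaqueValueExpr to an
/// already-emitted l-value while emitting an expression that refers to it.
/// OVE and E are hypothetical.
/// \code
///   LValue LV = CGF.EmitLValue(E);
///   CodeGenFunction::OpaqueValueMapping Mapping(CGF, OVE, LV);
///   // ... emit code that refers to OVE; the mapping is removed when
///   // Mapping is destroyed (or when pop() is called explicitly) ...
/// \endcode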
  748. private:
  749. CGDebugInfo *DebugInfo;
  750. bool DisableDebugInfo;
  751. /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
  752. /// calling llvm.stacksave for multiple VLAs in the same scope.
  753. bool DidCallStackSave;
  754. /// IndirectBranch - The first time an indirect goto is seen we create a block
  755. /// with an indirect branch. Every time we see the address of a label taken,
  756. /// we add the label to the indirect goto. Every subsequent indirect goto is
  757. /// codegen'd as a jump to the IndirectBranch's basic block.
  758. llvm::IndirectBrInst *IndirectBranch;
  759. /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
  760. /// decls.
  761. DeclMapTy LocalDeclMap;
  762. /// SizeArguments - If a ParmVarDecl had the pass_object_size attribute, this
  763. /// will contain a mapping from said ParmVarDecl to its implicit "object_size"
  764. /// parameter.
  765. llvm::SmallDenseMap<const ParmVarDecl *, const ImplicitParamDecl *, 2>
  766. SizeArguments;
  767. /// Track escaped local variables with auto storage. Used during SEH
  768. /// outlining to produce a call to llvm.localescape.
  769. llvm::DenseMap<llvm::AllocaInst *, int> EscapedLocals;
  770. /// LabelMap - This keeps track of the LLVM basic block for each C label.
  771. llvm::DenseMap<const LabelDecl*, JumpDest> LabelMap;
  772. // BreakContinueStack - This keeps track of where break and continue
  773. // statements should jump to.
  774. struct BreakContinue {
  775. BreakContinue(JumpDest Break, JumpDest Continue)
  776. : BreakBlock(Break), ContinueBlock(Continue) {}
  777. JumpDest BreakBlock;
  778. JumpDest ContinueBlock;
  779. };
  780. SmallVector<BreakContinue, 8> BreakContinueStack;
  781. CodeGenPGO PGO;
  782. /// Calculate branch weights appropriate for PGO data
  783. llvm::MDNode *createProfileWeights(uint64_t TrueCount, uint64_t FalseCount);
  784. llvm::MDNode *createProfileWeights(ArrayRef<uint64_t> Weights);
  785. llvm::MDNode *createProfileWeightsForLoop(const Stmt *Cond,
  786. uint64_t LoopCount);
  787. public:
  788. /// Increment the profiler's counter for the given statement.
  789. void incrementProfileCounter(const Stmt *S) {
  790. if (CGM.getCodeGenOpts().ProfileInstrGenerate)
  791. PGO.emitCounterIncrement(Builder, S);
  792. PGO.setCurrentStmt(S);
  793. }
  794. /// Get the profiler's count for the given statement.
  795. uint64_t getProfileCount(const Stmt *S) {
  796. Optional<uint64_t> Count = PGO.getStmtCount(S);
  797. if (!Count.hasValue())
  798. return 0;
  799. return *Count;
  800. }
  801. /// Set the profiler's current count.
  802. void setCurrentProfileCount(uint64_t Count) {
  803. PGO.setCurrentRegionCount(Count);
  804. }
  805. /// Get the profiler's current count. This is generally the count for the most
  806. /// recently incremented counter.
  807. uint64_t getCurrentProfileCount() {
  808. return PGO.getCurrentRegionCount();
  809. }
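/// Illustrative sketch (assumed usage, details vary by statement kind):
/// typical PGO bookkeeping around emission of a counted statement S.
/// \code
///   uint64_t ParentCount = CGF.getCurrentProfileCount();
///   CGF.incrementProfileCounter(S);   // bump the counter attached to S
///   // ... emit S ...
///   CGF.setCurrentProfileCount(ParentCount); // restore for the sibling path
/// \endcode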
  810. private:
  811. /// SwitchInsn - The nearest enclosing switch instruction. It is null if the
  812. /// current context is not within a switch statement.
  813. llvm::SwitchInst *SwitchInsn;
  814. /// The branch weights of SwitchInsn when doing instrumentation based PGO.
  815. SmallVector<uint64_t, 16> *SwitchWeights;
  816. /// CaseRangeBlock - This block holds the condition check for the last case
  817. /// statement range in the current switch instruction.
  818. llvm::BasicBlock *CaseRangeBlock;
  819. /// OpaqueLValues - Keeps track of the current set of opaque value
  820. /// expressions.
  821. llvm::DenseMap<const OpaqueValueExpr *, LValue> OpaqueLValues;
  822. llvm::DenseMap<const OpaqueValueExpr *, RValue> OpaqueRValues;
  823. // VLASizeMap - This keeps track of the associated size for each VLA type.
  824. // We track this by the size expression rather than the type itself because
  825. // in certain situations, like a const qualifier applied to an VLA typedef,
  826. // multiple VLA types can share the same size expression.
  827. // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
  828. // enter/leave scopes.
  829. llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;
  830. /// A block containing a single 'unreachable' instruction. Created
  831. /// lazily by getUnreachableBlock().
  832. llvm::BasicBlock *UnreachableBlock;
  833. /// Count of the number of return expressions in the function.
  834. unsigned NumReturnExprs;
  835. /// Count of the number of simple (constant) return expressions in the function.
  836. unsigned NumSimpleReturnExprs;
  837. /// The last regular (non-return) debug location (breakpoint) in the function.
  838. SourceLocation LastStopPoint;
  839. public:
  840. /// A scope within which we are constructing the fields of an object which
  841. /// might use a CXXDefaultInitExpr. This stashes away a 'this' value to use
  842. /// if we need to evaluate a CXXDefaultInitExpr within the evaluation.
  843. class FieldConstructionScope {
  844. public:
  845. FieldConstructionScope(CodeGenFunction &CGF, Address This)
  846. : CGF(CGF), OldCXXDefaultInitExprThis(CGF.CXXDefaultInitExprThis) {
  847. CGF.CXXDefaultInitExprThis = This;
  848. }
  849. ~FieldConstructionScope() {
  850. CGF.CXXDefaultInitExprThis = OldCXXDefaultInitExprThis;
  851. }
  852. private:
  853. CodeGenFunction &CGF;
  854. Address OldCXXDefaultInitExprThis;
  855. };
  856. /// The scope of a CXXDefaultInitExpr. Within this scope, the value of 'this'
  857. /// is overridden to be the object under construction.
  858. class CXXDefaultInitExprScope {
  859. public:
  860. CXXDefaultInitExprScope(CodeGenFunction &CGF)
  861. : CGF(CGF), OldCXXThisValue(CGF.CXXThisValue),
  862. OldCXXThisAlignment(CGF.CXXThisAlignment) {
  863. CGF.CXXThisValue = CGF.CXXDefaultInitExprThis.getPointer();
  864. CGF.CXXThisAlignment = CGF.CXXDefaultInitExprThis.getAlignment();
  865. }
  866. ~CXXDefaultInitExprScope() {
  867. CGF.CXXThisValue = OldCXXThisValue;
  868. CGF.CXXThisAlignment = OldCXXThisAlignment;
  869. }
  870. public:
  871. CodeGenFunction &CGF;
  872. llvm::Value *OldCXXThisValue;
  873. CharUnits OldCXXThisAlignment;
  874. };
  875. private:
  876. /// CXXABIThisDecl - When generating code for a C++ member function,
  877. /// this will hold the implicit 'this' declaration.
  878. ImplicitParamDecl *CXXABIThisDecl;
  879. llvm::Value *CXXABIThisValue;
  880. llvm::Value *CXXThisValue;
  881. CharUnits CXXABIThisAlignment;
  882. CharUnits CXXThisAlignment;
  883. /// The value of 'this' to use when evaluating CXXDefaultInitExprs within
  884. /// this expression.
  885. Address CXXDefaultInitExprThis = Address::invalid();
  886. /// CXXStructorImplicitParamDecl - When generating code for a constructor or
  887. /// destructor, this will hold the implicit argument (e.g. VTT).
  888. ImplicitParamDecl *CXXStructorImplicitParamDecl;
  889. llvm::Value *CXXStructorImplicitParamValue;
  890. /// OutermostConditional - Points to the outermost active
  891. /// conditional control. This is used so that we know if a
  892. /// temporary should be destroyed conditionally.
  893. ConditionalEvaluation *OutermostConditional;
  894. /// The current lexical scope.
  895. LexicalScope *CurLexicalScope;
  896. /// The current source location that should be used for exception
  897. /// handling code.
  898. SourceLocation CurEHLocation;
  899. /// BlockByrefInfos - For each __block variable, contains
  900. /// information about the layout of the variable.
  901. llvm::DenseMap<const ValueDecl *, BlockByrefInfo> BlockByrefInfos;
  902. llvm::BasicBlock *TerminateLandingPad;
  903. llvm::BasicBlock *TerminateHandler;
  904. llvm::BasicBlock *TrapBB;
  905. /// Add a kernel metadata node to the named metadata node 'opencl.kernels'.
  906. /// In the kernel metadata node, reference the kernel function and metadata
  907. /// nodes for its optional attribute qualifiers (OpenCL 1.1 6.7.2):
  908. /// - A node for the vec_type_hint(<type>) qualifier contains string
  /// "vec_type_hint", an undefined value of the <type> data type,
  /// and a Boolean that is true if the <type> is integer and signed.
  /// - A node for the work_group_size_hint(X,Y,Z) qualifier contains string
  /// "work_group_size_hint", and three 32-bit integers X, Y and Z.
  /// - A node for the reqd_work_group_size(X,Y,Z) qualifier contains string
  /// "reqd_work_group_size", and three 32-bit integers X, Y and Z.
  void EmitOpenCLKernelMetadata(const FunctionDecl *FD,
      llvm::Function *Fn);

public:
  CodeGenFunction(CodeGenModule &cgm, bool suppressNewContext=false);
  ~CodeGenFunction();

  CodeGenTypes &getTypes() const { return CGM.getTypes(); }
  ASTContext &getContext() const { return CGM.getContext(); }
  CGDebugInfo *getDebugInfo() {
    if (DisableDebugInfo)
      return nullptr;
    return DebugInfo;
  }
  void disableDebugInfo() { DisableDebugInfo = true; }
  void enableDebugInfo() { DisableDebugInfo = false; }

  bool shouldUseFusedARCCalls() {
    return CGM.getCodeGenOpts().OptimizationLevel == 0;
  }

  const LangOptions &getLangOpts() const { return CGM.getLangOpts(); }

  /// Returns a pointer to the function's exception object and selector slot,
  /// which is assigned in every landing pad.
  Address getExceptionSlot();
  Address getEHSelectorSlot();

  /// Returns the contents of the function's exception object and selector
  /// slots.
  llvm::Value *getExceptionFromSlot();
  llvm::Value *getSelectorFromSlot();

  Address getNormalCleanupDestSlot();

  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");
      new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
    }
    return UnreachableBlock;
  }

  llvm::BasicBlock *getInvokeDest() {
    if (!EHStack.requiresLandingPad()) return nullptr;
    return getInvokeDestImpl();
  }

  bool currentFunctionUsesSEHTry() const {
    const auto *FD = dyn_cast_or_null<FunctionDecl>(CurCodeDecl);
    return FD && FD->usesSEHTry();
  }

  const TargetInfo &getTarget() const { return Target; }
  llvm::LLVMContext &getLLVMContext() { return CGM.getLLVMContext(); }

  //===--------------------------------------------------------------------===//
  // Cleanups
  //===--------------------------------------------------------------------===//

  typedef void Destroyer(CodeGenFunction &CGF, Address addr, QualType ty);

  void pushIrregularPartialArrayCleanup(llvm::Value *arrayBegin,
      Address arrayEndPointer,
      QualType elementType,
      CharUnits elementAlignment,
      Destroyer *destroyer);
  void pushRegularPartialArrayCleanup(llvm::Value *arrayBegin,
      llvm::Value *arrayEnd,
      QualType elementType,
      CharUnits elementAlignment,
      Destroyer *destroyer);

  void pushDestroy(QualType::DestructionKind dtorKind,
      Address addr, QualType type);
  void pushEHDestroy(QualType::DestructionKind dtorKind,
      Address addr, QualType type);
  void pushDestroy(CleanupKind kind, Address addr, QualType type,
      Destroyer *destroyer, bool useEHCleanupForArray);
  void pushLifetimeExtendedDestroy(CleanupKind kind, Address addr,
      QualType type, Destroyer *destroyer,
      bool useEHCleanupForArray);
  void pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
      llvm::Value *CompletePtr,
      QualType ElementType);
  void pushStackRestore(CleanupKind kind, Address SPMem);
  void emitDestroy(Address addr, QualType type, Destroyer *destroyer,
      bool useEHCleanupForArray);
  llvm::Function *generateDestroyHelper(Address addr, QualType type,
      Destroyer *destroyer,
      bool useEHCleanupForArray,
      const VarDecl *VD);
  void emitArrayDestroy(llvm::Value *begin, llvm::Value *end,
      QualType elementType, CharUnits elementAlign,
      Destroyer *destroyer,
      bool checkZeroLength, bool useEHCleanup);

  Destroyer *getDestroyer(QualType::DestructionKind destructionKind);

  /// Determines whether an EH cleanup is required to destroy a type
  /// with the given destruction kind.
  bool needsEHCleanup(QualType::DestructionKind kind) {
    switch (kind) {
    case QualType::DK_none:
      return false;
    case QualType::DK_cxx_destructor:
    case QualType::DK_objc_weak_lifetime:
      return getLangOpts().Exceptions;
    case QualType::DK_objc_strong_lifetime:
      return getLangOpts().Exceptions &&
             CGM.getCodeGenOpts().ObjCAutoRefCountExceptions;
    }
    llvm_unreachable("bad destruction kind");
  }

  CleanupKind getCleanupKind(QualType::DestructionKind kind) {
    return (needsEHCleanup(kind) ? NormalAndEHCleanup : NormalCleanup);
  }
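
  // Illustrative use of the destruction helpers above (a sketch, not part of
  // the interface; 'addr' and 'type' stand for some local object that needs
  // destruction):
  //
  //   if (QualType::DestructionKind dtorKind = type.isDestructedType())
  //     pushDestroy(getCleanupKind(dtorKind), addr, type,
  //                 getDestroyer(dtorKind), /*useEHCleanupForArray=*/true);
  //
  // The pushDestroy(dtorKind, addr, type) overload is a convenience wrapper
  // that derives these arguments from the destruction kind.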
  //===--------------------------------------------------------------------===//
  // Objective-C
  //===--------------------------------------------------------------------===//

  void GenerateObjCMethod(const ObjCMethodDecl *OMD);

  void StartObjCMethod(const ObjCMethodDecl *MD, const ObjCContainerDecl *CD);

  /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
  void GenerateObjCGetter(ObjCImplementationDecl *IMP,
      const ObjCPropertyImplDecl *PID);
  void generateObjCGetterBody(const ObjCImplementationDecl *classImpl,
      const ObjCPropertyImplDecl *propImpl,
      const ObjCMethodDecl *GetterMethodDecl,
      llvm::Constant *AtomicHelperFn);

  void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
      ObjCMethodDecl *MD, bool ctor);

  /// GenerateObjCSetter - Synthesize an Objective-C property setter function
  /// for the given property.
  void GenerateObjCSetter(ObjCImplementationDecl *IMP,
      const ObjCPropertyImplDecl *PID);
  void generateObjCSetterBody(const ObjCImplementationDecl *classImpl,
      const ObjCPropertyImplDecl *propImpl,
      llvm::Constant *AtomicHelperFn);

  //===--------------------------------------------------------------------===//
  // Block Bits
  //===--------------------------------------------------------------------===//

  llvm::Value *EmitBlockLiteral(const BlockExpr *);
  llvm::Value *EmitBlockLiteral(const CGBlockInfo &Info);
  static void destroyBlockInfos(CGBlockInfo *info);

  llvm::Function *GenerateBlockFunction(GlobalDecl GD,
      const CGBlockInfo &Info,
      const DeclMapTy &ldm,
      bool IsLambdaConversionToBlock);

  llvm::Constant *GenerateCopyHelperFunction(const CGBlockInfo &blockInfo);
  llvm::Constant *GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo);
  llvm::Constant *GenerateObjCAtomicSetterCopyHelperFunction(
      const ObjCPropertyImplDecl *PID);
  llvm::Constant *GenerateObjCAtomicGetterCopyHelperFunction(
      const ObjCPropertyImplDecl *PID);
  llvm::Value *EmitBlockCopyAndAutorelease(llvm::Value *Block, QualType Ty);

  void BuildBlockRelease(llvm::Value *DeclPtr, BlockFieldFlags flags);

  class AutoVarEmission;

  void emitByrefStructureInit(const AutoVarEmission &emission);
  void enterByrefCleanup(const AutoVarEmission &emission);

  void setBlockContextParameter(const ImplicitParamDecl *D, unsigned argNum,
      llvm::Value *ptr);

  Address LoadBlockStruct();
  Address GetAddrOfBlockDecl(const VarDecl *var, bool ByRef);

  /// emitBlockByrefAddress - Computes the location of the
  /// data in a variable which is declared as __block.
  Address emitBlockByrefAddress(Address baseAddr, const VarDecl *V,
      bool followForward = true);
  Address emitBlockByrefAddress(Address baseAddr,
      const BlockByrefInfo &info,
      bool followForward,
      const llvm::Twine &name);

  const BlockByrefInfo &getBlockByrefInfo(const VarDecl *var);

  void GenerateCode(GlobalDecl GD, llvm::Function *Fn,
      const CGFunctionInfo &FnInfo);
  /// \brief Emit code for the start of a function.
  /// \param Loc The location to be associated with the function.
  /// \param StartLoc The location of the function body.
  void StartFunction(GlobalDecl GD,
      QualType RetTy,
      llvm::Function *Fn,
      const CGFunctionInfo &FnInfo,
      const FunctionArgList &Args,
      SourceLocation Loc = SourceLocation(),
      SourceLocation StartLoc = SourceLocation());

  void EmitConstructorBody(FunctionArgList &Args);
  void EmitDestructorBody(FunctionArgList &Args);
  void emitImplicitAssignmentOperatorBody(FunctionArgList &Args);
  void EmitFunctionBody(FunctionArgList &Args, const Stmt *Body);
  void EmitBlockWithFallThrough(llvm::BasicBlock *BB, const Stmt *S);

  void EmitForwardingCallToLambda(const CXXMethodDecl *LambdaCallOperator,
      CallArgList &CallArgs);
  void EmitLambdaToBlockPointerBody(FunctionArgList &Args);
  void EmitLambdaBlockInvokeBody();
  void EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD);
  void EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD);
  void EmitAsanPrologueOrEpilogue(bool Prologue);

  /// \brief Emit the unified return block, trying to avoid its emission when
  /// possible.
  /// \return The debug location of the user-written return statement if the
  /// return block is avoided.
  llvm::DebugLoc EmitReturnBlock();

  /// FinishFunction - Complete IR generation of the current function. It is
  /// legal to call this function even if there is no current insertion point.
  void FinishFunction(SourceLocation EndLoc=SourceLocation());

  void StartThunk(llvm::Function *Fn, GlobalDecl GD,
      const CGFunctionInfo &FnInfo);

  void EmitCallAndReturnForThunk(llvm::Value *Callee, const ThunkInfo *Thunk);

  void FinishThunk();

  /// Emit a musttail call for a thunk with a potentially adjusted this pointer.
  void EmitMustTailThunk(const CXXMethodDecl *MD, llvm::Value *AdjustedThisPtr,
      llvm::Value *Callee);

  /// Generate a thunk for the given method.
  void generateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
      GlobalDecl GD, const ThunkInfo &Thunk);

  llvm::Function *GenerateVarArgsThunk(llvm::Function *Fn,
      const CGFunctionInfo &FnInfo,
      GlobalDecl GD, const ThunkInfo &Thunk);
  void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
      FunctionArgList &Args);
  void EmitInitializerForField(FieldDecl *Field, LValue LHS, Expr *Init,
      ArrayRef<VarDecl *> ArrayIndexes);

  /// Struct with all the information about a dynamic [sub]class needed to set
  /// the vptr.
  struct VPtr {
    BaseSubobject Base;
    const CXXRecordDecl *NearestVBase;
    CharUnits OffsetFromNearestVBase;
    const CXXRecordDecl *VTableClass;
  };

  /// Initialize the vtable pointer of the given subobject.
  void InitializeVTablePointer(const VPtr &vptr);

  typedef llvm::SmallVector<VPtr, 4> VPtrsVector;

  typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;
  VPtrsVector getVTablePointers(const CXXRecordDecl *VTableClass);

  void getVTablePointers(BaseSubobject Base, const CXXRecordDecl *NearestVBase,
      CharUnits OffsetFromNearestVBase,
      bool BaseIsNonVirtualPrimaryBase,
      const CXXRecordDecl *VTableClass,
      VisitedVirtualBasesSetTy &VBases, VPtrsVector &vptrs);

  void InitializeVTablePointers(const CXXRecordDecl *ClassDecl);

  /// GetVTablePtr - Return the Value of the vtable pointer member pointed
  /// to by This.
  llvm::Value *GetVTablePtr(Address This, llvm::Type *VTableTy,
      const CXXRecordDecl *VTableClass);

  enum CFITypeCheckKind {
    CFITCK_VCall,
    CFITCK_NVCall,
    CFITCK_DerivedCast,
    CFITCK_UnrelatedCast,
    CFITCK_ICall,
  };

  /// \brief Derived is the presumed address of an object of type T after a
  /// cast. If T is a polymorphic class type, emit a check that the virtual
  /// table for Derived belongs to a class derived from T.
  void EmitVTablePtrCheckForCast(QualType T, llvm::Value *Derived,
      bool MayBeNull, CFITypeCheckKind TCK,
      SourceLocation Loc);

  /// EmitVTablePtrCheckForCall - Virtual method MD is being called via VTable.
  /// If vptr CFI is enabled, emit a check that VTable is valid.
  void EmitVTablePtrCheckForCall(const CXXMethodDecl *MD, llvm::Value *VTable,
      CFITypeCheckKind TCK, SourceLocation Loc);

  /// EmitVTablePtrCheck - Emit a check that VTable is a valid virtual table for
  /// RD using llvm.bitset.test.
  void EmitVTablePtrCheck(const CXXRecordDecl *RD, llvm::Value *VTable,
      CFITypeCheckKind TCK, SourceLocation Loc);

  /// CanDevirtualizeMemberFunctionCall - Checks whether a virtual call on the
  /// given expr can be devirtualized.
  bool CanDevirtualizeMemberFunctionCall(const Expr *Base,
      const CXXMethodDecl *MD);

  /// EnterDtorCleanups - Enter the cleanups necessary to complete the
  /// given phase of destruction for a destructor. The end result
  /// should call destructors on members and base classes in reverse
  /// order of their construction.
  void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type);

  /// ShouldInstrumentFunction - Return true if the current function should be
  /// instrumented with __cyg_profile_func_* calls.
  bool ShouldInstrumentFunction();

  /// EmitFunctionInstrumentation - Emit LLVM code to call the specified
  /// instrumentation function with the current function and the call site, if
  /// function instrumentation is enabled.
  void EmitFunctionInstrumentation(const char *Fn);

  /// EmitMCountInstrumentation - Emit call to .mcount.
  void EmitMCountInstrumentation();
  /// EmitFunctionProlog - Emit the target specific LLVM code to load the
  /// arguments for the given function. This is also responsible for naming the
  /// LLVM function arguments.
  void EmitFunctionProlog(const CGFunctionInfo &FI,
      llvm::Function *Fn,
      const FunctionArgList &Args);

  /// EmitFunctionEpilog - Emit the target specific LLVM code to return the
  /// given temporary.
  void EmitFunctionEpilog(const CGFunctionInfo &FI, bool EmitRetDbgLoc,
      SourceLocation EndLoc);

  /// EmitStartEHSpec - Emit the start of the exception spec.
  void EmitStartEHSpec(const Decl *D);

  /// EmitEndEHSpec - Emit the end of the exception spec.
  void EmitEndEHSpec(const Decl *D);

  /// getTerminateLandingPad - Return a landing pad that just calls terminate.
  llvm::BasicBlock *getTerminateLandingPad();

  /// getTerminateHandler - Return a handler (not a landing pad, just
  /// a catch handler) that just calls terminate. This is used when
  /// a terminate scope encloses a try.
  llvm::BasicBlock *getTerminateHandler();

  llvm::Type *ConvertTypeForMem(QualType T);
  llvm::Type *ConvertType(QualType T);
  llvm::Type *ConvertType(const TypeDecl *T) {
    return ConvertType(getContext().getTypeDeclType(T));
  }

  /// LoadObjCSelf - Load the value of self. This function is only valid while
  /// generating code for an Objective-C method.
  llvm::Value *LoadObjCSelf();

  /// TypeOfSelfObject - Return type of object that this self represents.
  QualType TypeOfSelfObject();

  /// getEvaluationKind - Return how the given AST type is evaluated during
  /// codegen: as a scalar, a complex value, or an aggregate.
  static TypeEvaluationKind getEvaluationKind(QualType T);

  static bool hasScalarEvaluationKind(QualType T) {
    return getEvaluationKind(T) == TEK_Scalar;
  }

  static bool hasAggregateEvaluationKind(QualType T) {
    return getEvaluationKind(T) == TEK_Aggregate;
  }

  /// createBasicBlock - Create an LLVM basic block.
  llvm::BasicBlock *createBasicBlock(const Twine &name = "",
      llvm::Function *parent = nullptr,
      llvm::BasicBlock *before = nullptr) {
#ifdef NDEBUG
    return llvm::BasicBlock::Create(getLLVMContext(), "", parent, before);
#else
    return llvm::BasicBlock::Create(getLLVMContext(), name, parent, before);
#endif
  }

  /// getJumpDestForLabel - Return the JumpDest (and thus the LLVM basic block)
  /// that the specified label maps to.
  JumpDest getJumpDestForLabel(const LabelDecl *S);

  /// SimplifyForwardingBlocks - If the given basic block is only a branch to
  /// another basic block, simplify it. This assumes that no other code could
  /// potentially reference the basic block.
  void SimplifyForwardingBlocks(llvm::BasicBlock *BB);

  /// EmitBlock - Emit the given block \arg BB and set it as the insert point,
  /// adding a fall-through branch from the current insert block if
  /// necessary. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// IsFinished - If true, indicates that the caller has finished emitting
  /// branches to the given block and does not expect to emit code into it. This
  /// means the block can be ignored if it is unreachable.
  void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

  /// EmitBlockAfterUses - Emit the given block somewhere hopefully
  /// near its uses, and leave the insertion point in it.
  void EmitBlockAfterUses(llvm::BasicBlock *BB);

  /// EmitBranch - Emit a branch to the specified basic block from the current
  /// insert block, taking care to avoid creation of branches from dummy
  /// blocks. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// This function clears the current insertion point. The caller should follow
  /// calls to this function with calls to Emit*Block prior to generating new
  /// code.
  void EmitBranch(llvm::BasicBlock *Block);
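
  // A typical emission pattern built from the block helpers above (an
  // illustrative sketch, not a prescribed sequence; 'Cond' is a hypothetical
  // condition expression):
  //
  //   llvm::BasicBlock *ThenBB = createBasicBlock("then");
  //   llvm::BasicBlock *ContBB = createBasicBlock("cont");
  //   Builder.CreateCondBr(EvaluateExprAsBool(Cond), ThenBB, ContBB);
  //   EmitBlock(ThenBB);
  //   ... emit the "then" body ...
  //   EmitBranch(ContBB);
  //   EmitBlock(ContBB, /*IsFinished=*/true);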
  /// HaveInsertPoint - True if an insertion point is defined. If not, this
  /// indicates that the current code being emitted is unreachable.
  bool HaveInsertPoint() const {
    return Builder.GetInsertBlock() != nullptr;
  }

  /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  /// emitted IR has a place to go. Note that by definition, if this function
  /// creates a block then that block is unreachable; callers may do better to
  /// detect when no insertion point is defined and simply skip IR generation.
  void EnsureInsertPoint() {
    if (!HaveInsertPoint())
      EmitBlock(createBasicBlock());
  }

  /// ErrorUnsupported - Print out an error that codegen doesn't support the
  /// specified stmt yet.
  void ErrorUnsupported(const Stmt *S, const char *Type);

  //===--------------------------------------------------------------------===//
  // Helpers
  //===--------------------------------------------------------------------===//

  LValue MakeAddrLValue(Address Addr, QualType T,
      AlignmentSource AlignSource = AlignmentSource::Type) {
    return LValue::MakeAddr(Addr, T, getContext(), AlignSource,
        CGM.getTBAAInfo(T));
  }

  LValue MakeAddrLValue(llvm::Value *V, QualType T, CharUnits Alignment,
      AlignmentSource AlignSource = AlignmentSource::Type) {
    return LValue::MakeAddr(Address(V, Alignment), T, getContext(),
        AlignSource, CGM.getTBAAInfo(T));
  }

  LValue MakeNaturalAlignPointeeAddrLValue(llvm::Value *V, QualType T);
  LValue MakeNaturalAlignAddrLValue(llvm::Value *V, QualType T);

  CharUnits getNaturalTypeAlignment(QualType T,
      AlignmentSource *Source = nullptr,
      bool forPointeeType = false);
  CharUnits getNaturalPointeeTypeAlignment(QualType T,
      AlignmentSource *Source = nullptr);

  Address EmitLoadOfReference(Address Ref, const ReferenceType *RefTy,
      AlignmentSource *Source = nullptr);
  LValue EmitLoadOfReferenceLValue(Address Ref, const ReferenceType *RefTy);

  /// CreateTempAlloca - This creates an alloca and inserts it into the entry
  /// block. The caller is responsible for setting an appropriate alignment on
  /// the alloca.
  llvm::AllocaInst *CreateTempAlloca(llvm::Type *Ty,
      const Twine &Name = "tmp");
  Address CreateTempAlloca(llvm::Type *Ty, CharUnits align,
      const Twine &Name = "tmp");

  /// CreateDefaultAlignedTempAlloca - This creates an alloca with the
  /// default ABI alignment of the given LLVM type.
  ///
  /// IMPORTANT NOTE: This is *not* generally the right alignment for
  /// any given AST type that happens to have been lowered to the
  /// given IR type. This should only ever be used for function-local,
  /// IR-driven manipulations like saving and restoring a value. Do
  /// not hand this address off to arbitrary IRGen routines, and especially
  /// do not pass it as an argument to a function that might expect a
  /// properly ABI-aligned value.
  Address CreateDefaultAlignTempAlloca(llvm::Type *Ty,
      const Twine &Name = "tmp");

  /// InitTempAlloca - Provide an initial value for the given alloca which
  /// will be observable at all locations in the function.
  ///
  /// The address should be something that was returned from one of
  /// the CreateTempAlloca or CreateMemTemp routines, and the
  /// initializer must be valid in the entry block (i.e. it must
  /// either be a constant or an argument value).
  void InitTempAlloca(Address Alloca, llvm::Value *Value);

  /// CreateIRTemp - Create a temporary IR object of the given type, with
  /// appropriate alignment. This routine should only be used when a temporary
  /// value needs to be stored into an alloca (for example, to avoid explicit
  /// PHI construction), but the type is the IR type, not the type appropriate
  /// for storing in memory.
  ///
  /// That is, this is exactly equivalent to CreateMemTemp, but calling
  /// ConvertType instead of ConvertTypeForMem.
  Address CreateIRTemp(QualType T, const Twine &Name = "tmp");

  /// CreateMemTemp - Create a temporary memory object of the given type, with
  /// appropriate alignment.
  Address CreateMemTemp(QualType T, const Twine &Name = "tmp");
  Address CreateMemTemp(QualType T, CharUnits Align, const Twine &Name = "tmp");

  /// CreateAggTemp - Create a temporary memory object for the given
  /// aggregate type.
  AggValueSlot CreateAggTemp(QualType T, const Twine &Name = "tmp") {
    return AggValueSlot::forAddr(CreateMemTemp(T, Name),
        T.getQualifiers(),
        AggValueSlot::IsNotDestructed,
        AggValueSlot::DoesNotNeedGCBarriers,
        AggValueSlot::IsNotAliased);
  }
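
  // Illustrative sketch: materialize a temporary for a value of AST type 'Ty'
  // and initialize it from a hypothetical expression 'Init' (names here are
  // not part of this interface):
  //
  //   Address Tmp = CreateMemTemp(Ty, "tmp");
  //   EmitAnyExprToMem(Init, Tmp, Ty.getQualifiers(), /*IsInitializer=*/true);
  //   LValue TmpLV = MakeAddrLValue(Tmp, Ty);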
  /// Emit a cast to void* in the appropriate address space.
  llvm::Value *EmitCastToVoidPtr(llvm::Value *value);

  /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
  /// expression and compare the result against zero, returning an Int1Ty value.
  llvm::Value *EvaluateExprAsBool(const Expr *E);

  /// EmitIgnoredExpr - Emit an expression in a context which ignores the result.
  void EmitIgnoredExpr(const Expr *E);

  /// EmitAnyExpr - Emit code to compute the specified expression which can have
  /// any type. The result is returned as an RValue struct. If this is an
  /// aggregate expression, the aggloc/agglocvolatile arguments indicate where
  /// the result should be returned.
  ///
  /// \param ignoreResult True if the resulting value isn't used.
  RValue EmitAnyExpr(const Expr *E,
      AggValueSlot aggSlot = AggValueSlot::ignored(),
      bool ignoreResult = false);

  // EmitVAListRef - Emit a "reference" to a va_list; this is either the address
  // or the value of the expression, depending on how va_list is defined.
  Address EmitVAListRef(const Expr *E);

  /// Emit a "reference" to a __builtin_ms_va_list; this is
  /// always the value of the expression, because a __builtin_ms_va_list is a
  /// pointer to a char.
  Address EmitMSVAListRef(const Expr *E);

  /// EmitAnyExprToTemp - Similar to EmitAnyExpr(), however, the result will
  /// always be accessible even if no aggregate location is provided.
  RValue EmitAnyExprToTemp(const Expr *E);

  /// EmitAnyExprToMem - Emits the code necessary to evaluate an
  /// arbitrary expression into the given memory location.
  void EmitAnyExprToMem(const Expr *E, Address Location,
      Qualifiers Quals, bool IsInitializer);

  void EmitAnyExprToExn(const Expr *E, Address Addr);

  /// EmitExprAsInit - Emits the code necessary to initialize a
  /// location in memory with the given initializer.
  void EmitExprAsInit(const Expr *init, const ValueDecl *D, LValue lvalue,
      bool capturedByInit);

  /// hasVolatileMember - returns true if aggregate type has a volatile
  /// member.
  bool hasVolatileMember(QualType T) {
    if (const RecordType *RT = T->getAs<RecordType>()) {
      const RecordDecl *RD = cast<RecordDecl>(RT->getDecl());
      return RD->hasVolatileMember();
    }
    return false;
  }

  /// EmitAggregateAssign - Emit an aggregate assignment.
  ///
  /// The difference from EmitAggregateCopy is that tail padding is not copied.
  /// This is required for correctness when assigning non-POD structures in C++.
  void EmitAggregateAssign(Address DestPtr, Address SrcPtr,
      QualType EltTy) {
    bool IsVolatile = hasVolatileMember(EltTy);
    EmitAggregateCopy(DestPtr, SrcPtr, EltTy, IsVolatile, true);
  }

  void EmitAggregateCopyCtor(Address DestPtr, Address SrcPtr,
      QualType DestTy, QualType SrcTy) {
    EmitAggregateCopy(DestPtr, SrcPtr, SrcTy, /*IsVolatile=*/false,
        /*IsAssignment=*/false);
  }

  /// EmitAggregateCopy - Emit an aggregate copy.
  ///
  /// \param isVolatile - True iff either the source or the destination is
  /// volatile.
  /// \param isAssignment - If false, allow padding to be copied. This often
  /// yields more efficient code.
  void EmitAggregateCopy(Address DestPtr, Address SrcPtr,
      QualType EltTy, bool isVolatile=false,
      bool isAssignment = false);
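
  // Illustrative distinction between the two helpers above (a sketch; DestLV
  // and SrcLV are hypothetical aggregate lvalues of the same type 'Ty'):
  //
  //   // Assignment of an existing object: tail padding must be preserved.
  //   EmitAggregateAssign(DestLV.getAddress(), SrcLV.getAddress(), Ty);
  //   // Construction of a fresh object: padding may be copied wholesale.
  //   EmitAggregateCopyCtor(DestLV.getAddress(), SrcLV.getAddress(), Ty, Ty);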
  /// GetAddrOfLocalVar - Return the address of a local variable.
  Address GetAddrOfLocalVar(const VarDecl *VD) {
    auto it = LocalDeclMap.find(VD);
    assert(it != LocalDeclMap.end() &&
        "Invalid argument to GetAddrOfLocalVar(), no decl!");
    return it->second;
  }

  /// getOpaqueLValueMapping - Given an opaque value expression (which
  /// must be mapped to an l-value), return its mapping.
  const LValue &getOpaqueLValueMapping(const OpaqueValueExpr *e) {
    assert(OpaqueValueMapping::shouldBindAsLValue(e));

    llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
        it = OpaqueLValues.find(e);
    assert(it != OpaqueLValues.end() && "no mapping for opaque value!");
    return it->second;
  }

  /// getOpaqueRValueMapping - Given an opaque value expression (which
  /// must be mapped to an r-value), return its mapping.
  const RValue &getOpaqueRValueMapping(const OpaqueValueExpr *e) {
    assert(!OpaqueValueMapping::shouldBindAsLValue(e));

    llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
        it = OpaqueRValues.find(e);
    assert(it != OpaqueRValues.end() && "no mapping for opaque value!");
    return it->second;
  }

  /// getAccessedFieldNo - Given an encoded value and a result number, return
  /// the input field number being accessed.
  static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

  llvm::BlockAddress *GetAddrOfLabel(const LabelDecl *L);
  llvm::BasicBlock *GetIndirectGotoBlock();

  /// EmitNullInitialization - Generate code to set a value of the given type to
  /// null. If the type contains data member pointers, they will be initialized
  /// to -1 in accordance with the Itanium C++ ABI.
  void EmitNullInitialization(Address DestPtr, QualType Ty);

  /// Emits a call to an LLVM variable-argument intrinsic, either
  /// \c llvm.va_start or \c llvm.va_end.
  /// \param ArgValue A reference to the \c va_list as emitted by either
  /// \c EmitVAListRef or \c EmitMSVAListRef.
  /// \param IsStart If \c true, emits a call to \c llvm.va_start; otherwise,
  /// calls \c llvm.va_end.
  llvm::Value *EmitVAStartEnd(llvm::Value *ArgValue, bool IsStart);

  /// Generate code to get an argument from the passed in pointer
  /// and update it accordingly.
  /// \param VE The \c VAArgExpr for which to generate code.
  /// \param VAListAddr Receives a reference to the \c va_list as emitted by
  /// either \c EmitVAListRef or \c EmitMSVAListRef.
  /// \returns A pointer to the argument.
  // FIXME: We should be able to get rid of this method and use the va_arg
  // instruction in LLVM instead once it works well enough.
  Address EmitVAArg(VAArgExpr *VE, Address &VAListAddr);

  /// emitArrayLength - Compute the length of an array, even if it's a
  /// VLA, and drill down to the base element type.
  llvm::Value *emitArrayLength(const ArrayType *arrayType,
      QualType &baseType,
      Address &addr);

  /// EmitVLASize - Capture all the sizes for the VLA expressions in
  /// the given variably-modified type and store them in the VLASizeMap.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitVariablyModifiedType(QualType Ty);

  /// getVLASize - Returns an LLVM value that corresponds to the size,
  /// in non-variably-sized elements, of a variable length array type,
  /// plus that largest non-variably-sized element type. Assumes that
  /// the type has already been emitted with EmitVariablyModifiedType.
  std::pair<llvm::Value*,QualType> getVLASize(const VariableArrayType *vla);
  std::pair<llvm::Value*,QualType> getVLASize(QualType vla);
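
  // Illustrative use of the VLA helpers above ('vlaTy' is a hypothetical
  // variably-modified type):
  //
  //   EmitVariablyModifiedType(vlaTy);     // record the VLA sizes
  //   auto VlaSize = getVLASize(vlaTy);    // <element count, element type>
  //   llvm::Value *NumElts = VlaSize.first;
  //   QualType EltTy = VlaSize.second;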
  /// LoadCXXThis - Load the value of 'this'. This function is only valid while
  /// generating code for a C++ member function.
  llvm::Value *LoadCXXThis() {
    assert(CXXThisValue && "no 'this' value for this function");
    return CXXThisValue;
  }
  Address LoadCXXThisAddress();

  /// LoadCXXVTT - Load the VTT parameter to base constructors/destructors that
  /// have virtual bases.
  // FIXME: Every place that calls LoadCXXVTT is something
  // that needs to be abstracted properly.
  llvm::Value *LoadCXXVTT() {
    assert(CXXStructorImplicitParamValue && "no VTT value for this function");
    return CXXStructorImplicitParamValue;
  }

  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
  Address
  GetAddressOfDirectBaseInCompleteClass(Address Value,
      const CXXRecordDecl *Derived,
      const CXXRecordDecl *Base,
      bool BaseIsVirtual);

  static bool ShouldNullCheckClassCastValue(const CastExpr *Cast);

  /// GetAddressOfBaseClass - This function will add the necessary delta to the
  /// load of 'this' and returns address of the base class.
  Address GetAddressOfBaseClass(Address Value,
      const CXXRecordDecl *Derived,
      CastExpr::path_const_iterator PathBegin,
      CastExpr::path_const_iterator PathEnd,
      bool NullCheckValue, SourceLocation Loc);

  Address GetAddressOfDerivedClass(Address Value,
      const CXXRecordDecl *Derived,
      CastExpr::path_const_iterator PathBegin,
      CastExpr::path_const_iterator PathEnd,
      bool NullCheckValue);

  /// GetVTTParameter - Return the VTT parameter that should be passed to a
  /// base constructor/destructor with virtual bases.
  /// FIXME: VTTs are Itanium ABI-specific, so the definition should move
  /// to ItaniumCXXABI.cpp together with all the references to VTT.
  llvm::Value *GetVTTParameter(GlobalDecl GD, bool ForVirtualBase,
      bool Delegating);

  void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
      CXXCtorType CtorType,
      const FunctionArgList &Args,
      SourceLocation Loc);

  // It's important not to confuse this and the previous function. Delegating
  // constructors are the C++11 feature. The constructor delegate optimization
  // is used to reduce duplication in the base and complete constructors where
  // they are substantially the same.
  void EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
      const FunctionArgList &Args);

  void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
      bool ForVirtualBase, bool Delegating,
      Address This, const CXXConstructExpr *E);

  /// Emit assumption load for all bases. Must be called only on the
  /// most-derived class, and not while the object is under construction.
  void EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl, Address This);

  /// Emit assumption that vptr load == global vtable.
  void EmitVTableAssumptionLoad(const VPtr &vptr, Address This);
  void EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
      Address This, Address Src,
      const CXXConstructExpr *E);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
      const ConstantArrayType *ArrayTy,
      Address ArrayPtr,
      const CXXConstructExpr *E,
      bool ZeroInitialization = false);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
      llvm::Value *NumElements,
      Address ArrayPtr,
      const CXXConstructExpr *E,
      bool ZeroInitialization = false);

  static Destroyer destroyCXXObject;

  void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
      bool ForVirtualBase, bool Delegating,
      Address This);

  void EmitNewArrayInitializer(const CXXNewExpr *E, QualType elementType,
      llvm::Type *ElementTy, Address NewPtr,
      llvm::Value *NumElements,
      llvm::Value *AllocSizeWithoutCookie);

  void EmitCXXTemporary(const CXXTemporary *Temporary, QualType TempType,
      Address Ptr);

  llvm::Value *EmitLifetimeStart(uint64_t Size, llvm::Value *Addr);
  void EmitLifetimeEnd(llvm::Value *Size, llvm::Value *Addr);

  llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
  void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

  void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
      QualType DeleteTy);

  RValue EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
      const Expr *Arg, bool IsDelete);

  llvm::Value *EmitCXXTypeidExpr(const CXXTypeidExpr *E);
  llvm::Value *EmitDynamicCast(Address V, const CXXDynamicCastExpr *DCE);
  Address EmitCXXUuidofExpr(const CXXUuidofExpr *E);

  /// \brief Situations in which we might emit a check for the suitability of a
  /// pointer or glvalue.
  enum TypeCheckKind {
    /// Checking the operand of a load. Must be suitably sized and aligned.
    TCK_Load,
    /// Checking the destination of a store. Must be suitably sized and aligned.
    TCK_Store,
    /// Checking the bound value in a reference binding. Must be suitably sized
    /// and aligned, but is not required to refer to an object (until the
    /// reference is used), per core issue 453.
    TCK_ReferenceBinding,
    /// Checking the object expression in a non-static data member access. Must
    /// be an object within its lifetime.
    TCK_MemberAccess,
    /// Checking the 'this' pointer for a call to a non-static member function.
    /// Must be an object within its lifetime.
    TCK_MemberCall,
    /// Checking the 'this' pointer for a constructor call.
    TCK_ConstructorCall,
    /// Checking the operand of a static_cast to a derived pointer type. Must be
    /// null or an object within its lifetime.
    TCK_DowncastPointer,
    /// Checking the operand of a static_cast to a derived reference type. Must
    /// be an object within its lifetime.
    TCK_DowncastReference,
    /// Checking the operand of a cast to a base object. Must be suitably sized
    /// and aligned.
    TCK_Upcast,
    /// Checking the operand of a cast to a virtual base object. Must be an
    /// object within its lifetime.
    TCK_UpcastToVirtualBase
  };

  /// \brief Whether any type-checking sanitizers are enabled. If \c false,
  /// calls to EmitTypeCheck can be skipped.
  bool sanitizePerformTypeCheck() const;

  /// \brief Emit a check that \p V is the address of storage of the
  /// appropriate size and alignment for an object of type \p Type.
  void EmitTypeCheck(TypeCheckKind TCK, SourceLocation Loc, llvm::Value *V,
      QualType Type, CharUnits Alignment = CharUnits::Zero(),
      bool SkipNullCheck = false);
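
  // Illustrative sketch: guard a load with the sanitizer type check above
  // ('E' and 'Addr' are hypothetical; the check can be skipped entirely when
  // no type-checking sanitizer is enabled):
  //
  //   if (sanitizePerformTypeCheck())
  //     EmitTypeCheck(TCK_Load, E->getExprLoc(), Addr.getPointer(),
  //                   E->getType(), Addr.getAlignment());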
  /// \brief Emit a check that \p Base points into an array object, which
  /// we can access at index \p Index. \p Accessed should be \c false if this
  /// expression is only used as an lvalue, for instance in "&Arr[Idx]".
  void EmitBoundsCheck(const Expr *E, const Expr *Base, llvm::Value *Index,
      QualType IndexType, bool Accessed);

  llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV,
      bool isInc, bool isPre);
  ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
      bool isInc, bool isPre);

  void EmitAlignmentAssumption(llvm::Value *PtrValue, unsigned Alignment,
      llvm::Value *OffsetValue = nullptr) {
    Builder.CreateAlignmentAssumption(CGM.getDataLayout(), PtrValue, Alignment,
        OffsetValue);
  }

  //===--------------------------------------------------------------------===//
  // Declaration Emission
  //===--------------------------------------------------------------------===//

  /// EmitDecl - Emit a declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitDecl(const Decl &D);

  /// EmitVarDecl - Emit a local variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitVarDecl(const VarDecl &D);

  void EmitScalarInit(const Expr *init, const ValueDecl *D, LValue lvalue,
      bool capturedByInit);
  void EmitScalarInit(llvm::Value *init, LValue lvalue);

  typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
      llvm::Value *Address);

  /// \brief Determine whether the given initializer is trivial in the sense
  /// that it requires no code to be generated.
  bool isTrivialInitializer(const Expr *Init);

  /// EmitAutoVarDecl - Emit an auto variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitAutoVarDecl(const VarDecl &D);

  class AutoVarEmission {
    friend class CodeGenFunction;

    const VarDecl *Variable;

    /// The address of the alloca. Invalid if the variable was emitted
    /// as a global constant.
    Address Addr;

    llvm::Value *NRVOFlag;

    /// True if the variable is a __block variable.
    bool IsByRef;

    /// True if the variable is of aggregate type and has a constant
    /// initializer.
    bool IsConstantAggregate;

    /// Non-null if we should use lifetime annotations.
    llvm::Value *SizeForLifetimeMarkers;

    struct Invalid {};
    AutoVarEmission(Invalid) : Variable(nullptr), Addr(Address::invalid()) {}

    AutoVarEmission(const VarDecl &variable)
        : Variable(&variable), Addr(Address::invalid()), NRVOFlag(nullptr),
          IsByRef(false), IsConstantAggregate(false),
          SizeForLifetimeMarkers(nullptr) {}

    bool wasEmittedAsGlobal() const { return !Addr.isValid(); }

  public:
    static AutoVarEmission invalid() { return AutoVarEmission(Invalid()); }

    bool useLifetimeMarkers() const {
      return SizeForLifetimeMarkers != nullptr;
    }
    llvm::Value *getSizeForLifetimeMarkers() const {
      assert(useLifetimeMarkers());
      return SizeForLifetimeMarkers;
    }

    /// Returns the raw, allocated address, which is not necessarily
    /// the address of the object itself.
    Address getAllocatedAddress() const {
      return Addr;
    }

    /// Returns the address of the object within this declaration.
    /// Note that this does not chase the forwarding pointer for
    /// __block decls.
    Address getObjectAddress(CodeGenFunction &CGF) const {
      if (!IsByRef) return Addr;

      return CGF.emitBlockByrefAddress(Addr, Variable, /*forward*/ false);
    }
  };

  AutoVarEmission EmitAutoVarAlloca(const VarDecl &var);
  void EmitAutoVarInit(const AutoVarEmission &emission);
  void EmitAutoVarCleanups(const AutoVarEmission &emission);
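
  // The three phases above are normally driven in this order, roughly as
  // EmitAutoVarDecl does (an illustrative sketch; 'D' is the VarDecl being
  // emitted):
  //
  //   AutoVarEmission Emission = EmitAutoVarAlloca(D);
  //   EmitAutoVarInit(Emission);
  //   EmitAutoVarCleanups(Emission);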
  void emitAutoVarTypeCleanup(const AutoVarEmission &emission,
      QualType::DestructionKind dtorKind);

  void EmitStaticVarDecl(const VarDecl &D,
      llvm::GlobalValue::LinkageTypes Linkage);

  class ParamValue {
    llvm::Value *Value;
    unsigned Alignment;

    ParamValue(llvm::Value *V, unsigned A) : Value(V), Alignment(A) {}

  public:
    static ParamValue forDirect(llvm::Value *value) {
      return ParamValue(value, 0);
    }
    static ParamValue forIndirect(Address addr) {
      assert(!addr.getAlignment().isZero());
      return ParamValue(addr.getPointer(), addr.getAlignment().getQuantity());
    }

    bool isIndirect() const { return Alignment != 0; }
    llvm::Value *getAnyValue() const { return Value; }

    llvm::Value *getDirectValue() const {
      assert(!isIndirect());
      return Value;
    }

    Address getIndirectAddress() const {
      assert(isIndirect());
      return Address(Value, CharUnits::fromQuantity(Alignment));
    }
  };
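
  // Illustrative construction of a ParamValue for the two ABI cases (a
  // sketch; 'ArgVal' and 'ArgAddr' are hypothetical):
  //
  //   ParamValue Direct = ParamValue::forDirect(ArgVal);      // passed by value
  //   ParamValue Indirect = ParamValue::forIndirect(ArgAddr); // passed by pointer
  //   if (Indirect.isIndirect())
  //     Address A = Indirect.getIndirectAddress();  // recover the Address form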
  /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl.
  void EmitParmDecl(const VarDecl &D, ParamValue Arg, unsigned ArgNo);

  /// protectFromPeepholes - Protect a value that we're intending to
  /// store to the side, but which will probably be used later, from
  /// aggressive peepholing optimizations that might delete it.
  ///
  /// Pass the result to unprotectFromPeepholes to declare that
  /// protection is no longer required.
  ///
  /// There's no particular reason why this shouldn't apply to
  /// l-values, it's just that no existing peepholes work on pointers.
  PeepholeProtection protectFromPeepholes(RValue rvalue);
  void unprotectFromPeepholes(PeepholeProtection protection);

  //===--------------------------------------------------------------------===//
  // Statement Emission
  //===--------------------------------------------------------------------===//

  /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info.
  void EmitStopPoint(const Stmt *S);

  /// EmitStmt - Emit the code for the statement \arg S. It is legal to call
  /// this function even if there is no current insertion point.
  ///
  /// This function may clear the current insertion point; callers should use
  /// EnsureInsertPoint if they wish to subsequently generate code without first
  /// calling EmitBlock, EmitBranch, or EmitStmt.
  void EmitStmt(const Stmt *S);

  /// EmitSimpleStmt - Try to emit a "simple" statement which does not
  /// necessarily require an insertion point or debug information; typically
  /// because the statement amounts to a jump or a container of other
  /// statements.
  ///
  /// \return True if the statement was handled.
  bool EmitSimpleStmt(const Stmt *S);

  Address EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
      AggValueSlot AVS = AggValueSlot::ignored());
  Address EmitCompoundStmtWithoutScope(const CompoundStmt &S,
      bool GetLast = false,
      AggValueSlot AVS =
          AggValueSlot::ignored());

  /// EmitLabel - Emit the block for the given label. It is legal to call this
  /// function even if there is no current insertion point.
  void EmitLabel(const LabelDecl *D); // helper for EmitLabelStmt.

  void EmitLabelStmt(const LabelStmt &S);
  void EmitAttributedStmt(const AttributedStmt &S);
  void EmitGotoStmt(const GotoStmt &S);
  void EmitIndirectGotoStmt(const IndirectGotoStmt &S);
  void EmitIfStmt(const IfStmt &S);

  void EmitWhileStmt(const WhileStmt &S,
      ArrayRef<const Attr *> Attrs = None);
  void EmitDoStmt(const DoStmt &S, ArrayRef<const Attr *> Attrs = None);
  void EmitForStmt(const ForStmt &S,
      ArrayRef<const Attr *> Attrs = None);
  void EmitReturnStmt(const ReturnStmt &S);
  void EmitDeclStmt(const DeclStmt &S);
  void EmitBreakStmt(const BreakStmt &S);
  void EmitContinueStmt(const ContinueStmt &S);
  void EmitSwitchStmt(const SwitchStmt &S);
  void EmitDefaultStmt(const DefaultStmt &S);
  void EmitCaseStmt(const CaseStmt &S);
  void EmitCaseStmtRange(const CaseStmt &S);
  void EmitAsmStmt(const AsmStmt &S);

  void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
  void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
  void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S);
  void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);
  void EmitObjCAutoreleasePoolStmt(const ObjCAutoreleasePoolStmt &S);

  void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
  void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);

  void EmitCXXTryStmt(const CXXTryStmt &S);
  void EmitSEHTryStmt(const SEHTryStmt &S);
  void EmitSEHLeaveStmt(const SEHLeaveStmt &S);
  void EnterSEHTryStmt(const SEHTryStmt &S);
  void ExitSEHTryStmt(const SEHTryStmt &S);
  void startOutlinedSEHHelper(CodeGenFunction &ParentCGF, bool IsFilter,
      const Stmt *OutlinedStmt);

  llvm::Function *GenerateSEHFilterFunction(CodeGenFunction &ParentCGF,
      const SEHExceptStmt &Except);
  llvm::Function *GenerateSEHFinallyFunction(CodeGenFunction &ParentCGF,
      const SEHFinallyStmt &Finally);

  void EmitSEHExceptionCodeSave(CodeGenFunction &ParentCGF,
      llvm::Value *ParentFP,
      llvm::Value *EntryEBP);
  llvm::Value *EmitSEHExceptionCode();
  llvm::Value *EmitSEHExceptionInfo();
  llvm::Value *EmitSEHAbnormalTermination();

  /// Scan the outlined statement for captures from the parent function. For
  /// each capture, mark the capture as escaped and emit a call to
  /// llvm.localrecover. Insert the localrecover result into the LocalDeclMap.
  void EmitCapturedLocals(CodeGenFunction &ParentCGF, const Stmt *OutlinedStmt,
      bool IsFilter);

  /// Recovers the address of a local in a parent function. ParentVar is the
  /// address of the variable used in the immediate parent function. It can
  /// either be an alloca or a call to llvm.localrecover if there are nested
  /// outlined functions. ParentFP is the frame pointer of the outermost parent
  /// frame.
  Address recoverAddrOfEscapedLocal(CodeGenFunction &ParentCGF,
      Address ParentVar,
      llvm::Value *ParentFP);

  void EmitCXXForRangeStmt(const CXXForRangeStmt &S,
      ArrayRef<const Attr *> Attrs = None);

  /// Returns the calculated size of the specified type.
  llvm::Value *getTypeSize(QualType Ty);

  LValue InitCapturedStruct(const CapturedStmt &S);
  llvm::Function *EmitCapturedStmt(const CapturedStmt &S, CapturedRegionKind K);
  llvm::Function *GenerateCapturedStmtFunction(const CapturedStmt &S);
  Address GenerateCapturedStmtArgument(const CapturedStmt &S);

  llvm::Function *GenerateOpenMPCapturedStmtFunction(const CapturedStmt &S);
  void GenerateOpenMPCapturedVars(const CapturedStmt &S,
      SmallVectorImpl<llvm::Value *> &CapturedVars);
  void emitOMPSimpleStore(LValue LVal, RValue RVal, QualType RValTy,
      SourceLocation Loc);

  /// \brief Perform element by element copying of arrays with type \a
  /// OriginalType from \a SrcAddr to \a DestAddr using copying procedure
  /// generated by \a CopyGen.
  ///
  /// \param DestAddr Address of the destination array.
  /// \param SrcAddr Address of the source array.
  /// \param OriginalType Type of destination and source arrays.
  /// \param CopyGen Copying procedure that copies value of single array element
  /// to another single array element.
  void EmitOMPAggregateAssign(
      Address DestAddr, Address SrcAddr, QualType OriginalType,
      const llvm::function_ref<void(Address, Address)> &CopyGen);
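
  // Illustrative CopyGen callback for the helper above (a sketch; the lambda
  // receives the per-element destination and source addresses, and all other
  // names are hypothetical):
  //
  //   EmitOMPAggregateAssign(DestAddr, SrcAddr, OriginalType,
  //       [&](Address DestElem, Address SrcElem) {
  //         // emit the copy of a single element from SrcElem to DestElem
  //       });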
  /// \brief Emit proper copying of data from one variable to another.
  ///
  /// \param OriginalType Original type of the copied variables.
  /// \param DestAddr Destination address.
  /// \param SrcAddr Source address.
  /// \param DestVD Destination variable used in \a CopyExpr (for arrays, has
  /// type of the base array element).
  /// \param SrcVD Source variable used in \a CopyExpr (for arrays, has type of
  /// the base array element).
  /// \param Copy Actual copying expression for copying data from \a SrcVD to
  /// \a DestVD.
  void EmitOMPCopy(QualType OriginalType,
      Address DestAddr, Address SrcAddr,
      const VarDecl *DestVD, const VarDecl *SrcVD,
      const Expr *Copy);

  /// \brief Emit atomic update code for constructs: \a X = \a X \a BO \a E or
  /// \a X = \a E \a BO \a E.
  ///
  /// \param X Value to be updated.
  /// \param E Update value.
  /// \param BO Binary operation for update operation.
  /// \param IsXLHSInRHSPart true if \a X is LHS in RHS part of the update
  /// expression, false otherwise.
  /// \param AO Atomic ordering of the generated atomic instructions.
  /// \param CommonGen Code generator for complex expressions that cannot be
  /// expressed through atomicrmw instruction.
  /// \returns <true, OldAtomicValue> if simple 'atomicrmw' instruction was
  /// generated, <false, RValue::get(nullptr)> otherwise.
  std::pair<bool, RValue> EmitOMPAtomicSimpleUpdateExpr(
      LValue X, RValue E, BinaryOperatorKind BO, bool IsXLHSInRHSPart,
      llvm::AtomicOrdering AO, SourceLocation Loc,
      const llvm::function_ref<RValue(RValue)> &CommonGen);

  bool EmitOMPFirstprivateClause(const OMPExecutableDirective &D,
      OMPPrivateScope &PrivateScope);
  void EmitOMPPrivateClause(const OMPExecutableDirective &D,
      OMPPrivateScope &PrivateScope);

  /// \brief Emit code for the copyin clause in the \a D directive. The
  /// following code is generated at the start of outlined functions for
  /// directives:
  /// \code
  /// threadprivate_var1 = master_threadprivate_var1;
  /// operator=(threadprivate_var2, master_threadprivate_var2);
  /// ...
  /// __kmpc_barrier(&loc, global_tid);
  /// \endcode
  ///
  /// \param D OpenMP directive possibly with 'copyin' clause(s).
  /// \returns true if at least one copyin variable is found, false otherwise.
  bool EmitOMPCopyinClause(const OMPExecutableDirective &D);

  /// \brief Emit initial code for lastprivate variables. If some variable is
  /// not also firstprivate, then the default initialization is used. Otherwise
  /// initialization of this variable is performed by the
  /// EmitOMPFirstprivateClause method.
  ///
  /// \param D Directive that may have 'lastprivate' clause(s).
  /// \param PrivateScope Private scope for capturing lastprivate variables for
  /// proper codegen in internal captured statement.
  ///
  /// \returns true if there is at least one lastprivate variable, false
  /// otherwise.
  bool EmitOMPLastprivateClauseInit(const OMPExecutableDirective &D,
      OMPPrivateScope &PrivateScope);

  /// \brief Emit final copying of lastprivate values to original variables at
  /// the end of the worksharing or simd directive.
  ///
  /// \param D Directive that has at least one 'lastprivate' clause.
  /// \param IsLastIterCond Boolean condition that must be set to 'i1 true' if
  /// it is the last iteration of the loop code in associated directive, or to
  /// 'i1 false' otherwise. If this item is nullptr, no final check is required.
  void EmitOMPLastprivateClauseFinal(const OMPExecutableDirective &D,
      llvm::Value *IsLastIterCond = nullptr);
  /// \brief Emit initial code for reduction variables. Creates reduction copies
  /// and initializes them with the values according to OpenMP standard.
  ///
  /// \param D Directive (possibly) with the 'reduction' clause.
  /// \param PrivateScope Private scope for capturing reduction variables for
  /// proper codegen in internal captured statement.
  ///
  void EmitOMPReductionClauseInit(const OMPExecutableDirective &D,
      OMPPrivateScope &PrivateScope);

  /// \brief Emit final update of reduction values to original variables at
  /// the end of the directive.
  ///
  /// \param D Directive that has at least one 'reduction' clause.
  void EmitOMPReductionClauseFinal(const OMPExecutableDirective &D);

  /// \brief Emit initial code for linear variables. Creates private copies
  /// and initializes them with the values according to OpenMP standard.
  ///
  /// \param D Directive (possibly) with the 'linear' clause.
  void EmitOMPLinearClauseInit(const OMPLoopDirective &D);

  void EmitOMPParallelDirective(const OMPParallelDirective &S);
  void EmitOMPSimdDirective(const OMPSimdDirective &S);
  void EmitOMPForDirective(const OMPForDirective &S);
  void EmitOMPForSimdDirective(const OMPForSimdDirective &S);
  void EmitOMPSectionsDirective(const OMPSectionsDirective &S);
  void EmitOMPSectionDirective(const OMPSectionDirective &S);
  void EmitOMPSingleDirective(const OMPSingleDirective &S);
  void EmitOMPMasterDirective(const OMPMasterDirective &S);
  void EmitOMPCriticalDirective(const OMPCriticalDirective &S);
  void EmitOMPParallelForDirective(const OMPParallelForDirective &S);
  void EmitOMPParallelForSimdDirective(const OMPParallelForSimdDirective &S);
  void EmitOMPParallelSectionsDirective(const OMPParallelSectionsDirective &S);
  void EmitOMPTaskDirective(const OMPTaskDirective &S);
  void EmitOMPTaskyieldDirective(const OMPTaskyieldDirective &S);
  void EmitOMPBarrierDirective(const OMPBarrierDirective &S);
  void EmitOMPTaskwaitDirective(const OMPTaskwaitDirective &S);
  void EmitOMPTaskgroupDirective(const OMPTaskgroupDirective &S);
  void EmitOMPFlushDirective(const OMPFlushDirective &S);
  void EmitOMPOrderedDirective(const OMPOrderedDirective &S);
  void EmitOMPAtomicDirective(const OMPAtomicDirective &S);
  void EmitOMPTargetDirective(const OMPTargetDirective &S);
  void EmitOMPTargetDataDirective(const OMPTargetDataDirective &S);
  void EmitOMPTargetEnterDataDirective(const OMPTargetEnterDataDirective &S);
  void EmitOMPTargetExitDataDirective(const OMPTargetExitDataDirective &S);
  void EmitOMPTargetParallelDirective(const OMPTargetParallelDirective &S);
  void EmitOMPTeamsDirective(const OMPTeamsDirective &S);
  void
  EmitOMPCancellationPointDirective(const OMPCancellationPointDirective &S);
  void EmitOMPCancelDirective(const OMPCancelDirective &S);
  void EmitOMPTaskLoopDirective(const OMPTaskLoopDirective &S);
  void EmitOMPTaskLoopSimdDirective(const OMPTaskLoopSimdDirective &S);
  void EmitOMPDistributeDirective(const OMPDistributeDirective &S);

  /// \brief Emit the inner loop of the worksharing/simd construct.
  ///
  /// \param S Directive for which the inner loop must be emitted.
  /// \param RequiresCleanup true, if the directive has some associated private
  /// variables.
  /// \param LoopCond Boolean condition for loop continuation.
  /// \param IncExpr Increment expression for the loop control variable.
  /// \param BodyGen Generator for the inner body of the inner loop.
  /// \param PostIncGen Generator for post-increment code (required for ordered
  /// loop directives).
  void EmitOMPInnerLoop(
      const Stmt &S, bool RequiresCleanup, const Expr *LoopCond,
      const Expr *IncExpr,
      const llvm::function_ref<void(CodeGenFunction &)> &BodyGen,
      const llvm::function_ref<void(CodeGenFunction &)> &PostIncGen);

  JumpDest getOMPCancelDestination(OpenMPDirectiveKind Kind);

private:
  /// Helpers for the OpenMP loop directives.
  void EmitOMPLoopBody(const OMPLoopDirective &D, JumpDest LoopExit);
  void EmitOMPSimdInit(const OMPLoopDirective &D, bool IsMonotonic = false);
  void EmitOMPSimdFinal(const OMPLoopDirective &D);

  /// \brief Emit code for the worksharing loop-based directive.
  /// \return true if this construct has any lastprivate clause, false
  /// otherwise.
  bool EmitOMPWorksharingLoop(const OMPLoopDirective &S);
  void EmitOMPForOuterLoop(OpenMPScheduleClauseKind ScheduleKind,
      bool IsMonotonic, const OMPLoopDirective &S,
      OMPPrivateScope &LoopScope, bool Ordered, Address LB,
      Address UB, Address ST, Address IL,
      llvm::Value *Chunk);

  /// \brief Emit code for the sections directive.
  OpenMPDirectiveKind EmitSections(const OMPExecutableDirective &S);

public:
  1997. //===--------------------------------------------------------------------===//
  1998. // LValue Expression Emission
  1999. //===--------------------------------------------------------------------===//
  2000. /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
  2001. RValue GetUndefRValue(QualType Ty);
  2002. /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E
  2003. /// and issue an ErrorUnsupported style diagnostic (using the
  2004. /// provided Name).
  2005. RValue EmitUnsupportedRValue(const Expr *E,
  2006. const char *Name);
  2007. /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue
  2008. /// an ErrorUnsupported style diagnostic (using the provided Name).
  2009. LValue EmitUnsupportedLValue(const Expr *E,
  2010. const char *Name);
  /// EmitLValue - Emit code to compute a designator that specifies the location
  /// of the expression.
  ///
  /// This can return one of two things: a simple address or a bitfield
  /// reference. In either case, the LLVM Value* in the LValue structure is
  /// guaranteed to be an LLVM pointer type.
  ///
  /// If this returns a bitfield reference, nothing about the pointee type of
  /// the LLVM value is known: For example, it may not be a pointer to an
  /// integer.
  ///
  /// If this returns a normal address, and if the lvalue's C type is fixed
  /// size, this method guarantees that the returned pointer type will point to
  /// an LLVM type of the same size as the lvalue's type. If the lvalue has a
  /// variable length type, this is not possible.
  ///
  LValue EmitLValue(const Expr *E);
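  // Illustrative sketch (not part of the original header): a common pattern is
  // to compute the l-value and then load from it, assuming E is an Expr* and
  // Loc a SourceLocation:
  //
  //   LValue LV = CGF.EmitLValue(E);
  //   RValue RV = CGF.EmitLoadOfLValue(LV, Loc);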
  /// \brief Same as EmitLValue but additionally we generate checking code to
  /// guard against undefined behavior. This is only suitable when we know
  /// that the address will be used to access the object.
  LValue EmitCheckedLValue(const Expr *E, TypeCheckKind TCK);
  RValue convertTempToRValue(Address addr, QualType type,
      SourceLocation Loc);
  void EmitAtomicInit(Expr *E, LValue lvalue);
  bool LValueIsSuitableForInlineAtomic(LValue Src);
  bool typeIsSuitableForInlineAtomic(QualType Ty, bool IsVolatile) const;
  RValue EmitAtomicLoad(LValue LV, SourceLocation SL,
      AggValueSlot Slot = AggValueSlot::ignored());
  RValue EmitAtomicLoad(LValue lvalue, SourceLocation loc,
      llvm::AtomicOrdering AO, bool IsVolatile = false,
      AggValueSlot slot = AggValueSlot::ignored());
  void EmitAtomicStore(RValue rvalue, LValue lvalue, bool isInit);
  void EmitAtomicStore(RValue rvalue, LValue lvalue, llvm::AtomicOrdering AO,
      bool IsVolatile, bool isInit);
  std::pair<RValue, llvm::Value *> EmitAtomicCompareExchange(
      LValue Obj, RValue Expected, RValue Desired, SourceLocation Loc,
      llvm::AtomicOrdering Success = llvm::SequentiallyConsistent,
      llvm::AtomicOrdering Failure = llvm::SequentiallyConsistent,
      bool IsWeak = false, AggValueSlot Slot = AggValueSlot::ignored());
  void EmitAtomicUpdate(LValue LVal, llvm::AtomicOrdering AO,
      const llvm::function_ref<RValue(RValue)> &UpdateOp,
      bool IsVolatile);
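  // Illustrative sketch (not part of the original header): an atomic
  // read-modify-write through EmitAtomicUpdate, assuming LV is an LValue for
  // which LValueIsSuitableForInlineAtomic(LV) holds:
  //
  //   CGF.EmitAtomicUpdate(LV, llvm::SequentiallyConsistent,
  //                        [](RValue Old) { return Old; },  // identity update
  //                        /*IsVolatile=*/false);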
  /// EmitToMemory - Change a scalar value from its value
  /// representation to its in-memory representation.
  llvm::Value *EmitToMemory(llvm::Value *Value, QualType Ty);
  /// EmitFromMemory - Change a scalar value from its memory
  /// representation to its value representation.
  llvm::Value *EmitFromMemory(llvm::Value *Value, QualType Ty);
  /// EmitLoadOfScalar - Load a scalar value from an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation.
  llvm::Value *EmitLoadOfScalar(Address Addr, bool Volatile, QualType Ty,
      SourceLocation Loc,
      AlignmentSource AlignSource = AlignmentSource::Type,
      llvm::MDNode *TBAAInfo = nullptr,
      QualType TBAABaseTy = QualType(),
      uint64_t TBAAOffset = 0,
      bool isNontemporal = false);
  /// EmitLoadOfScalar - Load a scalar value from an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation. The l-value must be a simple
  /// l-value.
  llvm::Value *EmitLoadOfScalar(LValue lvalue, SourceLocation Loc);
  /// EmitStoreOfScalar - Store a scalar value to an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation.
  void EmitStoreOfScalar(llvm::Value *Value, Address Addr,
      bool Volatile, QualType Ty,
      AlignmentSource AlignSource = AlignmentSource::Type,
      llvm::MDNode *TBAAInfo = nullptr, bool isInit = false,
      QualType TBAABaseTy = QualType(),
      uint64_t TBAAOffset = 0, bool isNontemporal = false);
  /// EmitStoreOfScalar - Store a scalar value to an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation. The l-value must be a simple
  /// l-value. The isInit flag indicates whether this is an initialization.
  /// If so, atomic qualifiers are ignored and the store is always non-atomic.
  void EmitStoreOfScalar(llvm::Value *value, LValue lvalue, bool isInit = false);
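  // Illustrative sketch (not part of the original header): copying a scalar
  // between two simple l-values, with the memory/value representation
  // conversions handled by the helpers above:
  //
  //   llvm::Value *V = CGF.EmitLoadOfScalar(SrcLV, Loc);
  //   CGF.EmitStoreOfScalar(V, DstLV);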
  /// EmitLoadOfLValue - Given an expression that represents a value lvalue,
  /// this method emits the address of the lvalue, then loads the result as an
  /// rvalue, returning the rvalue.
  RValue EmitLoadOfLValue(LValue V, SourceLocation Loc);
  RValue EmitLoadOfExtVectorElementLValue(LValue V);
  RValue EmitLoadOfBitfieldLValue(LValue LV);
  RValue EmitLoadOfGlobalRegLValue(LValue LV);
  /// EmitStoreThroughLValue - Store the specified rvalue into the specified
  /// lvalue, where both are guaranteed to have the same type, and that type
  /// is 'Ty'.
  void EmitStoreThroughLValue(RValue Src, LValue Dst, bool isInit = false);
  void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst);
  void EmitStoreThroughGlobalRegLValue(RValue Src, LValue Dst);
  /// EmitStoreThroughBitfieldLValue - Store Src into Dst with same constraints
  /// as EmitStoreThroughLValue.
  ///
  /// \param Result [out] - If non-null, this will be set to a Value* for the
  /// bit-field contents after the store, appropriate for use as the result of
  /// an assignment to the bit-field.
  void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst,
      llvm::Value **Result = nullptr);
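  // Illustrative sketch (not part of the original header): storing to a
  // bit-field and recovering the value of the assignment expression:
  //
  //   llvm::Value *Result = nullptr;
  //   CGF.EmitStoreThroughBitfieldLValue(Src, Dst, &Result);
  //   // Result now holds the bit-field contents after the store.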
  /// Emit an l-value for an assignment (simple or compound) of complex type.
  LValue EmitComplexAssignmentLValue(const BinaryOperator *E);
  LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E);
  LValue EmitScalarCompoundAssignWithComplex(const CompoundAssignOperator *E,
      llvm::Value *&Result);
  // Note: only available for agg return types
  LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
  LValue EmitCompoundAssignmentLValue(const CompoundAssignOperator *E);
  // Note: only available for agg return types
  LValue EmitCallExprLValue(const CallExpr *E);
  // Note: only available for agg return types
  LValue EmitVAArgExprLValue(const VAArgExpr *E);
  LValue EmitDeclRefLValue(const DeclRefExpr *E);
  LValue EmitStringLiteralLValue(const StringLiteral *E);
  LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
  LValue EmitPredefinedLValue(const PredefinedExpr *E);
  LValue EmitUnaryOpLValue(const UnaryOperator *E);
  LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E,
      bool Accessed = false);
  LValue EmitOMPArraySectionExpr(const OMPArraySectionExpr *E,
      bool IsLowerBound = true);
  LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
  LValue EmitMemberExpr(const MemberExpr *E);
  LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
  LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
  LValue EmitInitListLValue(const InitListExpr *E);
  LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E);
  LValue EmitCastLValue(const CastExpr *E);
  LValue EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *E);
  LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e);
  Address EmitExtVectorElementLValue(LValue V);
  RValue EmitRValueForField(LValue LV, const FieldDecl *FD, SourceLocation Loc);
  Address EmitArrayToPointerDecay(const Expr *Array,
      AlignmentSource *AlignSource = nullptr);
  class ConstantEmission {
    llvm::PointerIntPair<llvm::Constant*, 1, bool> ValueAndIsReference;
    ConstantEmission(llvm::Constant *C, bool isReference)
        : ValueAndIsReference(C, isReference) {}
  public:
    ConstantEmission() {}
    static ConstantEmission forReference(llvm::Constant *C) {
      return ConstantEmission(C, true);
    }
    static ConstantEmission forValue(llvm::Constant *C) {
      return ConstantEmission(C, false);
    }
    explicit operator bool() const {
      return ValueAndIsReference.getOpaqueValue() != nullptr;
    }
    bool isReference() const { return ValueAndIsReference.getInt(); }
    LValue getReferenceLValue(CodeGenFunction &CGF, Expr *refExpr) const {
      assert(isReference());
      return CGF.MakeNaturalAlignAddrLValue(ValueAndIsReference.getPointer(),
                                            refExpr->getType());
    }
    llvm::Constant *getValue() const {
      assert(!isReference());
      return ValueAndIsReference.getPointer();
    }
  };
  ConstantEmission tryEmitAsConstant(DeclRefExpr *refExpr);
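  // Illustrative sketch (not part of the original header): constant-folding a
  // DeclRefExpr when possible and falling back to a normal l-value load,
  // assuming refExpr and Loc are in scope:
  //
  //   if (ConstantEmission CE = CGF.tryEmitAsConstant(refExpr)) {
  //     if (CE.isReference())
  //       return CGF.EmitLoadOfLValue(CE.getReferenceLValue(CGF, refExpr),
  //                                   Loc);
  //     return RValue::get(CE.getValue());
  //   }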
  RValue EmitPseudoObjectRValue(const PseudoObjectExpr *e,
      AggValueSlot slot = AggValueSlot::ignored());
  LValue EmitPseudoObjectLValue(const PseudoObjectExpr *e);
  llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
      const ObjCIvarDecl *Ivar);
  LValue EmitLValueForField(LValue Base, const FieldDecl* Field);
  LValue EmitLValueForLambdaField(const FieldDecl *Field);
  /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
  /// if the Field is a reference, this will return the address of the reference
  /// and not the address of the value stored in the reference.
  LValue EmitLValueForFieldInitialization(LValue Base,
      const FieldDecl* Field);
  LValue EmitLValueForIvar(QualType ObjectTy,
      llvm::Value* Base, const ObjCIvarDecl *Ivar,
      unsigned CVRQualifiers);
  LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
  LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
  LValue EmitLambdaLValue(const LambdaExpr *E);
  LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);
  LValue EmitCXXUuidofLValue(const CXXUuidofExpr *E);
  LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
  LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
  LValue EmitStmtExprLValue(const StmtExpr *E);
  LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
  LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);
  void EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init);

  //===--------------------------------------------------------------------===//
  // Scalar Expression Emission
  //===--------------------------------------------------------------------===//

  /// EmitCall - Generate a call of the given function, expecting the given
  /// result type, and using the given argument list which specifies both the
  /// LLVM arguments and the types they were derived from.
  RValue EmitCall(const CGFunctionInfo &FnInfo, llvm::Value *Callee,
      ReturnValueSlot ReturnValue, const CallArgList &Args,
      CGCalleeInfo CalleeInfo = CGCalleeInfo(),
      llvm::Instruction **callOrInvoke = nullptr);
  RValue EmitCall(QualType FnType, llvm::Value *Callee, const CallExpr *E,
      ReturnValueSlot ReturnValue,
      CGCalleeInfo CalleeInfo = CGCalleeInfo(),
      llvm::Value *Chain = nullptr);
  RValue EmitCallExpr(const CallExpr *E,
      ReturnValueSlot ReturnValue = ReturnValueSlot());
  void checkTargetFeatures(const CallExpr *E, const FunctionDecl *TargetDecl);
  llvm::CallInst *EmitRuntimeCall(llvm::Value *callee,
      const Twine &name = "");
  llvm::CallInst *EmitRuntimeCall(llvm::Value *callee,
      ArrayRef<llvm::Value*> args,
      const Twine &name = "");
  llvm::CallInst *EmitNounwindRuntimeCall(llvm::Value *callee,
      const Twine &name = "");
  llvm::CallInst *EmitNounwindRuntimeCall(llvm::Value *callee,
      ArrayRef<llvm::Value*> args,
      const Twine &name = "");
  llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
      ArrayRef<llvm::Value *> Args,
      const Twine &Name = "");
  llvm::CallSite EmitRuntimeCallOrInvoke(llvm::Value *callee,
      ArrayRef<llvm::Value*> args,
      const Twine &name = "");
  llvm::CallSite EmitRuntimeCallOrInvoke(llvm::Value *callee,
      const Twine &name = "");
  void EmitNoreturnRuntimeCallOrInvoke(llvm::Value *callee,
      ArrayRef<llvm::Value*> args);
  llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD,
      NestedNameSpecifier *Qual,
      llvm::Type *Ty);
  llvm::Value *BuildAppleKextVirtualDestructorCall(const CXXDestructorDecl *DD,
      CXXDtorType Type,
      const CXXRecordDecl *RD);
  RValue
  EmitCXXMemberOrOperatorCall(const CXXMethodDecl *MD, llvm::Value *Callee,
      ReturnValueSlot ReturnValue, llvm::Value *This,
      llvm::Value *ImplicitParam,
      QualType ImplicitParamTy, const CallExpr *E);
  RValue EmitCXXStructorCall(const CXXMethodDecl *MD, llvm::Value *Callee,
      ReturnValueSlot ReturnValue, llvm::Value *This,
      llvm::Value *ImplicitParam,
      QualType ImplicitParamTy, const CallExpr *E,
      StructorType Type);
  RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
      ReturnValueSlot ReturnValue);
  RValue EmitCXXMemberOrOperatorMemberCallExpr(const CallExpr *CE,
      const CXXMethodDecl *MD,
      ReturnValueSlot ReturnValue,
      bool HasQualifier,
      NestedNameSpecifier *Qualifier,
      bool IsArrow, const Expr *Base);
  // Compute the object pointer.
  Address EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
      llvm::Value *memberPtr,
      const MemberPointerType *memberPtrType,
      AlignmentSource *AlignSource = nullptr);
  RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
      ReturnValueSlot ReturnValue);
  RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
      const CXXMethodDecl *MD,
      ReturnValueSlot ReturnValue);
  RValue EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
      ReturnValueSlot ReturnValue);
  RValue EmitCUDADevicePrintfCallExpr(const CallExpr *E,
      ReturnValueSlot ReturnValue);
  RValue EmitBuiltinExpr(const FunctionDecl *FD,
      unsigned BuiltinID, const CallExpr *E,
      ReturnValueSlot ReturnValue);
  RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);
  /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call
  /// is unhandled by the current target.
  llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitAArch64CompareBuiltinExpr(llvm::Value *Op, llvm::Type *Ty,
      const llvm::CmpInst::Predicate Fp,
      const llvm::CmpInst::Predicate Ip,
      const llvm::Twine &Name = "");
  llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitCommonNeonBuiltinExpr(unsigned BuiltinID,
      unsigned LLVMIntrinsic,
      unsigned AltLLVMIntrinsic,
      const char *NameHint,
      unsigned Modifier,
      const CallExpr *E,
      SmallVectorImpl<llvm::Value *> &Ops,
      Address PtrOp0, Address PtrOp1);
  llvm::Function *LookupNeonLLVMIntrinsic(unsigned IntrinsicID,
      unsigned Modifier, llvm::Type *ArgTy,
      const CallExpr *E);
  llvm::Value *EmitNeonCall(llvm::Function *F,
      SmallVectorImpl<llvm::Value*> &O,
      const char *name,
      unsigned shift = 0, bool rightshift = false);
  llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx);
  llvm::Value *EmitNeonShiftVector(llvm::Value *V, llvm::Type *Ty,
      bool negateForRightShift);
  llvm::Value *EmitNeonRShiftImm(llvm::Value *Vec, llvm::Value *Amt,
      llvm::Type *Ty, bool usgn, const char *name);
  llvm::Value *vectorWrapScalar16(llvm::Value *Op);
  llvm::Value *EmitAArch64BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *BuildVector(ArrayRef<llvm::Value*> Ops);
  llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitAMDGPUBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitSystemZBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitNVPTXBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitWebAssemblyBuiltinExpr(unsigned BuiltinID,
      const CallExpr *E);
  llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
  llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
  llvm::Value *EmitObjCBoxedExpr(const ObjCBoxedExpr *E);
  llvm::Value *EmitObjCArrayLiteral(const ObjCArrayLiteral *E);
  llvm::Value *EmitObjCDictionaryLiteral(const ObjCDictionaryLiteral *E);
  llvm::Value *EmitObjCCollectionLiteral(const Expr *E,
      const ObjCMethodDecl *MethodWithObjects);
  llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
  RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
      ReturnValueSlot Return = ReturnValueSlot());
  /// Retrieves the default cleanup kind for an ARC cleanup.
  /// Except under -fobjc-arc-eh, ARC cleanups are normal-only.
  CleanupKind getARCCleanupKind() {
    return CGM.getCodeGenOpts().ObjCAutoRefCountExceptions
               ? NormalAndEHCleanup : NormalCleanup;
  }
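  // Illustrative sketch (not part of the original header): the cleanup kind is
  // typically forwarded when registering an ARC release cleanup; the call
  // below and its arguments are illustrative assumptions:
  //
  //   CGF.pushDestroy(CGF.getARCCleanupKind(), Addr, Ty,
  //                   CodeGenFunction::destroyARCStrongImprecise,
  //                   /*useEHCleanupForArray=*/true);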
  // ARC primitives.
  void EmitARCInitWeak(Address addr, llvm::Value *value);
  void EmitARCDestroyWeak(Address addr);
  llvm::Value *EmitARCLoadWeak(Address addr);
  llvm::Value *EmitARCLoadWeakRetained(Address addr);
  llvm::Value *EmitARCStoreWeak(Address addr, llvm::Value *value, bool ignored);
  void EmitARCCopyWeak(Address dst, Address src);
  void EmitARCMoveWeak(Address dst, Address src);
  llvm::Value *EmitARCRetainAutorelease(QualType type, llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleaseNonBlock(llvm::Value *value);
  llvm::Value *EmitARCStoreStrong(LValue lvalue, llvm::Value *value,
      bool resultIgnored);
  llvm::Value *EmitARCStoreStrongCall(Address addr, llvm::Value *value,
      bool resultIgnored);
  llvm::Value *EmitARCRetain(QualType type, llvm::Value *value);
  llvm::Value *EmitARCRetainNonBlock(llvm::Value *value);
  llvm::Value *EmitARCRetainBlock(llvm::Value *value, bool mandatory);
  void EmitARCDestroyStrong(Address addr, ARCPreciseLifetime_t precise);
  void EmitARCRelease(llvm::Value *value, ARCPreciseLifetime_t precise);
  llvm::Value *EmitARCAutorelease(llvm::Value *value);
  llvm::Value *EmitARCAutoreleaseReturnValue(llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleaseReturnValue(llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleasedReturnValue(llvm::Value *value);
  std::pair<LValue,llvm::Value*>
  EmitARCStoreAutoreleasing(const BinaryOperator *e);
  std::pair<LValue,llvm::Value*>
  EmitARCStoreStrong(const BinaryOperator *e, bool ignored);
  llvm::Value *EmitObjCThrowOperand(const Expr *expr);
  llvm::Value *EmitObjCConsumeObject(QualType T, llvm::Value *Ptr);
  llvm::Value *EmitObjCExtendObjectLifetime(QualType T, llvm::Value *Ptr);
  llvm::Value *EmitARCExtendBlockObject(const Expr *expr);
  llvm::Value *EmitARCRetainScalarExpr(const Expr *expr);
  llvm::Value *EmitARCRetainAutoreleaseScalarExpr(const Expr *expr);
  void EmitARCIntrinsicUse(ArrayRef<llvm::Value*> values);
  static Destroyer destroyARCStrongImprecise;
  static Destroyer destroyARCStrongPrecise;
  static Destroyer destroyARCWeak;
  void EmitObjCAutoreleasePoolPop(llvm::Value *Ptr);
  llvm::Value *EmitObjCAutoreleasePoolPush();
  llvm::Value *EmitObjCMRRAutoreleasePoolPush();
  void EmitObjCAutoreleasePoolCleanup(llvm::Value *Ptr);
  void EmitObjCMRRAutoreleasePoolPop(llvm::Value *Ptr);
  /// \brief Emits a reference binding to the passed in expression.
  RValue EmitReferenceBindingToExpr(const Expr *E);

  //===--------------------------------------------------------------------===//
  // Expression Emission
  //===--------------------------------------------------------------------===//

  // Expressions are broken into three classes: scalar, complex, aggregate.
  /// EmitScalarExpr - Emit the computation of the specified expression of LLVM
  /// scalar type, returning the result.
  llvm::Value *EmitScalarExpr(const Expr *E, bool IgnoreResultAssign = false);
  /// Emit a conversion from the specified type to the specified destination
  /// type, both of which are LLVM scalar types.
  llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
      QualType DstTy, SourceLocation Loc);
  /// Emit a conversion from the specified complex type to the specified
  /// destination type, where the destination type is an LLVM scalar type.
  llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
      QualType DstTy,
      SourceLocation Loc);
  /// EmitAggExpr - Emit the computation of the specified expression
  /// of aggregate type. The result is computed into the given slot,
  /// which may be null to indicate that the value is not needed.
  void EmitAggExpr(const Expr *E, AggValueSlot AS);
  /// EmitAggExprToLValue - Emit the computation of the specified expression of
  /// aggregate type into a temporary LValue.
  LValue EmitAggExprToLValue(const Expr *E);
  /// EmitExtendGCLifetime - Given a pointer to an Objective-C object,
  /// make sure it survives garbage collection until this point.
  void EmitExtendGCLifetime(llvm::Value *object);
  /// EmitComplexExpr - Emit the computation of the specified expression of
  /// complex type, returning the result.
  ComplexPairTy EmitComplexExpr(const Expr *E,
      bool IgnoreReal = false,
      bool IgnoreImag = false);
  /// EmitComplexExprIntoLValue - Emit the given expression of complex
  /// type and place its result into the specified l-value.
  void EmitComplexExprIntoLValue(const Expr *E, LValue dest, bool isInit);
  /// EmitStoreOfComplex - Store a complex number into the specified l-value.
  void EmitStoreOfComplex(ComplexPairTy V, LValue dest, bool isInit);
  /// EmitLoadOfComplex - Load a complex number from the specified l-value.
  ComplexPairTy EmitLoadOfComplex(LValue src, SourceLocation loc);
  Address emitAddrOfRealComponent(Address complex, QualType complexType);
  Address emitAddrOfImagComponent(Address complex, QualType complexType);
  /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
  /// global variable that has already been created for it. If the initializer
  /// has a different type than GV does, this may free GV and return a different
  /// one. Otherwise it just returns GV.
  llvm::GlobalVariable *
  AddInitializerToStaticVarDecl(const VarDecl &D,
      llvm::GlobalVariable *GV);
  /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
  /// variable with global storage.
  void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr,
      bool PerformInit);
  llvm::Constant *createAtExitStub(const VarDecl &VD, llvm::Constant *Dtor,
      llvm::Constant *Addr);
  /// Call atexit() with a function that passes the given argument to
  /// the given function.
  void registerGlobalDtorWithAtExit(const VarDecl &D, llvm::Constant *fn,
      llvm::Constant *addr);
  /// Emit code in this function to perform a guarded variable
  /// initialization. Guarded initializations are used when it's not
  /// possible to prove that an initialization will be done exactly
  /// once, e.g. with a static local variable or a static data member
  /// of a class template.
  void EmitCXXGuardedInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr,
      bool PerformInit);
  /// GenerateCXXGlobalInitFunc - Generates code for initializing global
  /// variables.
  void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
      ArrayRef<llvm::Function *> CXXThreadLocals,
      Address Guard = Address::invalid());
  /// GenerateCXXGlobalDtorsFunc - Generates code for destroying global
  /// variables.
  void GenerateCXXGlobalDtorsFunc(llvm::Function *Fn,
      const std::vector<std::pair<llvm::WeakVH,
                                  llvm::Constant*> > &DtorsAndObjects);
  void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
      const VarDecl *D,
      llvm::GlobalVariable *Addr,
      bool PerformInit);
  void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);
  void EmitSynthesizedCXXCopyCtor(Address Dest, Address Src, const Expr *Exp);
  void enterFullExpression(const ExprWithCleanups *E) {
    if (E->getNumObjects() == 0) return;
    enterNonTrivialFullExpression(E);
  }
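  // Illustrative sketch (not part of the original header): the usual pattern
  // when emitting an ExprWithCleanups, assuming EWC is such an expression:
  //
  //   CodeGenFunction::RunCleanupsScope Scope(CGF);
  //   CGF.enterFullExpression(EWC);
  //   llvm::Value *V = CGF.EmitScalarExpr(EWC->getSubExpr());
  //   // Scope's destructor pops any cleanups entered for EWC.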
  void enterNonTrivialFullExpression(const ExprWithCleanups *E);
  void EmitCXXThrowExpr(const CXXThrowExpr *E, bool KeepInsertionPoint = true);
  void EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Dest);
  RValue EmitAtomicExpr(AtomicExpr *E);

  //===--------------------------------------------------------------------===//
  // Annotations Emission
  //===--------------------------------------------------------------------===//

  /// Emit an annotation call (intrinsic or builtin).
  llvm::Value *EmitAnnotationCall(llvm::Value *AnnotationFn,
      llvm::Value *AnnotatedVal,
      StringRef AnnotationStr,
      SourceLocation Location);
  /// Emit local annotations for the local variable V, declared by D.
  void EmitVarAnnotations(const VarDecl *D, llvm::Value *V);
  /// Emit field annotations for the given field & value. Returns the
  /// annotation result.
  Address EmitFieldAnnotations(const FieldDecl *D, Address V);

  //===--------------------------------------------------------------------===//
  // Internal Helpers
  //===--------------------------------------------------------------------===//
  /// ContainsLabel - Return true if the statement contains a label in it. If
  /// this statement is not executed normally, then not containing a label means
  /// that we can just remove the code.
  static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);
  /// containsBreak - Return true if the statement contains a break out of it.
  /// If the statement (recursively) contains a switch or loop with a break
  /// inside of it, this is fine.
  static bool containsBreak(const Stmt *S);
  /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  /// to a constant, or if it does but contains a label, return false. If it
  /// constant folds, return true and set the boolean result in Result.
  bool ConstantFoldsToSimpleInteger(const Expr *Cond, bool &Result);
  /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  /// to a constant, or if it does but contains a label, return false. If it
  /// constant folds, return true and set the folded value in Result.
  bool ConstantFoldsToSimpleInteger(const Expr *Cond, llvm::APSInt &Result);
  /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
  /// if statement) to the specified blocks. Based on the condition, this might
  /// try to simplify the codegen of the conditional based on the branch.
  /// TrueCount should be the number of times we expect the condition to
  /// evaluate to true based on PGO data.
  void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
      llvm::BasicBlock *FalseBlock, uint64_t TrueCount);
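  // Illustrative sketch (not part of the original header): folding a condition
  // before emitting a conditional branch, roughly as if-statement emission
  // does. Cond, Body, TrueBlock, FalseBlock and TrueCount are assumptions:
  //
  //   bool CondConstant;
  //   if (CGF.ConstantFoldsToSimpleInteger(Cond, CondConstant) &&
  //       !CodeGenFunction::ContainsLabel(Body))
  //     /* emit only the branch that is actually taken */;
  //   else
  //     CGF.EmitBranchOnBoolExpr(Cond, TrueBlock, FalseBlock, TrueCount);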
  /// \brief Emit a description of a type in a format suitable for passing to
  /// a runtime sanitizer handler.
  llvm::Constant *EmitCheckTypeDescriptor(QualType T);
  /// \brief Convert a value into a format suitable for passing to a runtime
  /// sanitizer handler.
  llvm::Value *EmitCheckValue(llvm::Value *V);
  /// \brief Emit a description of a source location in a format suitable for
  /// passing to a runtime sanitizer handler.
  llvm::Constant *EmitCheckSourceLocation(SourceLocation Loc);
  /// \brief Create a basic block that will call a handler function in a
  /// sanitizer runtime with the provided arguments, and create a conditional
  /// branch to it.
  void EmitCheck(ArrayRef<std::pair<llvm::Value *, SanitizerMask>> Checked,
      StringRef CheckName, ArrayRef<llvm::Constant *> StaticArgs,
      ArrayRef<llvm::Value *> DynamicArgs);
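  // Illustrative sketch (not part of the original header): emitting a
  // sanitizer check guarded by a condition. The check name, sanitizer kind,
  // and arguments below are placeholders, not the real handler contract:
  //
  //   CGF.EmitCheck({{CondV, SanitizerKind::Null}}, "type_mismatch",
  //                 {CGF.EmitCheckSourceLocation(Loc),
  //                  CGF.EmitCheckTypeDescriptor(Ty)},
  //                 {CGF.EmitCheckValue(Ptr)});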
  /// \brief Emit a slow path cross-DSO CFI check which calls __cfi_slowpath
  /// if Cond is false.
  void EmitCfiSlowPathCheck(SanitizerMask Kind, llvm::Value *Cond,
      llvm::ConstantInt *TypeId, llvm::Value *Ptr,
      ArrayRef<llvm::Constant *> StaticArgs);
  /// \brief Create a basic block that will call the trap intrinsic, and emit a
  /// conditional branch to it, for the -ftrapv checks.
  void EmitTrapCheck(llvm::Value *Checked);
  /// \brief Emit a call to trap or debugtrap and attach function attribute
  /// "trap-func-name" if specified.
  llvm::CallInst *EmitTrapCall(llvm::Intrinsic::ID IntrID);
  /// \brief Emit a cross-DSO CFI failure handling function.
  void EmitCfiCheckFail();
  /// \brief Create a check for a function parameter that may potentially be
  /// declared as non-null.
  void EmitNonNullArgCheck(RValue RV, QualType ArgType, SourceLocation ArgLoc,
      const FunctionDecl *FD, unsigned ParmNum);
  /// EmitCallArg - Emit a single call argument.
  void EmitCallArg(CallArgList &args, const Expr *E, QualType ArgType);
  /// EmitDelegateCallArg - We are performing a delegate call; that
  /// is, the current function is delegating to another one. Produce
  /// an r-value suitable for passing the given parameter.
  void EmitDelegateCallArg(CallArgList &args, const VarDecl *param,
      SourceLocation loc);
  /// SetFPAccuracy - Set the minimum required accuracy of the given floating
  /// point operation, expressed as the maximum relative error in ulp.
  void SetFPAccuracy(llvm::Value *Val, float Accuracy);

private:
  llvm::MDNode *getRangeForLoadFromType(QualType Ty);
  void EmitReturnOfRValue(RValue RV, QualType Ty);
  void deferPlaceholderReplacement(llvm::Instruction *Old, llvm::Value *New);
  llvm::SmallVector<std::pair<llvm::Instruction *, llvm::Value *>, 4>
      DeferredReplacements;
  /// Set the address of a local variable.
  void setAddrOfLocalVar(const VarDecl *VD, Address Addr) {
    assert(!LocalDeclMap.count(VD) && "Decl already exists in LocalDeclMap!");
    LocalDeclMap.insert({VD, Addr});
  }
  /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
  /// from function arguments into \arg Dst. See ABIArgInfo::Expand.
  ///
  /// \param AI - The first function argument of the expansion.
  void ExpandTypeFromArgs(QualType Ty, LValue Dst,
      SmallVectorImpl<llvm::Argument *>::iterator &AI);
  /// ExpandTypeToArgs - Expand an RValue \arg RV, with the LLVM type for \arg
  /// Ty, into individual arguments on the provided vector \arg IRCallArgs,
  /// starting at index \arg IRCallArgPos. See ABIArgInfo::Expand.
  void ExpandTypeToArgs(QualType Ty, RValue RV, llvm::FunctionType *IRFuncTy,
      SmallVectorImpl<llvm::Value *> &IRCallArgs,
      unsigned &IRCallArgPos);
  llvm::Value *EmitAsmInput(const TargetInfo::ConstraintInfo &Info,
      const Expr *InputExpr, std::string &ConstraintStr);
  llvm::Value *EmitAsmInputLValue(const TargetInfo::ConstraintInfo &Info,
      LValue InputValue, QualType InputType,
      std::string &ConstraintStr,
      SourceLocation Loc);
  /// \brief Attempts to statically evaluate the object size of E. If that
  /// fails, emits code to figure the size of E out for us. This is
  /// pass_object_size aware.
  llvm::Value *evaluateOrEmitBuiltinObjectSize(const Expr *E, unsigned Type,
      llvm::IntegerType *ResType);
  /// \brief Emits the size of E, as required by __builtin_object_size. This
  /// function is aware of pass_object_size parameters, and will act accordingly
  /// if E is a parameter with the pass_object_size attribute.
  llvm::Value *emitBuiltinObjectSize(const Expr *E, unsigned Type,
      llvm::IntegerType *ResType);

public:
#ifndef NDEBUG
  // Determine whether the given argument is an Objective-C method
  // that may have type parameters in its signature.
  static bool isObjCMethodWithTypeParams(const ObjCMethodDecl *method) {
    const DeclContext *dc = method->getDeclContext();
    if (const ObjCInterfaceDecl *classDecl = dyn_cast<ObjCInterfaceDecl>(dc)) {
      return classDecl->getTypeParamListAsWritten();
    }
    if (const ObjCCategoryDecl *catDecl = dyn_cast<ObjCCategoryDecl>(dc)) {
      return catDecl->getTypeParamList();
    }
    return false;
  }
  template<typename T>
  static bool isObjCMethodWithTypeParams(const T *) { return false; }
#endif

  /// EmitCallArgs - Emit call arguments for a function.
  template <typename T>
  void EmitCallArgs(CallArgList &Args, const T *CallArgTypeInfo,
      llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
      const FunctionDecl *CalleeDecl = nullptr,
      unsigned ParamsToSkip = 0) {
    SmallVector<QualType, 16> ArgTypes;
    CallExpr::const_arg_iterator Arg = ArgRange.begin();
    assert((ParamsToSkip == 0 || CallArgTypeInfo) &&
           "Can't skip parameters if type info is not provided");
    if (CallArgTypeInfo) {
#ifndef NDEBUG
      bool isGenericMethod = isObjCMethodWithTypeParams(CallArgTypeInfo);
#endif
      // First, use the argument types that the type info knows about
      for (auto I = CallArgTypeInfo->param_type_begin() + ParamsToSkip,
                E = CallArgTypeInfo->param_type_end();
           I != E; ++I, ++Arg) {
        assert(Arg != ArgRange.end() && "Running over edge of argument list!");
        assert((isGenericMethod ||
                ((*I)->isVariablyModifiedType() ||
                 (*I).getNonReferenceType()->isObjCRetainableType() ||
                 getContext()
                         .getCanonicalType((*I).getNonReferenceType())
                         .getTypePtr() ==
                     getContext()
                         .getCanonicalType((*Arg)->getType())
                         .getTypePtr())) &&
               "type mismatch in call argument!");
        ArgTypes.push_back(*I);
      }
    }
    // Either we've emitted all the call args, or we have a call to variadic
    // function.
    assert((Arg == ArgRange.end() || !CallArgTypeInfo ||
            CallArgTypeInfo->isVariadic()) &&
           "Extra arguments in non-variadic function!");
    // If we still have any arguments, emit them using the type of the argument.
    for (auto *A : llvm::make_range(Arg, ArgRange.end()))
      ArgTypes.push_back(getVarArgType(A));
    EmitCallArgs(Args, ArgTypes, ArgRange, CalleeDecl, ParamsToSkip);
  }
  void EmitCallArgs(CallArgList &Args, ArrayRef<QualType> ArgTypes,
      llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
      const FunctionDecl *CalleeDecl = nullptr,
      unsigned ParamsToSkip = 0);
  /// EmitPointerWithAlignment - Given an expression with a pointer
  /// type, emit the value and compute our best estimate of the
  /// alignment of the pointee.
  ///
  /// \param Source - If non-null, this will be initialized with
  /// information about the source of the alignment. Note that this
  /// function will conservatively fall back on the type when it
  /// doesn't recognize the expression, which means that sometimes
  /// the alignment is a worst-case estimate. One reasonable way to
  /// use this information is when there's a language guarantee that
  /// the pointer must be aligned to some stricter value, and we're
  /// simply trying to ensure that sufficiently obvious uses of
  /// under-aligned objects don't get miscompiled; for example, a
  /// placement new into the address of a local variable. In such a
  /// case, it's quite reasonable to just ignore the returned
  /// alignment when it isn't from an explicit source.
  Address EmitPointerWithAlignment(const Expr *Addr,
      AlignmentSource *Source = nullptr);
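  // Illustrative sketch (not part of the original header): emitting a pointer
  // together with its best-known pointee alignment and alignment source,
  // assuming E is an Expr* of pointer type:
  //
  //   AlignmentSource Source;
  //   Address Addr = CGF.EmitPointerWithAlignment(E, &Source);
  //   LValue LV = CGF.MakeAddrLValue(Addr, E->getType()->getPointeeType(),
  //                                  Source);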
  void EmitSanitizerStatReport(llvm::SanitizerStatKind SSK);

private:
  QualType getVarArgType(const Expr *Arg);
  const TargetCodeGenInfo &getTargetHooks() const {
    return CGM.getTargetCodeGenInfo();
  }
  void EmitDeclMetadata();
  BlockByrefHelpers *buildByrefHelpers(llvm::StructType &byrefType,
      const AutoVarEmission &emission);
  void AddObjCARCExceptionMetadata(llvm::Instruction *Inst);
  llvm::Value *GetValueForARMHint(unsigned BuiltinID);
};
/// Helper class with most of the code for saving a value for a
/// conditional expression cleanup.
struct DominatingLLVMValue {
  typedef llvm::PointerIntPair<llvm::Value*, 1, bool> saved_type;

  /// Answer whether the given value needs extra work to be saved.
  static bool needsSaving(llvm::Value *value) {
    // If it's not an instruction, we don't need to save.
    if (!isa<llvm::Instruction>(value)) return false;
    // If it's an instruction in the entry block, we don't need to save.
    llvm::BasicBlock *block = cast<llvm::Instruction>(value)->getParent();
    return (block != &block->getParent()->getEntryBlock());
  }

  /// Try to save the given value.
  static saved_type save(CodeGenFunction &CGF, llvm::Value *value) {
    if (!needsSaving(value)) return saved_type(value, false);
    // Otherwise, we need an alloca.
    auto align = CharUnits::fromQuantity(
        CGF.CGM.getDataLayout().getPrefTypeAlignment(value->getType()));
    Address alloca =
        CGF.CreateTempAlloca(value->getType(), align, "cond-cleanup.save");
    CGF.Builder.CreateStore(value, alloca);
    return saved_type(alloca.getPointer(), true);
  }

  static llvm::Value *restore(CodeGenFunction &CGF, saved_type value) {
    // If the value says it wasn't saved, trust that it's still dominating.
    if (!value.getInt()) return value.getPointer();
    // Otherwise, it should be an alloca instruction, as set up in save().
    auto alloca = cast<llvm::AllocaInst>(value.getPointer());
    return CGF.Builder.CreateAlignedLoad(alloca, alloca->getAlignment());
  }
};
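// Illustrative sketch (not part of the original header): saving an
// llvm::Value across a conditionally-executed cleanup and restoring it when
// the cleanup is emitted:
//
//   DominatingLLVMValue::saved_type Saved = DominatingLLVMValue::save(CGF, V);
//   // ... later, possibly at a different insertion point ...
//   llvm::Value *Reloaded = DominatingLLVMValue::restore(CGF, Saved);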
/// A partial specialization of DominatingValue for llvm::Values that
/// might be llvm::Instructions.
template <class T> struct DominatingPointer<T,true> : DominatingLLVMValue {
  typedef T *type;
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return static_cast<T*>(DominatingLLVMValue::restore(CGF, value));
  }
};

/// A specialization of DominatingValue for Address.
template <> struct DominatingValue<Address> {
  typedef Address type;

  struct saved_type {
    DominatingLLVMValue::saved_type SavedValue;
    CharUnits Alignment;
  };

  static bool needsSaving(type value) {
    return DominatingLLVMValue::needsSaving(value.getPointer());
  }
  static saved_type save(CodeGenFunction &CGF, type value) {
    return { DominatingLLVMValue::save(CGF, value.getPointer()),
             value.getAlignment() };
  }
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return Address(DominatingLLVMValue::restore(CGF, value.SavedValue),
                   value.Alignment);
  }
};

/// A specialization of DominatingValue for RValue.
template <> struct DominatingValue<RValue> {
  typedef RValue type;
  class saved_type {
    enum Kind { ScalarLiteral, ScalarAddress, AggregateLiteral,
                AggregateAddress, ComplexAddress };
    llvm::Value *Value;
    unsigned K : 3;
    unsigned Align : 29;
    saved_type(llvm::Value *v, Kind k, unsigned a = 0)
        : Value(v), K(k), Align(a) {}
  public:
    static bool needsSaving(RValue value);
    static saved_type save(CodeGenFunction &CGF, RValue value);
    RValue restore(CodeGenFunction &CGF);
    // implementations in CGCleanup.cpp
  };

  static bool needsSaving(type value) {
    return saved_type::needsSaving(value);
  }
  static saved_type save(CodeGenFunction &CGF, type value) {
    return saved_type::save(CGF, value);
  }
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return value.restore(CGF);
  }
};

} // end namespace CodeGen
} // end namespace clang

#endif