CodeGenFunction.h 143 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407
  1. //===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This is the internal per-function state used for llvm translation.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #ifndef LLVM_CLANG_LIB_CODEGEN_CODEGENFUNCTION_H
  14. #define LLVM_CLANG_LIB_CODEGEN_CODEGENFUNCTION_H
  15. #include "CGBuilder.h"
  16. #include "CGDebugInfo.h"
  17. #include "CGLoopInfo.h"
  18. #include "CGValue.h"
  19. #include "CodeGenModule.h"
  20. #include "CodeGenPGO.h"
  21. #include "EHScopeStack.h"
  22. #include "clang/AST/CharUnits.h"
  23. #include "clang/AST/ExprCXX.h"
  24. #include "clang/AST/ExprObjC.h"
  25. #include "clang/AST/ExprOpenMP.h"
  26. #include "clang/AST/Type.h"
  27. #include "clang/Basic/ABI.h"
  28. #include "clang/Basic/CapturedStmt.h"
  29. #include "clang/Basic/OpenMPKinds.h"
  30. #include "clang/Basic/TargetInfo.h"
  31. #include "clang/Frontend/CodeGenOptions.h"
  32. #include "llvm/ADT/ArrayRef.h"
  33. #include "llvm/ADT/DenseMap.h"
  34. #include "llvm/ADT/SmallVector.h"
  35. #include "llvm/IR/ValueHandle.h"
  36. #include "llvm/Support/Debug.h"
  37. #include "llvm/Transforms/Utils/SanitizerStats.h"
  38. namespace llvm {
  39. class BasicBlock;
  40. class LLVMContext;
  41. class MDNode;
  42. class Module;
  43. class SwitchInst;
  44. class Twine;
  45. class Value;
  46. class CallSite;
  47. }
  48. namespace clang {
  49. class ASTContext;
  50. class BlockDecl;
  51. class CXXDestructorDecl;
  52. class CXXForRangeStmt;
  53. class CXXTryStmt;
  54. class Decl;
  55. class LabelDecl;
  56. class EnumConstantDecl;
  57. class FunctionDecl;
  58. class FunctionProtoType;
  59. class LabelStmt;
  60. class ObjCContainerDecl;
  61. class ObjCInterfaceDecl;
  62. class ObjCIvarDecl;
  63. class ObjCMethodDecl;
  64. class ObjCImplementationDecl;
  65. class ObjCPropertyImplDecl;
  66. class TargetInfo;
  67. class VarDecl;
  68. class ObjCForCollectionStmt;
  69. class ObjCAtTryStmt;
  70. class ObjCAtThrowStmt;
  71. class ObjCAtSynchronizedStmt;
  72. class ObjCAutoreleasePoolStmt;
  73. namespace CodeGen {
  74. class CodeGenTypes;
  75. class CGFunctionInfo;
  76. class CGRecordLayout;
  77. class CGBlockInfo;
  78. class CGCXXABI;
  79. class BlockByrefHelpers;
  80. class BlockByrefInfo;
  81. class BlockFlags;
  82. class BlockFieldFlags;
  83. class RegionCodeGenTy;
  84. class TargetCodeGenInfo;
  85. struct OMPTaskDataTy;
/// The kind of evaluation to perform on values of a particular
/// type. Basically, is the code in CGExprScalar, CGExprComplex, or
/// CGExprAgg?
///
/// TODO: should vectors maybe be split out into their own thing?
enum TypeEvaluationKind {
  TEK_Scalar,    // Evaluated as a single scalar value (CGExprScalar).
  TEK_Complex,   // Evaluated as a real/imaginary pair (CGExprComplex).
  TEK_Aggregate  // Evaluated into memory (CGExprAgg).
};
  96. /// CodeGenFunction - This class organizes the per-function state that is used
  97. /// while generating LLVM code.
  98. class CodeGenFunction : public CodeGenTypeCache {
  99. CodeGenFunction(const CodeGenFunction &) = delete;
  100. void operator=(const CodeGenFunction &) = delete;
  101. friend class CGCXXABI;
  102. public:
  103. /// A jump destination is an abstract label, branching to which may
  104. /// require a jump out through normal cleanups.
  105. struct JumpDest {
  106. JumpDest() : Block(nullptr), ScopeDepth(), Index(0) {}
  107. JumpDest(llvm::BasicBlock *Block,
  108. EHScopeStack::stable_iterator Depth,
  109. unsigned Index)
  110. : Block(Block), ScopeDepth(Depth), Index(Index) {}
  111. bool isValid() const { return Block != nullptr; }
  112. llvm::BasicBlock *getBlock() const { return Block; }
  113. EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
  114. unsigned getDestIndex() const { return Index; }
  115. // This should be used cautiously.
  116. void setScopeDepth(EHScopeStack::stable_iterator depth) {
  117. ScopeDepth = depth;
  118. }
  119. private:
  120. llvm::BasicBlock *Block;
  121. EHScopeStack::stable_iterator ScopeDepth;
  122. unsigned Index;
  123. };
  124. CodeGenModule &CGM; // Per-module state.
  125. const TargetInfo &Target;
  126. typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy;
  127. LoopInfoStack LoopStack;
  128. CGBuilderTy Builder;
  129. /// \brief CGBuilder insert helper. This function is called after an
  130. /// instruction is created using Builder.
  131. void InsertHelper(llvm::Instruction *I, const llvm::Twine &Name,
  132. llvm::BasicBlock *BB,
  133. llvm::BasicBlock::iterator InsertPt) const;
  134. /// CurFuncDecl - Holds the Decl for the current outermost
  135. /// non-closure context.
  136. const Decl *CurFuncDecl;
  137. /// CurCodeDecl - This is the inner-most code context, which includes blocks.
  138. const Decl *CurCodeDecl;
  139. const CGFunctionInfo *CurFnInfo;
  140. QualType FnRetTy;
  141. llvm::Function *CurFn;
  142. /// CurGD - The GlobalDecl for the current function being compiled.
  143. GlobalDecl CurGD;
  144. /// PrologueCleanupDepth - The cleanup depth enclosing all the
  145. /// cleanups associated with the parameters.
  146. EHScopeStack::stable_iterator PrologueCleanupDepth;
  147. /// ReturnBlock - Unified return block.
  148. JumpDest ReturnBlock;
  149. /// ReturnValue - The temporary alloca to hold the return
  150. /// value. This is invalid iff the function has no return value.
  151. Address ReturnValue;
  152. /// AllocaInsertPoint - This is an instruction in the entry block before which
  153. /// we prefer to insert allocas.
  154. llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;
  155. /// \brief API for captured statement code generation.
  156. class CGCapturedStmtInfo {
  157. public:
  158. explicit CGCapturedStmtInfo(CapturedRegionKind K = CR_Default)
  159. : Kind(K), ThisValue(nullptr), CXXThisFieldDecl(nullptr) {}
  160. explicit CGCapturedStmtInfo(const CapturedStmt &S,
  161. CapturedRegionKind K = CR_Default)
  162. : Kind(K), ThisValue(nullptr), CXXThisFieldDecl(nullptr) {
  163. RecordDecl::field_iterator Field =
  164. S.getCapturedRecordDecl()->field_begin();
  165. for (CapturedStmt::const_capture_iterator I = S.capture_begin(),
  166. E = S.capture_end();
  167. I != E; ++I, ++Field) {
  168. if (I->capturesThis())
  169. CXXThisFieldDecl = *Field;
  170. else if (I->capturesVariable())
  171. CaptureFields[I->getCapturedVar()] = *Field;
  172. else if (I->capturesVariableByCopy())
  173. CaptureFields[I->getCapturedVar()] = *Field;
  174. }
  175. }
  176. virtual ~CGCapturedStmtInfo();
  177. CapturedRegionKind getKind() const { return Kind; }
  178. virtual void setContextValue(llvm::Value *V) { ThisValue = V; }
  179. // \brief Retrieve the value of the context parameter.
  180. virtual llvm::Value *getContextValue() const { return ThisValue; }
  181. /// \brief Lookup the captured field decl for a variable.
  182. virtual const FieldDecl *lookup(const VarDecl *VD) const {
  183. return CaptureFields.lookup(VD);
  184. }
  185. bool isCXXThisExprCaptured() const { return getThisFieldDecl() != nullptr; }
  186. virtual FieldDecl *getThisFieldDecl() const { return CXXThisFieldDecl; }
  187. static bool classof(const CGCapturedStmtInfo *) {
  188. return true;
  189. }
  190. /// \brief Emit the captured statement body.
  191. virtual void EmitBody(CodeGenFunction &CGF, const Stmt *S) {
  192. CGF.incrementProfileCounter(S);
  193. CGF.EmitStmt(S);
  194. }
  195. /// \brief Get the name of the capture helper.
  196. virtual StringRef getHelperName() const { return "__captured_stmt"; }
  197. private:
  198. /// \brief The kind of captured statement being generated.
  199. CapturedRegionKind Kind;
  200. /// \brief Keep the map between VarDecl and FieldDecl.
  201. llvm::SmallDenseMap<const VarDecl *, FieldDecl *> CaptureFields;
  202. /// \brief The base address of the captured record, passed in as the first
  203. /// argument of the parallel region function.
  204. llvm::Value *ThisValue;
  205. /// \brief Captured 'this' type.
  206. FieldDecl *CXXThisFieldDecl;
  207. };
  208. CGCapturedStmtInfo *CapturedStmtInfo;
  209. /// \brief RAII for correct setting/restoring of CapturedStmtInfo.
  210. class CGCapturedStmtRAII {
  211. private:
  212. CodeGenFunction &CGF;
  213. CGCapturedStmtInfo *PrevCapturedStmtInfo;
  214. public:
  215. CGCapturedStmtRAII(CodeGenFunction &CGF,
  216. CGCapturedStmtInfo *NewCapturedStmtInfo)
  217. : CGF(CGF), PrevCapturedStmtInfo(CGF.CapturedStmtInfo) {
  218. CGF.CapturedStmtInfo = NewCapturedStmtInfo;
  219. }
  220. ~CGCapturedStmtRAII() { CGF.CapturedStmtInfo = PrevCapturedStmtInfo; }
  221. };
  222. /// \brief Sanitizers enabled for this function.
  223. SanitizerSet SanOpts;
  224. /// \brief True if CodeGen currently emits code implementing sanitizer checks.
  225. bool IsSanitizerScope;
/// \brief RAII object to set/unset CodeGenFunction::IsSanitizerScope.
class SanitizerScope {
  CodeGenFunction *CGF; // Function whose IsSanitizerScope flag is managed.
public:
  // Both defined out of line; presumably the constructor sets
  // CGF->IsSanitizerScope and the destructor clears it -- confirm in the
  // implementation file.
  SanitizerScope(CodeGenFunction *CGF);
  ~SanitizerScope();
};
  233. /// In C++, whether we are code generating a thunk. This controls whether we
  234. /// should emit cleanups.
  235. bool CurFuncIsThunk;
  236. /// In ARC, whether we should autorelease the return value.
  237. bool AutoreleaseResult;
  238. /// Whether we processed a Microsoft-style asm block during CodeGen. These can
  239. /// potentially set the return value.
  240. bool SawAsmBlock;
  241. const FunctionDecl *CurSEHParent = nullptr;
  242. /// True if the current function is an outlined SEH helper. This can be a
  243. /// finally block or filter expression.
  244. bool IsOutlinedSEHHelper;
  245. const CodeGen::CGBlockInfo *BlockInfo;
  246. llvm::Value *BlockPointer;
  247. llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
  248. FieldDecl *LambdaThisCaptureField;
  249. /// \brief A mapping from NRVO variables to the flags used to indicate
  250. /// when the NRVO has been applied to this variable.
  251. llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;
  252. EHScopeStack EHStack;
  253. llvm::SmallVector<char, 256> LifetimeExtendedCleanupStack;
  254. llvm::SmallVector<const JumpDest *, 2> SEHTryEpilogueStack;
  255. llvm::Instruction *CurrentFuncletPad = nullptr;
/// Header for data within LifetimeExtendedCleanupStack.
///
/// Each entry on that byte stack is one of these headers immediately
/// followed by Size bytes holding the cleanup object itself (see
/// pushCleanupAfterFullExpr, which placement-news both parts).
struct LifetimeExtendedCleanupHeader {
  /// The size of the following cleanup object.
  unsigned Size;
  /// The kind of cleanup to push: a value from the CleanupKind enumeration.
  CleanupKind Kind;

  size_t getSize() const { return Size; }
  CleanupKind getKind() const { return Kind; }
};
  265. /// i32s containing the indexes of the cleanup destinations.
  266. llvm::AllocaInst *NormalCleanupDest;
  267. unsigned NextCleanupDestIndex;
  268. /// FirstBlockInfo - The head of a singly-linked-list of block layouts.
  269. CGBlockInfo *FirstBlockInfo;
  270. /// EHResumeBlock - Unified block containing a call to llvm.eh.resume.
  271. llvm::BasicBlock *EHResumeBlock;
  272. /// The exception slot. All landing pads write the current exception pointer
  273. /// into this alloca.
  274. llvm::Value *ExceptionSlot;
  275. /// The selector slot. Under the MandatoryCleanup model, all landing pads
  276. /// write the current selector value into this alloca.
  277. llvm::AllocaInst *EHSelectorSlot;
  278. /// A stack of exception code slots. Entering an __except block pushes a slot
  279. /// on the stack and leaving pops one. The __exception_code() intrinsic loads
  280. /// a value from the top of the stack.
  281. SmallVector<Address, 1> SEHCodeSlotStack;
  282. /// Value returned by __exception_info intrinsic.
  283. llvm::Value *SEHInfo = nullptr;
  284. /// Emits a landing pad for the current EH stack.
  285. llvm::BasicBlock *EmitLandingPad();
  286. llvm::BasicBlock *getInvokeDestImpl();
/// Save \p value so it can be used after a conditionally-evaluated
/// expression; delegates to DominatingValue<T>'s per-type saving strategy.
template <class T>
typename DominatingValue<T>::saved_type saveValueInCond(T value) {
  return DominatingValue<T>::save(*this, value);
}
  291. public:
  292. /// ObjCEHValueStack - Stack of Objective-C exception values, used for
  293. /// rethrows.
  294. SmallVector<llvm::Value*, 8> ObjCEHValueStack;
/// A class controlling the emission of a finally block.
class FinallyInfo {
  /// Where the catchall's edge through the cleanup should go.
  JumpDest RethrowDest;

  /// A function to call to enter the catch.
  llvm::Constant *BeginCatchFn;

  /// An i1 variable indicating whether or not the @finally is
  /// running for an exception.
  llvm::AllocaInst *ForEHVar;

  /// An i8* variable into which the exception pointer to rethrow
  /// has been saved.
  llvm::AllocaInst *SavedExnVar;

public:
  /// Begin emission of the finally region for \p Finally, using the given
  /// runtime begin-catch/end-catch/rethrow helpers. Defined out of line.
  void enter(CodeGenFunction &CGF, const Stmt *Finally,
             llvm::Constant *beginCatchFn, llvm::Constant *endCatchFn,
             llvm::Constant *rethrowFn);
  /// Finish emission of the finally region. Defined out of line.
  void exit(CodeGenFunction &CGF);
};
/// Returns true inside SEH __try blocks, i.e. while at least one __try
/// epilogue is active on SEHTryEpilogueStack.
bool isSEHTryScope() const { return !SEHTryEpilogueStack.empty(); }
/// Returns true while emitting a cleanuppad: the current funclet pad
/// exists and is specifically a cleanuppad instruction (as opposed to
/// some other kind of funclet pad).
bool isCleanupPadScope() const {
  return CurrentFuncletPad && isa<llvm::CleanupPadInst>(CurrentFuncletPad);
}
/// pushFullExprCleanup - Push a cleanup to be run at the end of the
/// current full-expression. Safe against the possibility that
/// we're currently inside a conditionally-evaluated expression.
template <class T, class... As>
void pushFullExprCleanup(CleanupKind kind, As... A) {
  // If we're not in a conditional branch, or if none of the
  // arguments requires saving, then use the unconditional cleanup.
  if (!isInConditionalBranch())
    return EHStack.pushCleanup<T>(kind, A...);

  // Otherwise the raw arguments may not be valid at the point the cleanup
  // runs; save each one via DominatingValue<T>::save (through
  // saveValueInCond).
  // Stash values in a tuple so we can guarantee the order of saves.
  typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
  SavedTuple Saved{saveValueInCond(A)...};

  // Push the conditional-cleanup variant over the saved values, then mark
  // it as a conditional full-expression cleanup via initFullExprCleanup().
  typedef EHScopeStack::ConditionalCleanup<T, As...> CleanupType;
  EHStack.pushCleanupTuple<CleanupType>(kind, Saved);
  initFullExprCleanup();
}
/// \brief Queue a cleanup to be pushed after finishing the current
/// full-expression.
///
/// The cleanup is serialized onto LifetimeExtendedCleanupStack as a
/// LifetimeExtendedCleanupHeader followed by the cleanup object's bytes.
template <class T, class... As>
void pushCleanupAfterFullExpr(CleanupKind Kind, As... A) {
  // Deferred cleanups cannot be conditional; saving conditional values is
  // only supported by pushFullExprCleanup.
  assert(!isInConditionalBranch() && "can't defer conditional cleanup");

  LifetimeExtendedCleanupHeader Header = { sizeof(T), Kind };

  // Grow the byte stack to make room for header + cleanup object.
  size_t OldSize = LifetimeExtendedCleanupStack.size();
  LifetimeExtendedCleanupStack.resize(
      LifetimeExtendedCleanupStack.size() + sizeof(Header) + Header.Size);

  // The cleanup object lands at an offset of sizeof(Header) from the start
  // of its entry, so that offset must respect T's alignment.
  static_assert(sizeof(Header) % llvm::AlignOf<T>::Alignment == 0,
                "Cleanup will be allocated on misaligned address");
  char *Buffer = &LifetimeExtendedCleanupStack[OldSize];
  // Placement-new the header and then the cleanup object right after it.
  new (Buffer) LifetimeExtendedCleanupHeader(Header);
  new (Buffer + sizeof(Header)) T(A...);
}
/// Set up the last cleanup that was pushed as a conditional
/// full-expression cleanup.
  352. void initFullExprCleanup();
  353. /// PushDestructorCleanup - Push a cleanup to call the
  354. /// complete-object destructor of an object of the given type at the
  355. /// given address. Does nothing if T is not a C++ class type with a
  356. /// non-trivial destructor.
  357. void PushDestructorCleanup(QualType T, Address Addr);
  358. /// PushDestructorCleanup - Push a cleanup to call the
  359. /// complete-object variant of the given destructor on the object at
  360. /// the given address.
  361. void PushDestructorCleanup(const CXXDestructorDecl *Dtor, Address Addr);
  362. /// PopCleanupBlock - Will pop the cleanup entry on the stack and
  363. /// process all branch fixups.
  364. void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
  365. /// DeactivateCleanupBlock - Deactivates the given cleanup block.
  366. /// The block cannot be reactivated. Pops it if it's the top of the
  367. /// stack.
  368. ///
  369. /// \param DominatingIP - An instruction which is known to
  370. /// dominate the current IP (if set) and which lies along
  371. /// all paths of execution between the current IP and the
  372. /// the point at which the cleanup comes into scope.
  373. void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
  374. llvm::Instruction *DominatingIP);
  375. /// ActivateCleanupBlock - Activates an initially-inactive cleanup.
  376. /// Cannot be used to resurrect a deactivated cleanup.
  377. ///
  378. /// \param DominatingIP - An instruction which is known to
  379. /// dominate the current IP (if set) and which lies along
  380. /// all paths of execution between the current IP and the
  381. /// the point at which the cleanup comes into scope.
  382. void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup,
  383. llvm::Instruction *DominatingIP);
  384. /// \brief Enters a new scope for capturing cleanups, all of which
  385. /// will be executed once the scope is exited.
  386. class RunCleanupsScope {
  387. EHScopeStack::stable_iterator CleanupStackDepth;
  388. size_t LifetimeExtendedCleanupStackSize;
  389. bool OldDidCallStackSave;
  390. protected:
  391. bool PerformCleanup;
  392. private:
  393. RunCleanupsScope(const RunCleanupsScope &) = delete;
  394. void operator=(const RunCleanupsScope &) = delete;
  395. protected:
  396. CodeGenFunction& CGF;
  397. public:
  398. /// \brief Enter a new cleanup scope.
  399. explicit RunCleanupsScope(CodeGenFunction &CGF)
  400. : PerformCleanup(true), CGF(CGF)
  401. {
  402. CleanupStackDepth = CGF.EHStack.stable_begin();
  403. LifetimeExtendedCleanupStackSize =
  404. CGF.LifetimeExtendedCleanupStack.size();
  405. OldDidCallStackSave = CGF.DidCallStackSave;
  406. CGF.DidCallStackSave = false;
  407. }
  408. /// \brief Exit this cleanup scope, emitting any accumulated
  409. /// cleanups.
  410. ~RunCleanupsScope() {
  411. if (PerformCleanup) {
  412. CGF.DidCallStackSave = OldDidCallStackSave;
  413. CGF.PopCleanupBlocks(CleanupStackDepth,
  414. LifetimeExtendedCleanupStackSize);
  415. }
  416. }
  417. /// \brief Determine whether this scope requires any cleanups.
  418. bool requiresCleanups() const {
  419. return CGF.EHStack.stable_begin() != CleanupStackDepth;
  420. }
  421. /// \brief Force the emission of cleanups now, instead of waiting
  422. /// until this object is destroyed.
  423. void ForceCleanup() {
  424. assert(PerformCleanup && "Already forced cleanup");
  425. CGF.DidCallStackSave = OldDidCallStackSave;
  426. CGF.PopCleanupBlocks(CleanupStackDepth,
  427. LifetimeExtendedCleanupStackSize);
  428. PerformCleanup = false;
  429. }
  430. };
/// A RunCleanupsScope that also tracks the source extent of the scope
/// (for debug-info lexical blocks), links itself into the chain rooted
/// at CGF.CurLexicalScope, and records labels declared in the scope so
/// rescopeLabels() can process them when the scope ends.
class LexicalScope : public RunCleanupsScope {
  SourceRange Range;                       // Source extent of the scope.
  SmallVector<const LabelDecl*, 4> Labels; // Labels declared in this scope.
  LexicalScope *ParentScope;               // Enclosing scope; restored on cleanup.

  LexicalScope(const LexicalScope &) = delete;
  void operator=(const LexicalScope &) = delete;

public:
  /// \brief Enter a new cleanup scope.
  explicit LexicalScope(CodeGenFunction &CGF, SourceRange Range)
      : RunCleanupsScope(CGF), Range(Range), ParentScope(CGF.CurLexicalScope) {
    // Become the innermost lexical scope and open the matching
    // debug-info lexical block, if debug info is enabled.
    CGF.CurLexicalScope = this;
    if (CGDebugInfo *DI = CGF.getDebugInfo())
      DI->EmitLexicalBlockStart(CGF.Builder, Range.getBegin());
  }

  /// Record a label declared in this scope; rescopeLabels() will run over
  /// the recorded labels during ForceCleanup().
  void addLabel(const LabelDecl *label) {
    assert(PerformCleanup && "adding label to dead scope?");
    Labels.push_back(label);
  }

  /// \brief Exit this cleanup scope, emitting any accumulated
  /// cleanups.
  ~LexicalScope() {
    // Close the debug-info lexical block before running cleanups.
    if (CGDebugInfo *DI = CGF.getDebugInfo())
      DI->EmitLexicalBlockEnd(CGF.Builder, Range.getEnd());

    // If we should perform a cleanup, force them now. Note that
    // this ends the cleanup scope before rescoping any labels.
    if (PerformCleanup) {
      ApplyDebugLocation DL(CGF, Range.getEnd());
      ForceCleanup();
    }
  }

  /// \brief Force the emission of cleanups now, instead of waiting
  /// until this object is destroyed.
  void ForceCleanup() {
    // Pop this scope off the lexical-scope chain before emitting cleanups.
    CGF.CurLexicalScope = ParentScope;
    RunCleanupsScope::ForceCleanup();
    if (!Labels.empty())
      rescopeLabels();
  }

  // Defined out of line; presumably moves the recorded labels into the
  // enclosing scope -- confirm in the implementation.
  void rescopeLabels();
};
  /// Map from a declaration to the Address of its storage in this function.
  typedef llvm::DenseMap<const Decl *, Address> DeclMapTy;

  /// \brief The scope used to remap some variables as private in the OpenMP
  /// loop body (or other captured region emitted without outlining), and to
  /// restore old vars back on exit.
  class OMPPrivateScope : public RunCleanupsScope {
    /// Pre-existing local mappings saved at registration time;
    /// Address::invalid() marks decls that had no mapping.  Restored by
    /// ForceCleanup().
    DeclMapTy SavedLocals;
    /// Private replacements registered by addPrivate(); installed into
    /// CGF.LocalDeclMap by Privatize().
    DeclMapTy SavedPrivates;

  private:
    OMPPrivateScope(const OMPPrivateScope &) = delete;
    void operator=(const OMPPrivateScope &) = delete;

  public:
    /// \brief Enter a new OpenMP private scope.
    explicit OMPPrivateScope(CodeGenFunction &CGF) : RunCleanupsScope(CGF) {}

    /// \brief Registers \a LocalVD variable as a private and apply \a
    /// PrivateGen function for it to generate corresponding private variable.
    /// \a PrivateGen returns an address of the generated private variable.
    /// \return true if the variable is registered as private, false if it has
    /// been privatized already.
    bool
    addPrivate(const VarDecl *LocalVD,
               llvm::function_ref<Address()> PrivateGen) {
      assert(PerformCleanup && "adding private to dead scope");

      // Only save it once.
      if (SavedLocals.count(LocalVD)) return false;

      // Copy the existing local entry to SavedLocals.
      auto it = CGF.LocalDeclMap.find(LocalVD);
      if (it != CGF.LocalDeclMap.end()) {
        SavedLocals.insert({LocalVD, it->second});
      } else {
        // Invalid address marks "no previous mapping".
        SavedLocals.insert({LocalVD, Address::invalid()});
      }

      // Generate the private entry.
      Address Addr = PrivateGen();
      QualType VarTy = LocalVD->getType();
      if (VarTy->isReferenceType()) {
        // For a reference, store the generated pointer into a temporary so
        // the map entry has the same indirection as the original variable.
        Address Temp = CGF.CreateMemTemp(VarTy);
        CGF.Builder.CreateStore(Addr.getPointer(), Temp);
        Addr = Temp;
      }
      SavedPrivates.insert({LocalVD, Addr});

      return true;
    }

    /// \brief Privatizes local variables previously registered as private.
    /// Registration is separate from the actual privatization to allow
    /// initializers use values of the original variables, not the private one.
    /// This is important, for example, if the private variable is a class
    /// variable initialized by a constructor that references other private
    /// variables. But at initialization original variables must be used, not
    /// private copies.
    /// \return true if at least one variable was privatized, false otherwise.
    bool Privatize() {
      copyInto(SavedPrivates, CGF.LocalDeclMap);
      SavedPrivates.clear();
      return !SavedLocals.empty();
    }

    void ForceCleanup() {
      RunCleanupsScope::ForceCleanup();
      // Restore the original variable mappings.
      copyInto(SavedLocals, CGF.LocalDeclMap);
      SavedLocals.clear();
    }

    /// \brief Exit scope - all the mapped variables are restored.
    ~OMPPrivateScope() {
      if (PerformCleanup)
        ForceCleanup();
    }

    /// Checks if the global variable is captured in current function.
    bool isGlobalVarCaptured(const VarDecl *VD) const {
      return !VD->isLocalVarDeclOrParm() && CGF.LocalDeclMap.count(VD) > 0;
    }

  private:
    /// Copy all the entries in the source map over the corresponding
    /// entries in the destination, which must exist.
    static void copyInto(const DeclMapTy &src, DeclMapTy &dest) {
      for (auto &pair : src) {
        if (!pair.second.isValid()) {
          // An invalid address marks "had no previous mapping": remove it.
          dest.erase(pair.first);
          continue;
        }

        auto it = dest.find(pair.first);
        if (it != dest.end()) {
          it->second = pair.second;
        } else {
          dest.insert(pair);
        }
      }
    }
  };
  /// \brief Takes the old cleanup stack size and emits the cleanup blocks
  /// that have been added.
  void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);

  /// \brief Takes the old cleanup stack size and emits the cleanup blocks
  /// that have been added, then adds all lifetime-extended cleanups from
  /// the given position to the stack.
  void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize,
                        size_t OldLifetimeExtendedStackSize);

  void ResolveBranchFixups(llvm::BasicBlock *Target);

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
    return JumpDest(Target,
                    EHStack.getInnermostNormalCleanup(),
                    NextCleanupDestIndex++);
  }

  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.  This overload creates a
  /// fresh basic block with the given name.
  JumpDest getJumpDestInCurrentScope(StringRef Name = StringRef()) {
    return getJumpDestInCurrentScope(createBasicBlock(Name));
  }

  /// EmitBranchThroughCleanup - Emit a branch from the current insert
  /// block through the normal cleanup handling code (if any) and then
  /// on to \arg Dest.
  void EmitBranchThroughCleanup(JumpDest Dest);

  /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
  /// specified destination obviously has no cleanups to run.  'false' is always
  /// a conservatively correct answer for this method.
  bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const;

  /// popCatchScope - Pops the catch scope at the top of the EHScope
  /// stack, emitting any required code (other than the catch handlers
  /// themselves).
  void popCatchScope();

  llvm::BasicBlock *getEHResumeBlock(bool isCleanup);
  llvm::BasicBlock *getEHDispatchBlock(EHScopeStack::stable_iterator scope);
  llvm::BasicBlock *getMSVCDispatchBlock(EHScopeStack::stable_iterator scope);
  596. /// An object to manage conditionally-evaluated expressions.
  597. class ConditionalEvaluation {
  598. llvm::BasicBlock *StartBB;
  599. public:
  600. ConditionalEvaluation(CodeGenFunction &CGF)
  601. : StartBB(CGF.Builder.GetInsertBlock()) {}
  602. void begin(CodeGenFunction &CGF) {
  603. assert(CGF.OutermostConditional != this);
  604. if (!CGF.OutermostConditional)
  605. CGF.OutermostConditional = this;
  606. }
  607. void end(CodeGenFunction &CGF) {
  608. assert(CGF.OutermostConditional != nullptr);
  609. if (CGF.OutermostConditional == this)
  610. CGF.OutermostConditional = nullptr;
  611. }
  612. /// Returns a block which will be executed prior to each
  613. /// evaluation of the conditional code.
  614. llvm::BasicBlock *getStartingBlock() const {
  615. return StartBB;
  616. }
  617. };
  /// isInConditionalBranch - Return true if we're currently emitting
  /// one branch or the other of a conditional expression.
  bool isInConditionalBranch() const { return OutermostConditional != nullptr; }

  /// Emit a store of \p value to \p addr, inserted immediately before the
  /// last instruction of the outermost conditional's starting block — i.e.
  /// before any conditionally-executed code runs.
  void setBeforeOutermostConditional(llvm::Value *value, Address addr) {
    assert(isInConditionalBranch());
    llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
    // &block->back() is the insert-before position: the block's final
    // instruction (presumably its terminator — confirm at call sites).
    auto store = new llvm::StoreInst(value, addr.getPointer(), &block->back());
    store->setAlignment(addr.getAlignment().getQuantity());
  }
  /// An RAII object to record that we're evaluating a statement
  /// expression.
  class StmtExprEvaluation {
    CodeGenFunction &CGF;

    /// We have to save the outermost conditional: cleanups in a
    /// statement expression aren't conditional just because the
    /// StmtExpr is.
    ConditionalEvaluation *SavedOutermostConditional;

  public:
    StmtExprEvaluation(CodeGenFunction &CGF)
      : CGF(CGF), SavedOutermostConditional(CGF.OutermostConditional) {
      // Inside the statement expression, no conditional context applies.
      CGF.OutermostConditional = nullptr;
    }

    ~StmtExprEvaluation() {
      CGF.OutermostConditional = SavedOutermostConditional;
      // Re-establish an insertion point for code following the StmtExpr.
      CGF.EnsureInsertPoint();
    }
  };
  645. /// An object which temporarily prevents a value from being
  646. /// destroyed by aggressive peephole optimizations that assume that
  647. /// all uses of a value have been realized in the IR.
  648. class PeepholeProtection {
  649. llvm::Instruction *Inst;
  650. friend class CodeGenFunction;
  651. public:
  652. PeepholeProtection() : Inst(nullptr) {}
  653. };
  /// A non-RAII class containing all the information about a bound
  /// opaque value.  OpaqueValueMapping, below, is a RAII wrapper for
  /// this which makes individual mappings very simple; using this
  /// class directly is useful when you have a variable number of
  /// opaque values or don't want the RAII functionality for some
  /// reason.
  class OpaqueValueMappingData {
    const OpaqueValueExpr *OpaqueValue;
    bool BoundLValue;  // true => entry in OpaqueLValues, else OpaqueRValues
    CodeGenFunction::PeepholeProtection Protection;

    OpaqueValueMappingData(const OpaqueValueExpr *ov,
                           bool boundLValue)
      : OpaqueValue(ov), BoundLValue(boundLValue) {}
  public:
    OpaqueValueMappingData() : OpaqueValue(nullptr) {}

    static bool shouldBindAsLValue(const Expr *expr) {
      // gl-values should be bound as l-values for obvious reasons.
      // Records should be bound as l-values because IR generation
      // always keeps them in memory.  Expressions of function type
      // act exactly like l-values but are formally required to be
      // r-values in C.
      return expr->isGLValue() ||
             expr->getType()->isFunctionType() ||
             hasAggregateEvaluationKind(expr->getType());
    }

    /// Bind \p ov to the result of emitting \p e, choosing l-value or
    /// r-value emission based on shouldBindAsLValue(ov).
    static OpaqueValueMappingData bind(CodeGenFunction &CGF,
                                       const OpaqueValueExpr *ov,
                                       const Expr *e) {
      if (shouldBindAsLValue(ov))
        return bind(CGF, ov, CGF.EmitLValue(e));
      return bind(CGF, ov, CGF.EmitAnyExpr(e));
    }

    /// Bind \p ov to an already-emitted l-value.
    static OpaqueValueMappingData bind(CodeGenFunction &CGF,
                                       const OpaqueValueExpr *ov,
                                       const LValue &lv) {
      assert(shouldBindAsLValue(ov));
      CGF.OpaqueLValues.insert(std::make_pair(ov, lv));
      return OpaqueValueMappingData(ov, true);
    }

    /// Bind \p ov to an already-emitted r-value.
    static OpaqueValueMappingData bind(CodeGenFunction &CGF,
                                       const OpaqueValueExpr *ov,
                                       const RValue &rv) {
      assert(!shouldBindAsLValue(ov));
      CGF.OpaqueRValues.insert(std::make_pair(ov, rv));

      OpaqueValueMappingData data(ov, false);

      // Work around an extremely aggressive peephole optimization in
      // EmitScalarConversion which assumes that all other uses of a
      // value are extant.
      data.Protection = CGF.protectFromPeepholes(rv);

      return data;
    }

    bool isValid() const { return OpaqueValue != nullptr; }
    void clear() { OpaqueValue = nullptr; }

    /// Remove the mapping installed by bind(), releasing any peephole
    /// protection taken for r-value bindings.
    void unbind(CodeGenFunction &CGF) {
      assert(OpaqueValue && "no data to unbind!");

      if (BoundLValue) {
        CGF.OpaqueLValues.erase(OpaqueValue);
      } else {
        CGF.OpaqueRValues.erase(OpaqueValue);
        CGF.unprotectFromPeepholes(Protection);
      }
    }
  };
  /// An RAII object to set (and then clear) a mapping for an OpaqueValueExpr.
  class OpaqueValueMapping {
    CodeGenFunction &CGF;
    OpaqueValueMappingData Data;  // invalid when no mapping is installed

  public:
    static bool shouldBindAsLValue(const Expr *expr) {
      return OpaqueValueMappingData::shouldBindAsLValue(expr);
    }

    /// Build the opaque value mapping for the given conditional
    /// operator if it's the GNU ?: extension.  This is a common
    /// enough pattern that the convenience operator is really
    /// helpful.
    ///
    OpaqueValueMapping(CodeGenFunction &CGF,
                       const AbstractConditionalOperator *op) : CGF(CGF) {
      // A plain ConditionalOperator has no opaque value to bind.
      if (isa<ConditionalOperator>(op))
        // Leave Data empty.
        return;

      const BinaryConditionalOperator *e = cast<BinaryConditionalOperator>(op);
      Data = OpaqueValueMappingData::bind(CGF, e->getOpaqueValue(),
                                          e->getCommon());
    }

    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       LValue lvalue)
      : CGF(CGF), Data(OpaqueValueMappingData::bind(CGF, opaqueValue, lvalue)) {
    }

    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       RValue rvalue)
      : CGF(CGF), Data(OpaqueValueMappingData::bind(CGF, opaqueValue, rvalue)) {
    }

    /// Explicitly remove the mapping before this object is destroyed.
    void pop() {
      Data.unbind(CGF);
      Data.clear();
    }

    ~OpaqueValueMapping() {
      if (Data.isValid()) Data.unbind(CGF);
    }
  };
private:
  CGDebugInfo *DebugInfo;   // debug-info emitter; see getDebugInfo()
  bool DisableDebugInfo;    // when true, getDebugInfo() returns null

  /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
  /// calling llvm.stacksave for multiple VLAs in the same scope.
  bool DidCallStackSave;

  /// IndirectBranch - The first time an indirect goto is seen we create a block
  /// with an indirect branch.  Every time we see the address of a label taken,
  /// we add the label to the indirect goto.  Every subsequent indirect goto is
  /// codegen'd as a jump to the IndirectBranch's basic block.
  llvm::IndirectBrInst *IndirectBranch;

  /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
  /// decls.
  DeclMapTy LocalDeclMap;

  /// SizeArguments - If a ParmVarDecl had the pass_object_size attribute, this
  /// will contain a mapping from said ParmVarDecl to its implicit "object_size"
  /// parameter.
  llvm::SmallDenseMap<const ParmVarDecl *, const ImplicitParamDecl *, 2>
      SizeArguments;

  /// Track escaped local variables with auto storage. Used during SEH
  /// outlining to produce a call to llvm.localescape.
  llvm::DenseMap<llvm::AllocaInst *, int> EscapedLocals;

  /// LabelMap - This keeps track of the LLVM basic block for each C label.
  llvm::DenseMap<const LabelDecl*, JumpDest> LabelMap;

  // BreakContinueStack - This keeps track of where break and continue
  // statements should jump to.
  struct BreakContinue {
    BreakContinue(JumpDest Break, JumpDest Continue)
      : BreakBlock(Break), ContinueBlock(Continue) {}

    JumpDest BreakBlock;
    JumpDest ContinueBlock;
  };
  SmallVector<BreakContinue, 8> BreakContinueStack;

  /// Per-function PGO state (counter increments and region counts).
  CodeGenPGO PGO;

  /// Calculate branch weights appropriate for PGO data
  llvm::MDNode *createProfileWeights(uint64_t TrueCount, uint64_t FalseCount);
  llvm::MDNode *createProfileWeights(ArrayRef<uint64_t> Weights);
  llvm::MDNode *createProfileWeightsForLoop(const Stmt *Cond,
                                            uint64_t LoopCount);
public:
  /// Increment the profiler's counter for the given statement.
  void incrementProfileCounter(const Stmt *S) {
    // Only emit an actual counter increment when Clang-based instrumentation
    // is enabled; S is always recorded as the current statement.
    if (CGM.getCodeGenOpts().hasProfileClangInstr())
      PGO.emitCounterIncrement(Builder, S);
    PGO.setCurrentStmt(S);
  }
  803. /// Get the profiler's count for the given statement.
  804. uint64_t getProfileCount(const Stmt *S) {
  805. Optional<uint64_t> Count = PGO.getStmtCount(S);
  806. if (!Count.hasValue())
  807. return 0;
  808. return *Count;
  809. }
  /// Set the profiler's current count.
  void setCurrentProfileCount(uint64_t Count) {
    PGO.setCurrentRegionCount(Count);
  }

  /// Get the profiler's current count. This is generally the count for the most
  /// recently incremented counter.
  uint64_t getCurrentProfileCount() {
    return PGO.getCurrentRegionCount();
  }
private:
  /// SwitchInsn - This is nearest current switch instruction. It is null if
  /// current context is not in a switch.
  llvm::SwitchInst *SwitchInsn;

  /// The branch weights of SwitchInsn when doing instrumentation based PGO.
  SmallVector<uint64_t, 16> *SwitchWeights;

  /// CaseRangeBlock - This block holds if condition check for last case
  /// statement range in current switch instruction.
  llvm::BasicBlock *CaseRangeBlock;

  /// OpaqueLValues - Keeps track of the current set of opaque value
  /// expressions.
  llvm::DenseMap<const OpaqueValueExpr *, LValue> OpaqueLValues;
  llvm::DenseMap<const OpaqueValueExpr *, RValue> OpaqueRValues;

  // VLASizeMap - This keeps track of the associated size for each VLA type.
  // We track this by the size expression rather than the type itself because
  // in certain situations, like a const qualifier applied to an VLA typedef,
  // multiple VLA types can share the same size expression.
  // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
  // enter/leave scopes.
  llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;

  /// A block containing a single 'unreachable' instruction.  Created
  /// lazily by getUnreachableBlock().
  llvm::BasicBlock *UnreachableBlock;

  /// Counts of the number return expressions in the function.
  unsigned NumReturnExprs;

  /// Count the number of simple (constant) return expressions in the function.
  unsigned NumSimpleReturnExprs;

  /// The last regular (non-return) debug location (breakpoint) in the function.
  SourceLocation LastStopPoint;
  848. public:
  849. /// A scope within which we are constructing the fields of an object which
  850. /// might use a CXXDefaultInitExpr. This stashes away a 'this' value to use
  851. /// if we need to evaluate a CXXDefaultInitExpr within the evaluation.
  852. class FieldConstructionScope {
  853. public:
  854. FieldConstructionScope(CodeGenFunction &CGF, Address This)
  855. : CGF(CGF), OldCXXDefaultInitExprThis(CGF.CXXDefaultInitExprThis) {
  856. CGF.CXXDefaultInitExprThis = This;
  857. }
  858. ~FieldConstructionScope() {
  859. CGF.CXXDefaultInitExprThis = OldCXXDefaultInitExprThis;
  860. }
  861. private:
  862. CodeGenFunction &CGF;
  863. Address OldCXXDefaultInitExprThis;
  864. };
  /// The scope of a CXXDefaultInitExpr. Within this scope, the value of 'this'
  /// is overridden to be the object under construction.
  class CXXDefaultInitExprScope {
  public:
    CXXDefaultInitExprScope(CodeGenFunction &CGF)
      : CGF(CGF), OldCXXThisValue(CGF.CXXThisValue),
        OldCXXThisAlignment(CGF.CXXThisAlignment) {
      // Redirect 'this' to the object stashed in CXXDefaultInitExprThis
      // (set up by FieldConstructionScope).
      CGF.CXXThisValue = CGF.CXXDefaultInitExprThis.getPointer();
      CGF.CXXThisAlignment = CGF.CXXDefaultInitExprThis.getAlignment();
    }
    ~CXXDefaultInitExprScope() {
      CGF.CXXThisValue = OldCXXThisValue;
      CGF.CXXThisAlignment = OldCXXThisAlignment;
    }

  public:
    CodeGenFunction &CGF;
    llvm::Value *OldCXXThisValue;
    CharUnits OldCXXThisAlignment;
  };
private:
  /// CXXThisDecl - When generating code for a C++ member function,
  /// this will hold the implicit 'this' declaration.
  ImplicitParamDecl *CXXABIThisDecl;
  llvm::Value *CXXABIThisValue;
  llvm::Value *CXXThisValue;
  CharUnits CXXABIThisAlignment;
  CharUnits CXXThisAlignment;

  /// The value of 'this' to use when evaluating CXXDefaultInitExprs within
  /// this expression.
  Address CXXDefaultInitExprThis = Address::invalid();

  /// CXXStructorImplicitParamDecl - When generating code for a constructor or
  /// destructor, this will hold the implicit argument (e.g. VTT).
  ImplicitParamDecl *CXXStructorImplicitParamDecl;
  llvm::Value *CXXStructorImplicitParamValue;

  /// OutermostConditional - Points to the outermost active
  /// conditional control.  This is used so that we know if a
  /// temporary should be destroyed conditionally.
  ConditionalEvaluation *OutermostConditional;

  /// The current lexical scope.
  LexicalScope *CurLexicalScope;

  /// The current source location that should be used for exception
  /// handling code.
  SourceLocation CurEHLocation;

  /// BlockByrefInfos - For each __block variable, contains
  /// information about the layout of the variable.
  llvm::DenseMap<const ValueDecl *, BlockByrefInfo> BlockByrefInfos;

  llvm::BasicBlock *TerminateLandingPad;
  llvm::BasicBlock *TerminateHandler;
  llvm::BasicBlock *TrapBB;

  /// Add a kernel metadata node to the named metadata node 'opencl.kernels'.
  /// In the kernel metadata node, reference the kernel function and metadata
  /// nodes for its optional attribute qualifiers (OpenCL 1.1 6.7.2):
  /// - A node for the vec_type_hint(<type>) qualifier contains string
  ///   "vec_type_hint", an undefined value of the <type> data type,
  ///   and a Boolean that is true if the <type> is integer and signed.
  /// - A node for the work_group_size_hint(X,Y,Z) qualifier contains string
  ///   "work_group_size_hint", and three 32-bit integers X, Y and Z.
  /// - A node for the reqd_work_group_size(X,Y,Z) qualifier contains string
  ///   "reqd_work_group_size", and three 32-bit integers X, Y and Z.
  void EmitOpenCLKernelMetadata(const FunctionDecl *FD,
                                llvm::Function *Fn);
public:
  CodeGenFunction(CodeGenModule &cgm, bool suppressNewContext=false);
  ~CodeGenFunction();

  CodeGenTypes &getTypes() const { return CGM.getTypes(); }
  ASTContext &getContext() const { return CGM.getContext(); }

  /// Returns the debug-info emitter, or null while debug info has been
  /// temporarily disabled via disableDebugInfo().
  CGDebugInfo *getDebugInfo() {
    if (DisableDebugInfo)
      return nullptr;
    return DebugInfo;
  }
  void disableDebugInfo() { DisableDebugInfo = true; }
  void enableDebugInfo() { DisableDebugInfo = false; }

  /// Fused ARC entrypoints are used only at -O0.
  bool shouldUseFusedARCCalls() {
    return CGM.getCodeGenOpts().OptimizationLevel == 0;
  }

  const LangOptions &getLangOpts() const { return CGM.getLangOpts(); }

  /// Returns a pointer to the function's exception object and selector slot,
  /// which is assigned in every landing pad.
  Address getExceptionSlot();
  Address getEHSelectorSlot();

  /// Returns the contents of the function's exception object and selector
  /// slots.
  llvm::Value *getExceptionFromSlot();
  llvm::Value *getSelectorFromSlot();

  Address getNormalCleanupDestSlot();

  /// Lazily create (once per function) and return a block containing a
  /// single 'unreachable' instruction.
  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");
      new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
    }
    return UnreachableBlock;
  }

  llvm::BasicBlock *getInvokeDest() {
    // No landing pad is needed when the EH stack doesn't require one.
    if (!EHStack.requiresLandingPad()) return nullptr;
    return getInvokeDestImpl();
  }

  bool currentFunctionUsesSEHTry() const { return CurSEHParent != nullptr; }

  const TargetInfo &getTarget() const { return Target; }
  llvm::LLVMContext &getLLVMContext() { return CGM.getLLVMContext(); }
  //===--------------------------------------------------------------------===//
  //                                  Cleanups
  //===--------------------------------------------------------------------===//

  /// Signature of a destruction callback: destroy the object at \p addr of
  /// type \p ty.
  typedef void Destroyer(CodeGenFunction &CGF, Address addr, QualType ty);

  void pushIrregularPartialArrayCleanup(llvm::Value *arrayBegin,
                                        Address arrayEndPointer,
                                        QualType elementType,
                                        CharUnits elementAlignment,
                                        Destroyer *destroyer);
  void pushRegularPartialArrayCleanup(llvm::Value *arrayBegin,
                                      llvm::Value *arrayEnd,
                                      QualType elementType,
                                      CharUnits elementAlignment,
                                      Destroyer *destroyer);

  void pushDestroy(QualType::DestructionKind dtorKind,
                   Address addr, QualType type);
  void pushEHDestroy(QualType::DestructionKind dtorKind,
                     Address addr, QualType type);
  void pushDestroy(CleanupKind kind, Address addr, QualType type,
                   Destroyer *destroyer, bool useEHCleanupForArray);
  void pushLifetimeExtendedDestroy(CleanupKind kind, Address addr,
                                   QualType type, Destroyer *destroyer,
                                   bool useEHCleanupForArray);
  void pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                   llvm::Value *CompletePtr,
                                   QualType ElementType);
  void pushStackRestore(CleanupKind kind, Address SPMem);
  void emitDestroy(Address addr, QualType type, Destroyer *destroyer,
                   bool useEHCleanupForArray);
  llvm::Function *generateDestroyHelper(Address addr, QualType type,
                                        Destroyer *destroyer,
                                        bool useEHCleanupForArray,
                                        const VarDecl *VD);
  void emitArrayDestroy(llvm::Value *begin, llvm::Value *end,
                        QualType elementType, CharUnits elementAlign,
                        Destroyer *destroyer,
                        bool checkZeroLength, bool useEHCleanup);

  Destroyer *getDestroyer(QualType::DestructionKind destructionKind);

  /// Determines whether an EH cleanup is required to destroy a type
  /// with the given destruction kind.
  bool needsEHCleanup(QualType::DestructionKind kind) {
    switch (kind) {
    case QualType::DK_none:
      return false;
    case QualType::DK_cxx_destructor:
    case QualType::DK_objc_weak_lifetime:
      return getLangOpts().Exceptions;
    case QualType::DK_objc_strong_lifetime:
      // Strong lifetime additionally requires ARC exception support.
      return getLangOpts().Exceptions &&
             CGM.getCodeGenOpts().ObjCAutoRefCountExceptions;
    }
    llvm_unreachable("bad destruction kind");
  }

  /// Map a destruction kind to the cleanup kind to push for it.
  CleanupKind getCleanupKind(QualType::DestructionKind kind) {
    return (needsEHCleanup(kind) ? NormalAndEHCleanup : NormalCleanup);
  }
  1021. //===--------------------------------------------------------------------===//
  1022. // Objective-C
  1023. //===--------------------------------------------------------------------===//
  1024. void GenerateObjCMethod(const ObjCMethodDecl *OMD);
  1025. void StartObjCMethod(const ObjCMethodDecl *MD, const ObjCContainerDecl *CD);
  1026. /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
  1027. void GenerateObjCGetter(ObjCImplementationDecl *IMP,
  1028. const ObjCPropertyImplDecl *PID);
  1029. void generateObjCGetterBody(const ObjCImplementationDecl *classImpl,
  1030. const ObjCPropertyImplDecl *propImpl,
  1031. const ObjCMethodDecl *GetterMothodDecl,
  1032. llvm::Constant *AtomicHelperFn);
  1033. void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
  1034. ObjCMethodDecl *MD, bool ctor);
  1035. /// GenerateObjCSetter - Synthesize an Objective-C property setter function
  1036. /// for the given property.
  1037. void GenerateObjCSetter(ObjCImplementationDecl *IMP,
  1038. const ObjCPropertyImplDecl *PID);
  1039. void generateObjCSetterBody(const ObjCImplementationDecl *classImpl,
  1040. const ObjCPropertyImplDecl *propImpl,
  1041. llvm::Constant *AtomicHelperFn);
  //===--------------------------------------------------------------------===//
  //                                  Block Bits
  //===--------------------------------------------------------------------===//

  llvm::Value *EmitBlockLiteral(const BlockExpr *);
  llvm::Value *EmitBlockLiteral(const CGBlockInfo &Info);
  static void destroyBlockInfos(CGBlockInfo *info);

  llvm::Function *GenerateBlockFunction(GlobalDecl GD,
                                        const CGBlockInfo &Info,
                                        const DeclMapTy &ldm,
                                        bool IsLambdaConversionToBlock);

  llvm::Constant *GenerateCopyHelperFunction(const CGBlockInfo &blockInfo);
  llvm::Constant *GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo);
  llvm::Constant *GenerateObjCAtomicSetterCopyHelperFunction(
                                                const ObjCPropertyImplDecl *PID);
  llvm::Constant *GenerateObjCAtomicGetterCopyHelperFunction(
                                                const ObjCPropertyImplDecl *PID);
  llvm::Value *EmitBlockCopyAndAutorelease(llvm::Value *Block, QualType Ty);

  void BuildBlockRelease(llvm::Value *DeclPtr, BlockFieldFlags flags);

  class AutoVarEmission;

  void emitByrefStructureInit(const AutoVarEmission &emission);
  void enterByrefCleanup(const AutoVarEmission &emission);

  void setBlockContextParameter(const ImplicitParamDecl *D, unsigned argNum,
                                llvm::Value *ptr);

  Address LoadBlockStruct();
  Address GetAddrOfBlockDecl(const VarDecl *var, bool ByRef);

  /// BuildBlockByrefAddress - Computes the location of the
  /// data in a variable which is declared as __block.
  Address emitBlockByrefAddress(Address baseAddr, const VarDecl *V,
                                bool followForward = true);
  Address emitBlockByrefAddress(Address baseAddr,
                                const BlockByrefInfo &info,
                                bool followForward,
                                const llvm::Twine &name);

  const BlockByrefInfo &getBlockByrefInfo(const VarDecl *var);
  void GenerateCode(GlobalDecl GD, llvm::Function *Fn,
                    const CGFunctionInfo &FnInfo);

  /// \brief Emit code for the start of a function.
  /// \param Loc       The location to be associated with the function.
  /// \param StartLoc  The location of the function body.
  void StartFunction(GlobalDecl GD,
                     QualType RetTy,
                     llvm::Function *Fn,
                     const CGFunctionInfo &FnInfo,
                     const FunctionArgList &Args,
                     SourceLocation Loc = SourceLocation(),
                     SourceLocation StartLoc = SourceLocation());

  void EmitConstructorBody(FunctionArgList &Args);
  void EmitDestructorBody(FunctionArgList &Args);
  void emitImplicitAssignmentOperatorBody(FunctionArgList &Args);
  void EmitFunctionBody(FunctionArgList &Args, const Stmt *Body);
  void EmitBlockWithFallThrough(llvm::BasicBlock *BB, const Stmt *S);

  void EmitForwardingCallToLambda(const CXXMethodDecl *LambdaCallOperator,
                                  CallArgList &CallArgs);
  void EmitLambdaToBlockPointerBody(FunctionArgList &Args);
  void EmitLambdaBlockInvokeBody();
  void EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD);
  void EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD);

  void EmitAsanPrologueOrEpilogue(bool Prologue);

  /// \brief Emit the unified return block, trying to avoid its emission when
  /// possible.
  /// \return The debug location of the user written return statement if the
  /// return block is avoided.
  llvm::DebugLoc EmitReturnBlock();

  /// FinishFunction - Complete IR generation of the current function. It is
  /// legal to call this function even if there is no current insertion point.
  void FinishFunction(SourceLocation EndLoc=SourceLocation());

  void StartThunk(llvm::Function *Fn, GlobalDecl GD,
                  const CGFunctionInfo &FnInfo);

  void EmitCallAndReturnForThunk(llvm::Value *Callee, const ThunkInfo *Thunk);

  void FinishThunk();

  /// Emit a musttail call for a thunk with a potentially adjusted this pointer.
  void EmitMustTailThunk(const CXXMethodDecl *MD, llvm::Value *AdjustedThisPtr,
                         llvm::Value *Callee);

  /// Generate a thunk for the given method.
  void generateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
                     GlobalDecl GD, const ThunkInfo &Thunk);

  llvm::Function *GenerateVarArgsThunk(llvm::Function *Fn,
                                       const CGFunctionInfo &FnInfo,
                                       GlobalDecl GD, const ThunkInfo &Thunk);

  void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
                        FunctionArgList &Args);

  void EmitInitializerForField(FieldDecl *Field, LValue LHS, Expr *Init,
                               ArrayRef<VarDecl *> ArrayIndexes);
/// Struct with all information about a dynamic [sub]class needed to set vptr.
struct VPtr {
  BaseSubobject Base;
  const CXXRecordDecl *NearestVBase;
  CharUnits OffsetFromNearestVBase;
  const CXXRecordDecl *VTableClass;
};
/// Initialize the vtable pointer of the given subobject.
void InitializeVTablePointer(const VPtr &vptr);

typedef llvm::SmallVector<VPtr, 4> VPtrsVector;
typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;

/// Collect all the vptrs that need to be initialized for the given class.
VPtrsVector getVTablePointers(const CXXRecordDecl *VTableClass);

/// Recursive worker for the overload above; accumulates results in \p vptrs
/// while tracking already-visited virtual bases in \p VBases.
void getVTablePointers(BaseSubobject Base, const CXXRecordDecl *NearestVBase,
                       CharUnits OffsetFromNearestVBase,
                       bool BaseIsNonVirtualPrimaryBase,
                       const CXXRecordDecl *VTableClass,
                       VisitedVirtualBasesSetTy &VBases, VPtrsVector &vptrs);

/// Initialize all of the vtable pointers of the given class.
void InitializeVTablePointers(const CXXRecordDecl *ClassDecl);
/// GetVTablePtr - Return the Value of the vtable pointer member pointed
/// to by This.
llvm::Value *GetVTablePtr(Address This, llvm::Type *VTableTy,
                          const CXXRecordDecl *VTableClass);

/// Kinds of control-flow-integrity (CFI) type checks.
enum CFITypeCheckKind {
  CFITCK_VCall,
  CFITCK_NVCall,
  CFITCK_DerivedCast,
  CFITCK_UnrelatedCast,
  CFITCK_ICall,
};

/// \brief Derived is the presumed address of an object of type T after a
/// cast. If T is a polymorphic class type, emit a check that the virtual
/// table for Derived belongs to a class derived from T.
void EmitVTablePtrCheckForCast(QualType T, llvm::Value *Derived,
                               bool MayBeNull, CFITypeCheckKind TCK,
                               SourceLocation Loc);

/// EmitVTablePtrCheckForCall - A virtual method of class RD is being called
/// via VTable. If vptr CFI is enabled, emit a check that VTable is valid.
void EmitVTablePtrCheckForCall(const CXXRecordDecl *RD, llvm::Value *VTable,
                               CFITypeCheckKind TCK, SourceLocation Loc);

/// EmitVTablePtrCheck - Emit a check that VTable is a valid virtual table for
/// RD using llvm.type.test.
void EmitVTablePtrCheck(const CXXRecordDecl *RD, llvm::Value *VTable,
                        CFITypeCheckKind TCK, SourceLocation Loc);

/// If whole-program virtual table optimization is enabled, emit an assumption
/// that VTable is a member of RD's type identifier. Or, if vptr CFI is
/// enabled, emit a check that VTable is a member of RD's type identifier.
void EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
                                  llvm::Value *VTable, SourceLocation Loc);

/// Returns whether we should perform a type checked load when loading a
/// virtual function for virtual calls to members of RD. This is generally
/// true when both vcall CFI and whole-program-vtables are enabled.
bool ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD);

/// Emit a type checked load from the given vtable.
llvm::Value *EmitVTableTypeCheckedLoad(const CXXRecordDecl *RD, llvm::Value *VTable,
                                       uint64_t VTableByteOffset);

/// CanDevirtualizeMemberFunctionCall - Checks whether virtual calls on the
/// given expr can be devirtualized.
bool CanDevirtualizeMemberFunctionCall(const Expr *Base,
                                       const CXXMethodDecl *MD);
/// EnterDtorCleanups - Enter the cleanups necessary to complete the
/// given phase of destruction for a destructor. The end result
/// should call destructors on members and base classes in reverse
/// order of their construction.
void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type);

/// ShouldInstrumentFunction - Return true if the current function should be
/// instrumented with __cyg_profile_func_* calls.
bool ShouldInstrumentFunction();

/// EmitFunctionInstrumentation - Emit LLVM code to call the specified
/// instrumentation function with the current function and the call site, if
/// function instrumentation is enabled.
void EmitFunctionInstrumentation(const char *Fn);

/// EmitMCountInstrumentation - Emit call to .mcount.
void EmitMCountInstrumentation();

/// EmitFunctionProlog - Emit the target specific LLVM code to load the
/// arguments for the given function. This is also responsible for naming the
/// LLVM function arguments.
void EmitFunctionProlog(const CGFunctionInfo &FI,
                        llvm::Function *Fn,
                        const FunctionArgList &Args);

/// EmitFunctionEpilog - Emit the target specific LLVM code to return the
/// given temporary.
void EmitFunctionEpilog(const CGFunctionInfo &FI, bool EmitRetDbgLoc,
                        SourceLocation EndLoc);

/// EmitStartEHSpec - Emit the start of the exception spec.
void EmitStartEHSpec(const Decl *D);

/// EmitEndEHSpec - Emit the end of the exception spec.
void EmitEndEHSpec(const Decl *D);

/// getTerminateLandingPad - Return a landing pad that just calls terminate.
llvm::BasicBlock *getTerminateLandingPad();

/// getTerminateHandler - Return a handler (not a landing pad, just
/// a catch handler) that just calls terminate. This is used when
/// a terminate scope encloses a try.
llvm::BasicBlock *getTerminateHandler();
/// Convert the given AST type to the LLVM type used when a value of that
/// type is stored in memory.
llvm::Type *ConvertTypeForMem(QualType T);

/// Convert the given AST type to its LLVM IR type.
llvm::Type *ConvertType(QualType T);
llvm::Type *ConvertType(const TypeDecl *T) {
  return ConvertType(getContext().getTypeDeclType(T));
}

/// LoadObjCSelf - Load the value of self. This function is only valid while
/// generating code for an Objective-C method.
llvm::Value *LoadObjCSelf();

/// TypeOfSelfObject - Return type of object that this self represents.
QualType TypeOfSelfObject();

/// getEvaluationKind - Return how values of the given AST type are evaluated
/// and represented (e.g. TEK_Scalar or TEK_Aggregate).
static TypeEvaluationKind getEvaluationKind(QualType T);

static bool hasScalarEvaluationKind(QualType T) {
  return getEvaluationKind(T) == TEK_Scalar;
}

static bool hasAggregateEvaluationKind(QualType T) {
  return getEvaluationKind(T) == TEK_Aggregate;
}
/// createBasicBlock - Create an LLVM basic block.
/// In NDEBUG (release) builds the \p name is deliberately dropped so that
/// no block-name strings are materialized.
llvm::BasicBlock *createBasicBlock(const Twine &name = "",
                                   llvm::Function *parent = nullptr,
                                   llvm::BasicBlock *before = nullptr) {
#ifdef NDEBUG
  return llvm::BasicBlock::Create(getLLVMContext(), "", parent, before);
#else
  return llvm::BasicBlock::Create(getLLVMContext(), name, parent, before);
#endif
}
/// getJumpDestForLabel - Return the JumpDest for the basic block that the
/// specified label maps to.
JumpDest getJumpDestForLabel(const LabelDecl *S);

/// SimplifyForwardingBlocks - If the given basic block is only a branch to
/// another basic block, simplify it. This assumes that no other code could
/// potentially reference the basic block.
void SimplifyForwardingBlocks(llvm::BasicBlock *BB);

/// EmitBlock - Emit the given block \arg BB and set it as the insert point,
/// adding a fall-through branch from the current insert block if
/// necessary. It is legal to call this function even if there is no current
/// insertion point.
///
/// IsFinished - If true, indicates that the caller has finished emitting
/// branches to the given block and does not expect to emit code into it. This
/// means the block can be ignored if it is unreachable.
void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

/// EmitBlockAfterUses - Emit the given block somewhere hopefully
/// near its uses, and leave the insertion point in it.
void EmitBlockAfterUses(llvm::BasicBlock *BB);

/// EmitBranch - Emit a branch to the specified basic block from the current
/// insert block, taking care to avoid creation of branches from dummy
/// blocks. It is legal to call this function even if there is no current
/// insertion point.
///
/// This function clears the current insertion point. The caller should follow
/// calls to this function with calls to Emit*Block prior to generating new
/// code.
void EmitBranch(llvm::BasicBlock *Block);

/// HaveInsertPoint - True if an insertion point is defined. If not, this
/// indicates that the current code being emitted is unreachable.
bool HaveInsertPoint() const {
  return Builder.GetInsertBlock() != nullptr;
}
  1280. /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  1281. /// emitted IR has a place to go. Note that by definition, if this function
  1282. /// creates a block then that block is unreachable; callers may do better to
  1283. /// detect when no insertion point is defined and simply skip IR generation.
  1284. void EnsureInsertPoint() {
  1285. if (!HaveInsertPoint())
  1286. EmitBlock(createBasicBlock());
  1287. }
/// ErrorUnsupported - Print out an error that codegen doesn't support the
/// specified stmt yet.
void ErrorUnsupported(const Stmt *S, const char *Type);

//===--------------------------------------------------------------------===//
//                                  Helpers
//===--------------------------------------------------------------------===//

/// Build an LValue for the given address and type, attaching TBAA info.
LValue MakeAddrLValue(Address Addr, QualType T,
                      AlignmentSource AlignSource = AlignmentSource::Type) {
  return LValue::MakeAddr(Addr, T, getContext(), AlignSource,
                          CGM.getTBAAInfo(T));
}

/// Convenience overload taking a raw pointer plus an explicit alignment.
LValue MakeAddrLValue(llvm::Value *V, QualType T, CharUnits Alignment,
                      AlignmentSource AlignSource = AlignmentSource::Type) {
  return LValue::MakeAddr(Address(V, Alignment), T, getContext(),
                          AlignSource, CGM.getTBAAInfo(T));
}

LValue MakeNaturalAlignPointeeAddrLValue(llvm::Value *V, QualType T);
LValue MakeNaturalAlignAddrLValue(llvm::Value *V, QualType T);

CharUnits getNaturalTypeAlignment(QualType T,
                                  AlignmentSource *Source = nullptr,
                                  bool forPointeeType = false);
CharUnits getNaturalPointeeTypeAlignment(QualType T,
                                         AlignmentSource *Source = nullptr);

Address EmitLoadOfReference(Address Ref, const ReferenceType *RefTy,
                            AlignmentSource *Source = nullptr);
LValue EmitLoadOfReferenceLValue(Address Ref, const ReferenceType *RefTy);

Address EmitLoadOfPointer(Address Ptr, const PointerType *PtrTy,
                          AlignmentSource *Source = nullptr);
LValue EmitLoadOfPointerLValue(Address Ptr, const PointerType *PtrTy);
/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block. The caller is responsible for setting an appropriate alignment on
/// the alloca.
llvm::AllocaInst *CreateTempAlloca(llvm::Type *Ty,
                                   const Twine &Name = "tmp");
Address CreateTempAlloca(llvm::Type *Ty, CharUnits align,
                         const Twine &Name = "tmp");

/// CreateDefaultAlignTempAlloca - This creates an alloca with the
/// default ABI alignment of the given LLVM type.
///
/// IMPORTANT NOTE: This is *not* generally the right alignment for
/// any given AST type that happens to have been lowered to the
/// given IR type. This should only ever be used for function-local,
/// IR-driven manipulations like saving and restoring a value. Do
/// not hand this address off to arbitrary IRGen routines, and especially
/// do not pass it as an argument to a function that might expect a
/// properly ABI-aligned value.
Address CreateDefaultAlignTempAlloca(llvm::Type *Ty,
                                     const Twine &Name = "tmp");

/// InitTempAlloca - Provide an initial value for the given alloca which
/// will be observable at all locations in the function.
///
/// The address should be something that was returned from one of
/// the CreateTempAlloca or CreateMemTemp routines, and the
/// initializer must be valid in the entry block (i.e. it must
/// either be a constant or an argument value).
void InitTempAlloca(Address Alloca, llvm::Value *Value);

/// CreateIRTemp - Create a temporary IR object of the given type, with
/// appropriate alignment. This routine should only be used when a temporary
/// value needs to be stored into an alloca (for example, to avoid explicit
/// PHI construction), but the type is the IR type, not the type appropriate
/// for storing in memory.
///
/// That is, this is exactly equivalent to CreateMemTemp, but calling
/// ConvertType instead of ConvertTypeForMem.
Address CreateIRTemp(QualType T, const Twine &Name = "tmp");

/// CreateMemTemp - Create a temporary memory object of the given type, with
/// appropriate alignment.
Address CreateMemTemp(QualType T, const Twine &Name = "tmp");
Address CreateMemTemp(QualType T, CharUnits Align, const Twine &Name = "tmp");

/// CreateAggTemp - Create a temporary memory object for the given
/// aggregate type.
AggValueSlot CreateAggTemp(QualType T, const Twine &Name = "tmp") {
  return AggValueSlot::forAddr(CreateMemTemp(T, Name),
                               T.getQualifiers(),
                               AggValueSlot::IsNotDestructed,
                               AggValueSlot::DoesNotNeedGCBarriers,
                               AggValueSlot::IsNotAliased);
}
/// Emit a cast to void* in the appropriate address space.
llvm::Value *EmitCastToVoidPtr(llvm::Value *value);

/// EvaluateExprAsBool - Perform the usual unary conversions on the specified
/// expression and compare the result against zero, returning an Int1Ty value.
llvm::Value *EvaluateExprAsBool(const Expr *E);

/// EmitIgnoredExpr - Emit an expression in a context which ignores the
/// result.
void EmitIgnoredExpr(const Expr *E);

/// EmitAnyExpr - Emit code to compute the specified expression which can have
/// any type. The result is returned as an RValue struct. If this is an
/// aggregate expression, the aggloc/agglocvolatile arguments indicate where
/// the result should be returned.
///
/// \param ignoreResult True if the resulting value isn't used.
RValue EmitAnyExpr(const Expr *E,
                   AggValueSlot aggSlot = AggValueSlot::ignored(),
                   bool ignoreResult = false);

// EmitVAListRef - Emit a "reference" to a va_list; this is either the address
// or the value of the expression, depending on how va_list is defined.
Address EmitVAListRef(const Expr *E);

/// Emit a "reference" to a __builtin_ms_va_list; this is
/// always the value of the expression, because a __builtin_ms_va_list is a
/// pointer to a char.
Address EmitMSVAListRef(const Expr *E);

/// EmitAnyExprToTemp - Similarly to EmitAnyExpr(), however, the result will
/// always be accessible even if no aggregate location is provided.
RValue EmitAnyExprToTemp(const Expr *E);

/// EmitAnyExprToMem - Emits the code necessary to evaluate an
/// arbitrary expression into the given memory location.
void EmitAnyExprToMem(const Expr *E, Address Location,
                      Qualifiers Quals, bool IsInitializer);

void EmitAnyExprToExn(const Expr *E, Address Addr);

/// EmitExprAsInit - Emits the code necessary to initialize a
/// location in memory with the given initializer.
void EmitExprAsInit(const Expr *init, const ValueDecl *D, LValue lvalue,
                    bool capturedByInit);
  1401. /// hasVolatileMember - returns true if aggregate type has a volatile
  1402. /// member.
  1403. bool hasVolatileMember(QualType T) {
  1404. if (const RecordType *RT = T->getAs<RecordType>()) {
  1405. const RecordDecl *RD = cast<RecordDecl>(RT->getDecl());
  1406. return RD->hasVolatileMember();
  1407. }
  1408. return false;
  1409. }
/// EmitAggregateAssign - Emit an aggregate assignment.
///
/// The difference to EmitAggregateCopy is that tail padding is not copied.
/// This is required for correctness when assigning non-POD structures in C++.
void EmitAggregateAssign(Address DestPtr, Address SrcPtr,
                         QualType EltTy) {
  bool IsVolatile = hasVolatileMember(EltTy);
  EmitAggregateCopy(DestPtr, SrcPtr, EltTy, IsVolatile, true);
}

/// EmitAggregateCopyCtor - Emit the aggregate copy performed by a copy
/// constructor: never volatile, and padding may be copied.
void EmitAggregateCopyCtor(Address DestPtr, Address SrcPtr,
                           QualType DestTy, QualType SrcTy) {
  EmitAggregateCopy(DestPtr, SrcPtr, SrcTy, /*IsVolatile=*/false,
                    /*IsAssignment=*/false);
}

/// EmitAggregateCopy - Emit an aggregate copy.
///
/// \param isVolatile - True iff either the source or the destination is
/// volatile.
/// \param isAssignment - If false, allow padding to be copied. This often
/// yields more efficient code.
void EmitAggregateCopy(Address DestPtr, Address SrcPtr,
                       QualType EltTy, bool isVolatile=false,
                       bool isAssignment = false);
  1433. /// GetAddrOfLocalVar - Return the address of a local variable.
  1434. Address GetAddrOfLocalVar(const VarDecl *VD) {
  1435. auto it = LocalDeclMap.find(VD);
  1436. assert(it != LocalDeclMap.end() &&
  1437. "Invalid argument to GetAddrOfLocalVar(), no decl!");
  1438. return it->second;
  1439. }
  1440. /// getOpaqueLValueMapping - Given an opaque value expression (which
  1441. /// must be mapped to an l-value), return its mapping.
  1442. const LValue &getOpaqueLValueMapping(const OpaqueValueExpr *e) {
  1443. assert(OpaqueValueMapping::shouldBindAsLValue(e));
  1444. llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
  1445. it = OpaqueLValues.find(e);
  1446. assert(it != OpaqueLValues.end() && "no mapping for opaque value!");
  1447. return it->second;
  1448. }
  1449. /// getOpaqueRValueMapping - Given an opaque value expression (which
  1450. /// must be mapped to an r-value), return its mapping.
  1451. const RValue &getOpaqueRValueMapping(const OpaqueValueExpr *e) {
  1452. assert(!OpaqueValueMapping::shouldBindAsLValue(e));
  1453. llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
  1454. it = OpaqueRValues.find(e);
  1455. assert(it != OpaqueRValues.end() && "no mapping for opaque value!");
  1456. return it->second;
  1457. }
/// getAccessedFieldNo - Given an encoded value and a result number, return
/// the input field number being accessed.
static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

llvm::BlockAddress *GetAddrOfLabel(const LabelDecl *L);
llvm::BasicBlock *GetIndirectGotoBlock();

/// EmitNullInitialization - Generate code to set a value of the given type to
/// null. If the type contains data member pointers, they will be initialized
/// to -1 in accordance with the Itanium C++ ABI.
void EmitNullInitialization(Address DestPtr, QualType Ty);

/// Emits a call to an LLVM variable-argument intrinsic, either
/// \c llvm.va_start or \c llvm.va_end.
/// \param ArgValue A reference to the \c va_list as emitted by either
/// \c EmitVAListRef or \c EmitMSVAListRef.
/// \param IsStart If \c true, emits a call to \c llvm.va_start; otherwise,
/// calls \c llvm.va_end.
llvm::Value *EmitVAStartEnd(llvm::Value *ArgValue, bool IsStart);

/// Generate code to get an argument from the passed in pointer
/// and update it accordingly.
/// \param VE The \c VAArgExpr for which to generate code.
/// \param VAListAddr Receives a reference to the \c va_list as emitted by
/// either \c EmitVAListRef or \c EmitMSVAListRef.
/// \returns A pointer to the argument.
// FIXME: We should be able to get rid of this method and use the va_arg
// instruction in LLVM instead once it works well enough.
Address EmitVAArg(VAArgExpr *VE, Address &VAListAddr);

/// emitArrayLength - Compute the length of an array, even if it's a
/// VLA, and drill down to the base element type.
llvm::Value *emitArrayLength(const ArrayType *arrayType,
                             QualType &baseType,
                             Address &addr);

/// EmitVariablyModifiedType - Capture all the sizes for the VLA expressions
/// in the given variably-modified type and store them in the VLASizeMap.
///
/// This function can be called with a null (unreachable) insert point.
void EmitVariablyModifiedType(QualType Ty);

/// getVLASize - Returns an LLVM value that corresponds to the size,
/// in non-variably-sized elements, of a variable length array type,
/// plus that largest non-variably-sized element type. Assumes that
/// the type has already been emitted with EmitVariablyModifiedType.
std::pair<llvm::Value*,QualType> getVLASize(const VariableArrayType *vla);
std::pair<llvm::Value*,QualType> getVLASize(QualType vla);

/// LoadCXXThis - Load the value of 'this'. This function is only valid while
/// generating code for a C++ member function.
llvm::Value *LoadCXXThis() {
  assert(CXXThisValue && "no 'this' value for this function");
  return CXXThisValue;
}
Address LoadCXXThisAddress();

/// LoadCXXVTT - Load the VTT parameter passed to base
/// constructors/destructors that have virtual bases.
// FIXME: Every place that calls LoadCXXVTT is something
// that needs to be abstracted properly.
llvm::Value *LoadCXXVTT() {
  assert(CXXStructorImplicitParamValue && "no VTT value for this function");
  return CXXStructorImplicitParamValue;
}

/// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
/// complete class to the given direct base.
Address
GetAddressOfDirectBaseInCompleteClass(Address Value,
                                      const CXXRecordDecl *Derived,
                                      const CXXRecordDecl *Base,
                                      bool BaseIsVirtual);

static bool ShouldNullCheckClassCastValue(const CastExpr *Cast);

/// GetAddressOfBaseClass - This function will add the necessary delta to the
/// load of 'this' and returns address of the base class.
Address GetAddressOfBaseClass(Address Value,
                              const CXXRecordDecl *Derived,
                              CastExpr::path_const_iterator PathBegin,
                              CastExpr::path_const_iterator PathEnd,
                              bool NullCheckValue, SourceLocation Loc);

Address GetAddressOfDerivedClass(Address Value,
                                 const CXXRecordDecl *Derived,
                                 CastExpr::path_const_iterator PathBegin,
                                 CastExpr::path_const_iterator PathEnd,
                                 bool NullCheckValue);

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
/// FIXME: VTTs are Itanium ABI-specific, so the definition should move
/// to ItaniumCXXABI.cpp together with all the references to VTT.
llvm::Value *GetVTTParameter(GlobalDecl GD, bool ForVirtualBase,
                             bool Delegating);
void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                    CXXCtorType CtorType,
                                    const FunctionArgList &Args,
                                    SourceLocation Loc);

// It's important not to confuse this and the previous function. Delegating
// constructors are the C++0x feature. The constructor delegate optimization
// is used to reduce duplication in the base and complete constructors where
// they are substantially the same.
void EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                      const FunctionArgList &Args);

void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
                            bool ForVirtualBase, bool Delegating,
                            Address This, const CXXConstructExpr *E);

/// Emit assumption load for all bases. Requires to be called only on
/// most-derived class and not under construction of the object.
void EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl, Address This);

/// Emit assumption that vptr load == global vtable.
void EmitVTableAssumptionLoad(const VPtr &vptr, Address This);

void EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                    Address This, Address Src,
                                    const CXXConstructExpr *E);

void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                const ArrayType *ArrayTy,
                                Address ArrayPtr,
                                const CXXConstructExpr *E,
                                bool ZeroInitialization = false);

void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                llvm::Value *NumElements,
                                Address ArrayPtr,
                                const CXXConstructExpr *E,
                                bool ZeroInitialization = false);

static Destroyer destroyCXXObject;

void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
                           bool ForVirtualBase, bool Delegating,
                           Address This);

void EmitNewArrayInitializer(const CXXNewExpr *E, QualType elementType,
                             llvm::Type *ElementTy, Address NewPtr,
                             llvm::Value *NumElements,
                             llvm::Value *AllocSizeWithoutCookie);

void EmitCXXTemporary(const CXXTemporary *Temporary, QualType TempType,
                      Address Ptr);

llvm::Value *EmitLifetimeStart(uint64_t Size, llvm::Value *Addr);
void EmitLifetimeEnd(llvm::Value *Size, llvm::Value *Addr);

llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
                    QualType DeleteTy);

RValue EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                const Expr *Arg, bool IsDelete);

llvm::Value *EmitCXXTypeidExpr(const CXXTypeidExpr *E);
llvm::Value *EmitDynamicCast(Address V, const CXXDynamicCastExpr *DCE);
Address EmitCXXUuidofExpr(const CXXUuidofExpr *E);
/// \brief Situations in which we might emit a check for the suitability of a
/// pointer or glvalue.
enum TypeCheckKind {
  /// Checking the operand of a load. Must be suitably sized and aligned.
  TCK_Load,
  /// Checking the destination of a store. Must be suitably sized and aligned.
  TCK_Store,
  /// Checking the bound value in a reference binding. Must be suitably sized
  /// and aligned, but is not required to refer to an object (until the
  /// reference is used), per core issue 453.
  TCK_ReferenceBinding,
  /// Checking the object expression in a non-static data member access. Must
  /// be an object within its lifetime.
  TCK_MemberAccess,
  /// Checking the 'this' pointer for a call to a non-static member function.
  /// Must be an object within its lifetime.
  TCK_MemberCall,
  /// Checking the 'this' pointer for a constructor call.
  TCK_ConstructorCall,
  /// Checking the operand of a static_cast to a derived pointer type. Must be
  /// null or an object within its lifetime.
  TCK_DowncastPointer,
  /// Checking the operand of a static_cast to a derived reference type. Must
  /// be an object within its lifetime.
  TCK_DowncastReference,
  /// Checking the operand of a cast to a base object. Must be suitably sized
  /// and aligned.
  TCK_Upcast,
  /// Checking the operand of a cast to a virtual base object. Must be an
  /// object within its lifetime.
  TCK_UpcastToVirtualBase
};

/// \brief Whether any type-checking sanitizers are enabled. If \c false,
/// calls to EmitTypeCheck can be skipped.
bool sanitizePerformTypeCheck() const;

/// \brief Emit a check that \p V is the address of storage of the
/// appropriate size and alignment for an object of type \p Type.
void EmitTypeCheck(TypeCheckKind TCK, SourceLocation Loc, llvm::Value *V,
                   QualType Type, CharUnits Alignment = CharUnits::Zero(),
                   bool SkipNullCheck = false);

/// \brief Emit a check that \p Base points into an array object, which
/// we can access at index \p Index. \p Accessed should be \c false if this
/// expression is used as an lvalue, for instance in "&Arr[Idx]".
void EmitBoundsCheck(const Expr *E, const Expr *Base, llvm::Value *Index,
                     QualType IndexType, bool Accessed);

llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV,
                                     bool isInc, bool isPre);
ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
                                       bool isInc, bool isPre);

/// Emit an llvm.assume-based alignment assumption for \p PtrValue.
void EmitAlignmentAssumption(llvm::Value *PtrValue, unsigned Alignment,
                             llvm::Value *OffsetValue = nullptr) {
  Builder.CreateAlignmentAssumption(CGM.getDataLayout(), PtrValue, Alignment,
                                    OffsetValue);
}
//===--------------------------------------------------------------------===//
//                            Declaration Emission
//===--------------------------------------------------------------------===//

/// EmitDecl - Emit a declaration.
///
/// This function can be called with a null (unreachable) insert point.
void EmitDecl(const Decl &D);

/// EmitVarDecl - Emit a local variable declaration.
///
/// This function can be called with a null (unreachable) insert point.
void EmitVarDecl(const VarDecl &D);

void EmitScalarInit(const Expr *init, const ValueDecl *D, LValue lvalue,
                    bool capturedByInit);
void EmitScalarInit(llvm::Value *init, LValue lvalue);

typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
                           llvm::Value *Address);

/// \brief Determine whether the given initializer is trivial in the sense
/// that it requires no code to be generated.
bool isTrivialInitializer(const Expr *Init);

/// EmitAutoVarDecl - Emit an auto variable declaration.
///
/// This function can be called with a null (unreachable) insert point.
void EmitAutoVarDecl(const VarDecl &D);
/// Captures the state produced when emitting an automatic variable, as
/// returned by EmitAutoVarAlloca and consumed by EmitAutoVarInit /
/// EmitAutoVarCleanups.
class AutoVarEmission {
  friend class CodeGenFunction;

  const VarDecl *Variable;

  /// The address of the alloca. Invalid if the variable was emitted
  /// as a global constant.
  Address Addr;

  // Flag used for the named-return-value optimization; null when NRVO does
  // not apply — TODO(review) confirm exact semantics against EmitAutoVarAlloca.
  llvm::Value *NRVOFlag;

  /// True if the variable is a __block variable.
  bool IsByRef;

  /// True if the variable is of aggregate type and has a constant
  /// initializer.
  bool IsConstantAggregate;

  /// Non-null if we should use lifetime annotations.
  llvm::Value *SizeForLifetimeMarkers;

  struct Invalid {};
  AutoVarEmission(Invalid) : Variable(nullptr), Addr(Address::invalid()) {}

  AutoVarEmission(const VarDecl &variable)
    : Variable(&variable), Addr(Address::invalid()), NRVOFlag(nullptr),
      IsByRef(false), IsConstantAggregate(false),
      SizeForLifetimeMarkers(nullptr) {}

  bool wasEmittedAsGlobal() const { return !Addr.isValid(); }

public:
  static AutoVarEmission invalid() { return AutoVarEmission(Invalid()); }

  bool useLifetimeMarkers() const {
    return SizeForLifetimeMarkers != nullptr;
  }
  llvm::Value *getSizeForLifetimeMarkers() const {
    assert(useLifetimeMarkers());
    return SizeForLifetimeMarkers;
  }

  /// Returns the raw, allocated address, which is not necessarily
  /// the address of the object itself.
  Address getAllocatedAddress() const {
    return Addr;
  }

  /// Returns the address of the object within this declaration.
  /// Note that this does not chase the forwarding pointer for
  /// __block decls.
  Address getObjectAddress(CodeGenFunction &CGF) const {
    if (!IsByRef) return Addr;
    return CGF.emitBlockByrefAddress(Addr, Variable, /*forward*/ false);
  }
};
  1712. AutoVarEmission EmitAutoVarAlloca(const VarDecl &var);
  1713. void EmitAutoVarInit(const AutoVarEmission &emission);
  1714. void EmitAutoVarCleanups(const AutoVarEmission &emission);
  1715. void emitAutoVarTypeCleanup(const AutoVarEmission &emission,
  1716. QualType::DestructionKind dtorKind);
  1717. void EmitStaticVarDecl(const VarDecl &D,
  1718. llvm::GlobalValue::LinkageTypes Linkage);
  1719. class ParamValue {
  1720. llvm::Value *Value;
  1721. unsigned Alignment;
  1722. ParamValue(llvm::Value *V, unsigned A) : Value(V), Alignment(A) {}
  1723. public:
  1724. static ParamValue forDirect(llvm::Value *value) {
  1725. return ParamValue(value, 0);
  1726. }
  1727. static ParamValue forIndirect(Address addr) {
  1728. assert(!addr.getAlignment().isZero());
  1729. return ParamValue(addr.getPointer(), addr.getAlignment().getQuantity());
  1730. }
  1731. bool isIndirect() const { return Alignment != 0; }
  1732. llvm::Value *getAnyValue() const { return Value; }
  1733. llvm::Value *getDirectValue() const {
  1734. assert(!isIndirect());
  1735. return Value;
  1736. }
  1737. Address getIndirectAddress() const {
  1738. assert(isIndirect());
  1739. return Address(Value, CharUnits::fromQuantity(Alignment));
  1740. }
  1741. };
  1742. /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl.
  1743. void EmitParmDecl(const VarDecl &D, ParamValue Arg, unsigned ArgNo);
  1744. /// protectFromPeepholes - Protect a value that we're intending to
  1745. /// store to the side, but which will probably be used later, from
  1746. /// aggressive peepholing optimizations that might delete it.
  1747. ///
  1748. /// Pass the result to unprotectFromPeepholes to declare that
  1749. /// protection is no longer required.
  1750. ///
  1751. /// There's no particular reason why this shouldn't apply to
  1752. /// l-values, it's just that no existing peepholes work on pointers.
  1753. PeepholeProtection protectFromPeepholes(RValue rvalue);
  1754. void unprotectFromPeepholes(PeepholeProtection protection);
  1755. //===--------------------------------------------------------------------===//
  1756. // Statement Emission
  1757. //===--------------------------------------------------------------------===//
  1758. /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info.
  1759. void EmitStopPoint(const Stmt *S);
  1760. /// EmitStmt - Emit the code for the statement \arg S. It is legal to call
  1761. /// this function even if there is no current insertion point.
  1762. ///
  1763. /// This function may clear the current insertion point; callers should use
  1764. /// EnsureInsertPoint if they wish to subsequently generate code without first
  1765. /// calling EmitBlock, EmitBranch, or EmitStmt.
  1766. void EmitStmt(const Stmt *S);
  1767. /// EmitSimpleStmt - Try to emit a "simple" statement which does not
  1768. /// necessarily require an insertion point or debug information; typically
  1769. /// because the statement amounts to a jump or a container of other
  1770. /// statements.
  1771. ///
  1772. /// \return True if the statement was handled.
  1773. bool EmitSimpleStmt(const Stmt *S);
  1774. Address EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
  1775. AggValueSlot AVS = AggValueSlot::ignored());
  1776. Address EmitCompoundStmtWithoutScope(const CompoundStmt &S,
  1777. bool GetLast = false,
  1778. AggValueSlot AVS =
  1779. AggValueSlot::ignored());
  1780. /// EmitLabel - Emit the block for the given label. It is legal to call this
  1781. /// function even if there is no current insertion point.
  1782. void EmitLabel(const LabelDecl *D); // helper for EmitLabelStmt.
  1783. void EmitLabelStmt(const LabelStmt &S);
  1784. void EmitAttributedStmt(const AttributedStmt &S);
  1785. void EmitGotoStmt(const GotoStmt &S);
  1786. void EmitIndirectGotoStmt(const IndirectGotoStmt &S);
  1787. void EmitIfStmt(const IfStmt &S);
  1788. void EmitWhileStmt(const WhileStmt &S,
  1789. ArrayRef<const Attr *> Attrs = None);
  1790. void EmitDoStmt(const DoStmt &S, ArrayRef<const Attr *> Attrs = None);
  1791. void EmitForStmt(const ForStmt &S,
  1792. ArrayRef<const Attr *> Attrs = None);
  1793. void EmitReturnStmt(const ReturnStmt &S);
  1794. void EmitDeclStmt(const DeclStmt &S);
  1795. void EmitBreakStmt(const BreakStmt &S);
  1796. void EmitContinueStmt(const ContinueStmt &S);
  1797. void EmitSwitchStmt(const SwitchStmt &S);
  1798. void EmitDefaultStmt(const DefaultStmt &S);
  1799. void EmitCaseStmt(const CaseStmt &S);
  1800. void EmitCaseStmtRange(const CaseStmt &S);
  1801. void EmitAsmStmt(const AsmStmt &S);
  1802. void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
  1803. void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
  1804. void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S);
  1805. void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);
  1806. void EmitObjCAutoreleasePoolStmt(const ObjCAutoreleasePoolStmt &S);
  1807. void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
  1808. void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
  1809. void EmitCXXTryStmt(const CXXTryStmt &S);
  1810. void EmitSEHTryStmt(const SEHTryStmt &S);
  1811. void EmitSEHLeaveStmt(const SEHLeaveStmt &S);
  1812. void EnterSEHTryStmt(const SEHTryStmt &S);
  1813. void ExitSEHTryStmt(const SEHTryStmt &S);
  1814. void startOutlinedSEHHelper(CodeGenFunction &ParentCGF, bool IsFilter,
  1815. const Stmt *OutlinedStmt);
  1816. llvm::Function *GenerateSEHFilterFunction(CodeGenFunction &ParentCGF,
  1817. const SEHExceptStmt &Except);
  1818. llvm::Function *GenerateSEHFinallyFunction(CodeGenFunction &ParentCGF,
  1819. const SEHFinallyStmt &Finally);
  1820. void EmitSEHExceptionCodeSave(CodeGenFunction &ParentCGF,
  1821. llvm::Value *ParentFP,
  1822. llvm::Value *EntryEBP);
  1823. llvm::Value *EmitSEHExceptionCode();
  1824. llvm::Value *EmitSEHExceptionInfo();
  1825. llvm::Value *EmitSEHAbnormalTermination();
  1826. /// Scan the outlined statement for captures from the parent function. For
  1827. /// each capture, mark the capture as escaped and emit a call to
  1828. /// llvm.localrecover. Insert the localrecover result into the LocalDeclMap.
  1829. void EmitCapturedLocals(CodeGenFunction &ParentCGF, const Stmt *OutlinedStmt,
  1830. bool IsFilter);
  1831. /// Recovers the address of a local in a parent function. ParentVar is the
  1832. /// address of the variable used in the immediate parent function. It can
  1833. /// either be an alloca or a call to llvm.localrecover if there are nested
  1834. /// outlined functions. ParentFP is the frame pointer of the outermost parent
  1835. /// frame.
  1836. Address recoverAddrOfEscapedLocal(CodeGenFunction &ParentCGF,
  1837. Address ParentVar,
  1838. llvm::Value *ParentFP);
  1839. void EmitCXXForRangeStmt(const CXXForRangeStmt &S,
  1840. ArrayRef<const Attr *> Attrs = None);
  1841. /// Returns calculated size of the specified type.
  1842. llvm::Value *getTypeSize(QualType Ty);
  1843. LValue InitCapturedStruct(const CapturedStmt &S);
  1844. llvm::Function *EmitCapturedStmt(const CapturedStmt &S, CapturedRegionKind K);
  1845. llvm::Function *GenerateCapturedStmtFunction(const CapturedStmt &S);
  1846. Address GenerateCapturedStmtArgument(const CapturedStmt &S);
  1847. llvm::Function *GenerateOpenMPCapturedStmtFunction(const CapturedStmt &S);
  1848. void GenerateOpenMPCapturedVars(const CapturedStmt &S,
  1849. SmallVectorImpl<llvm::Value *> &CapturedVars);
  1850. void emitOMPSimpleStore(LValue LVal, RValue RVal, QualType RValTy,
  1851. SourceLocation Loc);
  1852. /// \brief Perform element by element copying of arrays with type \a
  1853. /// OriginalType from \a SrcAddr to \a DestAddr using copying procedure
  1854. /// generated by \a CopyGen.
  1855. ///
  1856. /// \param DestAddr Address of the destination array.
  1857. /// \param SrcAddr Address of the source array.
  1858. /// \param OriginalType Type of destination and source arrays.
  1859. /// \param CopyGen Copying procedure that copies value of single array element
  1860. /// to another single array element.
  1861. void EmitOMPAggregateAssign(
  1862. Address DestAddr, Address SrcAddr, QualType OriginalType,
  1863. const llvm::function_ref<void(Address, Address)> &CopyGen);
  1864. /// \brief Emit proper copying of data from one variable to another.
  1865. ///
  1866. /// \param OriginalType Original type of the copied variables.
  1867. /// \param DestAddr Destination address.
  1868. /// \param SrcAddr Source address.
  1869. /// \param DestVD Destination variable used in \a CopyExpr (for arrays, has
  1870. /// type of the base array element).
  1871. /// \param SrcVD Source variable used in \a CopyExpr (for arrays, has type of
  1872. /// the base array element).
1873. /// \param Copy Actual copy expression for copying data from \a SrcVD to \a
  1874. /// DestVD.
  1875. void EmitOMPCopy(QualType OriginalType,
  1876. Address DestAddr, Address SrcAddr,
  1877. const VarDecl *DestVD, const VarDecl *SrcVD,
  1878. const Expr *Copy);
  1879. /// \brief Emit atomic update code for constructs: \a X = \a X \a BO \a E or
  1880. /// \a X = \a E \a BO \a E.
  1881. ///
  1882. /// \param X Value to be updated.
  1883. /// \param E Update value.
  1884. /// \param BO Binary operation for update operation.
  1885. /// \param IsXLHSInRHSPart true if \a X is LHS in RHS part of the update
  1886. /// expression, false otherwise.
  1887. /// \param AO Atomic ordering of the generated atomic instructions.
  1888. /// \param CommonGen Code generator for complex expressions that cannot be
  1889. /// expressed through atomicrmw instruction.
  1890. /// \returns <true, OldAtomicValue> if simple 'atomicrmw' instruction was
  1891. /// generated, <false, RValue::get(nullptr)> otherwise.
  1892. std::pair<bool, RValue> EmitOMPAtomicSimpleUpdateExpr(
  1893. LValue X, RValue E, BinaryOperatorKind BO, bool IsXLHSInRHSPart,
  1894. llvm::AtomicOrdering AO, SourceLocation Loc,
  1895. const llvm::function_ref<RValue(RValue)> &CommonGen);
  1896. bool EmitOMPFirstprivateClause(const OMPExecutableDirective &D,
  1897. OMPPrivateScope &PrivateScope);
  1898. void EmitOMPPrivateClause(const OMPExecutableDirective &D,
  1899. OMPPrivateScope &PrivateScope);
  1900. /// \brief Emit code for copyin clause in \a D directive. The next code is
  1901. /// generated at the start of outlined functions for directives:
  1902. /// \code
  1903. /// threadprivate_var1 = master_threadprivate_var1;
  1904. /// operator=(threadprivate_var2, master_threadprivate_var2);
  1905. /// ...
  1906. /// __kmpc_barrier(&loc, global_tid);
  1907. /// \endcode
  1908. ///
  1909. /// \param D OpenMP directive possibly with 'copyin' clause(s).
  1910. /// \returns true if at least one copyin variable is found, false otherwise.
  1911. bool EmitOMPCopyinClause(const OMPExecutableDirective &D);
  1912. /// \brief Emit initial code for lastprivate variables. If some variable is
  1913. /// not also firstprivate, then the default initialization is used. Otherwise
  1914. /// initialization of this variable is performed by EmitOMPFirstprivateClause
  1915. /// method.
  1916. ///
  1917. /// \param D Directive that may have 'lastprivate' directives.
  1918. /// \param PrivateScope Private scope for capturing lastprivate variables for
  1919. /// proper codegen in internal captured statement.
  1920. ///
  1921. /// \returns true if there is at least one lastprivate variable, false
  1922. /// otherwise.
  1923. bool EmitOMPLastprivateClauseInit(const OMPExecutableDirective &D,
  1924. OMPPrivateScope &PrivateScope);
  1925. /// \brief Emit final copying of lastprivate values to original variables at
  1926. /// the end of the worksharing or simd directive.
  1927. ///
  1928. /// \param D Directive that has at least one 'lastprivate' directives.
  1929. /// \param IsLastIterCond Boolean condition that must be set to 'i1 true' if
  1930. /// it is the last iteration of the loop code in associated directive, or to
  1931. /// 'i1 false' otherwise. If this item is nullptr, no final check is required.
  1932. void EmitOMPLastprivateClauseFinal(const OMPExecutableDirective &D,
  1933. bool NoFinals,
  1934. llvm::Value *IsLastIterCond = nullptr);
  1935. /// Emit initial code for linear clauses.
  1936. void EmitOMPLinearClause(const OMPLoopDirective &D,
  1937. CodeGenFunction::OMPPrivateScope &PrivateScope);
  1938. /// Emit final code for linear clauses.
  1939. /// \param CondGen Optional conditional code for final part of codegen for
  1940. /// linear clause.
  1941. void EmitOMPLinearClauseFinal(
  1942. const OMPLoopDirective &D,
  1943. const llvm::function_ref<llvm::Value *(CodeGenFunction &)> &CondGen);
  1944. /// \brief Emit initial code for reduction variables. Creates reduction copies
  1945. /// and initializes them with the values according to OpenMP standard.
  1946. ///
  1947. /// \param D Directive (possibly) with the 'reduction' clause.
  1948. /// \param PrivateScope Private scope for capturing reduction variables for
  1949. /// proper codegen in internal captured statement.
  1950. ///
  1951. void EmitOMPReductionClauseInit(const OMPExecutableDirective &D,
  1952. OMPPrivateScope &PrivateScope);
  1953. /// \brief Emit final update of reduction values to original variables at
  1954. /// the end of the directive.
  1955. ///
  1956. /// \param D Directive that has at least one 'reduction' directives.
  1957. void EmitOMPReductionClauseFinal(const OMPExecutableDirective &D);
  1958. /// \brief Emit initial code for linear variables. Creates private copies
  1959. /// and initializes them with the values according to OpenMP standard.
  1960. ///
  1961. /// \param D Directive (possibly) with the 'linear' clause.
  1962. void EmitOMPLinearClauseInit(const OMPLoopDirective &D);
  1963. typedef const llvm::function_ref<void(CodeGenFunction & /*CGF*/,
  1964. llvm::Value * /*OutlinedFn*/,
  1965. const OMPTaskDataTy & /*Data*/)>
  1966. TaskGenTy;
  1967. void EmitOMPTaskBasedDirective(const OMPExecutableDirective &S,
  1968. const RegionCodeGenTy &BodyGen,
  1969. const TaskGenTy &TaskGen, OMPTaskDataTy &Data);
  1970. void EmitOMPParallelDirective(const OMPParallelDirective &S);
  1971. void EmitOMPSimdDirective(const OMPSimdDirective &S);
  1972. void EmitOMPForDirective(const OMPForDirective &S);
  1973. void EmitOMPForSimdDirective(const OMPForSimdDirective &S);
  1974. void EmitOMPSectionsDirective(const OMPSectionsDirective &S);
  1975. void EmitOMPSectionDirective(const OMPSectionDirective &S);
  1976. void EmitOMPSingleDirective(const OMPSingleDirective &S);
  1977. void EmitOMPMasterDirective(const OMPMasterDirective &S);
  1978. void EmitOMPCriticalDirective(const OMPCriticalDirective &S);
  1979. void EmitOMPParallelForDirective(const OMPParallelForDirective &S);
  1980. void EmitOMPParallelForSimdDirective(const OMPParallelForSimdDirective &S);
  1981. void EmitOMPParallelSectionsDirective(const OMPParallelSectionsDirective &S);
  1982. void EmitOMPTaskDirective(const OMPTaskDirective &S);
  1983. void EmitOMPTaskyieldDirective(const OMPTaskyieldDirective &S);
  1984. void EmitOMPBarrierDirective(const OMPBarrierDirective &S);
  1985. void EmitOMPTaskwaitDirective(const OMPTaskwaitDirective &S);
  1986. void EmitOMPTaskgroupDirective(const OMPTaskgroupDirective &S);
  1987. void EmitOMPFlushDirective(const OMPFlushDirective &S);
  1988. void EmitOMPOrderedDirective(const OMPOrderedDirective &S);
  1989. void EmitOMPAtomicDirective(const OMPAtomicDirective &S);
  1990. void EmitOMPTargetDirective(const OMPTargetDirective &S);
  1991. void EmitOMPTargetDataDirective(const OMPTargetDataDirective &S);
  1992. void EmitOMPTargetEnterDataDirective(const OMPTargetEnterDataDirective &S);
  1993. void EmitOMPTargetExitDataDirective(const OMPTargetExitDataDirective &S);
  1994. void EmitOMPTargetUpdateDirective(const OMPTargetUpdateDirective &S);
  1995. void EmitOMPTargetParallelDirective(const OMPTargetParallelDirective &S);
  1996. void
  1997. EmitOMPTargetParallelForDirective(const OMPTargetParallelForDirective &S);
  1998. void EmitOMPTeamsDirective(const OMPTeamsDirective &S);
  1999. void
  2000. EmitOMPCancellationPointDirective(const OMPCancellationPointDirective &S);
  2001. void EmitOMPCancelDirective(const OMPCancelDirective &S);
  2002. void EmitOMPTaskLoopBasedDirective(const OMPLoopDirective &S);
  2003. void EmitOMPTaskLoopDirective(const OMPTaskLoopDirective &S);
  2004. void EmitOMPTaskLoopSimdDirective(const OMPTaskLoopSimdDirective &S);
  2005. void EmitOMPDistributeDirective(const OMPDistributeDirective &S);
  2006. void EmitOMPDistributeLoop(const OMPDistributeDirective &S);
  2007. void EmitOMPDistributeParallelForDirective(
  2008. const OMPDistributeParallelForDirective &S);
  2009. /// Emit outlined function for the target directive.
  2010. static std::pair<llvm::Function * /*OutlinedFn*/,
  2011. llvm::Constant * /*OutlinedFnID*/>
  2012. EmitOMPTargetDirectiveOutlinedFunction(CodeGenModule &CGM,
  2013. const OMPTargetDirective &S,
  2014. StringRef ParentName,
  2015. bool IsOffloadEntry);
  2016. /// \brief Emit inner loop of the worksharing/simd construct.
  2017. ///
  2018. /// \param S Directive, for which the inner loop must be emitted.
  2019. /// \param RequiresCleanup true, if directive has some associated private
  2020. /// variables.
2021. /// \param LoopCond Boolean condition for loop continuation.
  2022. /// \param IncExpr Increment expression for loop control variable.
  2023. /// \param BodyGen Generator for the inner body of the inner loop.
2024. /// \param PostIncGen Generator for post-increment code (required for ordered
2025. /// loop directives).
  2026. void EmitOMPInnerLoop(
  2027. const Stmt &S, bool RequiresCleanup, const Expr *LoopCond,
  2028. const Expr *IncExpr,
  2029. const llvm::function_ref<void(CodeGenFunction &)> &BodyGen,
  2030. const llvm::function_ref<void(CodeGenFunction &)> &PostIncGen);
  2031. JumpDest getOMPCancelDestination(OpenMPDirectiveKind Kind);
  2032. /// Emit initial code for loop counters of loop-based directives.
  2033. void EmitOMPPrivateLoopCounters(const OMPLoopDirective &S,
  2034. OMPPrivateScope &LoopScope);
  2035. private:
  2036. /// Helpers for the OpenMP loop directives.
  2037. void EmitOMPLoopBody(const OMPLoopDirective &D, JumpDest LoopExit);
  2038. void EmitOMPSimdInit(const OMPLoopDirective &D, bool IsMonotonic = false);
  2039. void EmitOMPSimdFinal(
  2040. const OMPLoopDirective &D,
  2041. const llvm::function_ref<llvm::Value *(CodeGenFunction &)> &CondGen);
  2042. /// \brief Emit code for the worksharing loop-based directive.
  2043. /// \return true, if this construct has any lastprivate clause, false -
  2044. /// otherwise.
  2045. bool EmitOMPWorksharingLoop(const OMPLoopDirective &S);
  2046. void EmitOMPOuterLoop(bool IsMonotonic, bool DynamicOrOrdered,
  2047. const OMPLoopDirective &S, OMPPrivateScope &LoopScope, bool Ordered,
  2048. Address LB, Address UB, Address ST, Address IL, llvm::Value *Chunk);
  2049. void EmitOMPForOuterLoop(const OpenMPScheduleTy &ScheduleKind,
  2050. bool IsMonotonic, const OMPLoopDirective &S,
  2051. OMPPrivateScope &LoopScope, bool Ordered, Address LB,
  2052. Address UB, Address ST, Address IL,
  2053. llvm::Value *Chunk);
  2054. void EmitOMPDistributeOuterLoop(
  2055. OpenMPDistScheduleClauseKind ScheduleKind,
  2056. const OMPDistributeDirective &S, OMPPrivateScope &LoopScope,
  2057. Address LB, Address UB, Address ST, Address IL, llvm::Value *Chunk);
  2058. /// \brief Emit code for sections directive.
  2059. void EmitSections(const OMPExecutableDirective &S);
  2060. public:
  2061. //===--------------------------------------------------------------------===//
  2062. // LValue Expression Emission
  2063. //===--------------------------------------------------------------------===//
  2064. /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
  2065. RValue GetUndefRValue(QualType Ty);
  2066. /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E
  2067. /// and issue an ErrorUnsupported style diagnostic (using the
  2068. /// provided Name).
  2069. RValue EmitUnsupportedRValue(const Expr *E,
  2070. const char *Name);
  2071. /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue
  2072. /// an ErrorUnsupported style diagnostic (using the provided Name).
  2073. LValue EmitUnsupportedLValue(const Expr *E,
  2074. const char *Name);
  2075. /// EmitLValue - Emit code to compute a designator that specifies the location
  2076. /// of the expression.
  2077. ///
  2078. /// This can return one of two things: a simple address or a bitfield
  2079. /// reference. In either case, the LLVM Value* in the LValue structure is
  2080. /// guaranteed to be an LLVM pointer type.
  2081. ///
  2082. /// If this returns a bitfield reference, nothing about the pointee type of
  2083. /// the LLVM value is known: For example, it may not be a pointer to an
  2084. /// integer.
  2085. ///
  2086. /// If this returns a normal address, and if the lvalue's C type is fixed
  2087. /// size, this method guarantees that the returned pointer type will point to
  2088. /// an LLVM type of the same size of the lvalue's type. If the lvalue has a
  2089. /// variable length type, this is not possible.
  2090. ///
  2091. LValue EmitLValue(const Expr *E);
  2092. /// \brief Same as EmitLValue but additionally we generate checking code to
  2093. /// guard against undefined behavior. This is only suitable when we know
  2094. /// that the address will be used to access the object.
  2095. LValue EmitCheckedLValue(const Expr *E, TypeCheckKind TCK);
  2096. RValue convertTempToRValue(Address addr, QualType type,
  2097. SourceLocation Loc);
  2098. void EmitAtomicInit(Expr *E, LValue lvalue);
  2099. bool LValueIsSuitableForInlineAtomic(LValue Src);
  2100. RValue EmitAtomicLoad(LValue LV, SourceLocation SL,
  2101. AggValueSlot Slot = AggValueSlot::ignored());
  2102. RValue EmitAtomicLoad(LValue lvalue, SourceLocation loc,
  2103. llvm::AtomicOrdering AO, bool IsVolatile = false,
  2104. AggValueSlot slot = AggValueSlot::ignored());
  2105. void EmitAtomicStore(RValue rvalue, LValue lvalue, bool isInit);
  2106. void EmitAtomicStore(RValue rvalue, LValue lvalue, llvm::AtomicOrdering AO,
  2107. bool IsVolatile, bool isInit);
  2108. std::pair<RValue, llvm::Value *> EmitAtomicCompareExchange(
  2109. LValue Obj, RValue Expected, RValue Desired, SourceLocation Loc,
  2110. llvm::AtomicOrdering Success =
  2111. llvm::AtomicOrdering::SequentiallyConsistent,
  2112. llvm::AtomicOrdering Failure =
  2113. llvm::AtomicOrdering::SequentiallyConsistent,
  2114. bool IsWeak = false, AggValueSlot Slot = AggValueSlot::ignored());
  2115. void EmitAtomicUpdate(LValue LVal, llvm::AtomicOrdering AO,
  2116. const llvm::function_ref<RValue(RValue)> &UpdateOp,
  2117. bool IsVolatile);
  2118. /// EmitToMemory - Change a scalar value from its value
  2119. /// representation to its in-memory representation.
  2120. llvm::Value *EmitToMemory(llvm::Value *Value, QualType Ty);
  2121. /// EmitFromMemory - Change a scalar value from its memory
  2122. /// representation to its value representation.
  2123. llvm::Value *EmitFromMemory(llvm::Value *Value, QualType Ty);
  2124. /// EmitLoadOfScalar - Load a scalar value from an address, taking
  2125. /// care to appropriately convert from the memory representation to
  2126. /// the LLVM value representation.
  2127. llvm::Value *EmitLoadOfScalar(Address Addr, bool Volatile, QualType Ty,
  2128. SourceLocation Loc,
  2129. AlignmentSource AlignSource =
  2130. AlignmentSource::Type,
  2131. llvm::MDNode *TBAAInfo = nullptr,
  2132. QualType TBAABaseTy = QualType(),
  2133. uint64_t TBAAOffset = 0,
  2134. bool isNontemporal = false);
  2135. /// EmitLoadOfScalar - Load a scalar value from an address, taking
  2136. /// care to appropriately convert from the memory representation to
  2137. /// the LLVM value representation. The l-value must be a simple
  2138. /// l-value.
  2139. llvm::Value *EmitLoadOfScalar(LValue lvalue, SourceLocation Loc);
  2140. /// EmitStoreOfScalar - Store a scalar value to an address, taking
  2141. /// care to appropriately convert from the memory representation to
  2142. /// the LLVM value representation.
  2143. void EmitStoreOfScalar(llvm::Value *Value, Address Addr,
  2144. bool Volatile, QualType Ty,
  2145. AlignmentSource AlignSource = AlignmentSource::Type,
  2146. llvm::MDNode *TBAAInfo = nullptr, bool isInit = false,
  2147. QualType TBAABaseTy = QualType(),
  2148. uint64_t TBAAOffset = 0, bool isNontemporal = false);
  2149. /// EmitStoreOfScalar - Store a scalar value to an address, taking
  2150. /// care to appropriately convert from the memory representation to
  2151. /// the LLVM value representation. The l-value must be a simple
  2152. /// l-value. The isInit flag indicates whether this is an initialization.
  2153. /// If so, atomic qualifiers are ignored and the store is always non-atomic.
  2154. void EmitStoreOfScalar(llvm::Value *value, LValue lvalue, bool isInit=false);
  2155. /// EmitLoadOfLValue - Given an expression that represents a value lvalue,
  2156. /// this method emits the address of the lvalue, then loads the result as an
  2157. /// rvalue, returning the rvalue.
  2158. RValue EmitLoadOfLValue(LValue V, SourceLocation Loc);
  2159. RValue EmitLoadOfExtVectorElementLValue(LValue V);
  2160. RValue EmitLoadOfBitfieldLValue(LValue LV);
  2161. RValue EmitLoadOfGlobalRegLValue(LValue LV);
  2162. /// EmitStoreThroughLValue - Store the specified rvalue into the specified
2163. /// lvalue, where both are guaranteed to have the same type, and that type
  2164. /// is 'Ty'.
  2165. void EmitStoreThroughLValue(RValue Src, LValue Dst, bool isInit = false);
  2166. void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst);
  2167. void EmitStoreThroughGlobalRegLValue(RValue Src, LValue Dst);
  2168. /// EmitStoreThroughBitfieldLValue - Store Src into Dst with same constraints
  2169. /// as EmitStoreThroughLValue.
  2170. ///
  2171. /// \param Result [out] - If non-null, this will be set to a Value* for the
  2172. /// bit-field contents after the store, appropriate for use as the result of
  2173. /// an assignment to the bit-field.
  2174. void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst,
  2175. llvm::Value **Result=nullptr);
  2176. /// Emit an l-value for an assignment (simple or compound) of complex type.
  2177. LValue EmitComplexAssignmentLValue(const BinaryOperator *E);
  2178. LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E);
  2179. LValue EmitScalarCompoundAssignWithComplex(const CompoundAssignOperator *E,
  2180. llvm::Value *&Result);
  2181. // Note: only available for agg return types
  2182. LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
  2183. LValue EmitCompoundAssignmentLValue(const CompoundAssignOperator *E);
  2184. // Note: only available for agg return types
  2185. LValue EmitCallExprLValue(const CallExpr *E);
  2186. // Note: only available for agg return types
  2187. LValue EmitVAArgExprLValue(const VAArgExpr *E);
  2188. LValue EmitDeclRefLValue(const DeclRefExpr *E);
  2189. LValue EmitStringLiteralLValue(const StringLiteral *E);
  2190. LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
  2191. LValue EmitPredefinedLValue(const PredefinedExpr *E);
  2192. LValue EmitUnaryOpLValue(const UnaryOperator *E);
  2193. LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E,
  2194. bool Accessed = false);
  2195. LValue EmitOMPArraySectionExpr(const OMPArraySectionExpr *E,
  2196. bool IsLowerBound = true);
  2197. LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
  2198. LValue EmitMemberExpr(const MemberExpr *E);
  2199. LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
  2200. LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
  2201. LValue EmitInitListLValue(const InitListExpr *E);
  2202. LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E);
  2203. LValue EmitCastLValue(const CastExpr *E);
  2204. LValue EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *E);
  2205. LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e);
  2206. Address EmitExtVectorElementLValue(LValue V);
  2207. RValue EmitRValueForField(LValue LV, const FieldDecl *FD, SourceLocation Loc);
  2208. Address EmitArrayToPointerDecay(const Expr *Array,
  2209. AlignmentSource *AlignSource = nullptr);
  2210. class ConstantEmission {
  2211. llvm::PointerIntPair<llvm::Constant*, 1, bool> ValueAndIsReference;
  2212. ConstantEmission(llvm::Constant *C, bool isReference)
  2213. : ValueAndIsReference(C, isReference) {}
  2214. public:
  2215. ConstantEmission() {}
  2216. static ConstantEmission forReference(llvm::Constant *C) {
  2217. return ConstantEmission(C, true);
  2218. }
  2219. static ConstantEmission forValue(llvm::Constant *C) {
  2220. return ConstantEmission(C, false);
  2221. }
  2222. explicit operator bool() const {
  2223. return ValueAndIsReference.getOpaqueValue() != nullptr;
  2224. }
  2225. bool isReference() const { return ValueAndIsReference.getInt(); }
  2226. LValue getReferenceLValue(CodeGenFunction &CGF, Expr *refExpr) const {
  2227. assert(isReference());
  2228. return CGF.MakeNaturalAlignAddrLValue(ValueAndIsReference.getPointer(),
  2229. refExpr->getType());
  2230. }
  2231. llvm::Constant *getValue() const {
  2232. assert(!isReference());
  2233. return ValueAndIsReference.getPointer();
  2234. }
  2235. };
  2236. ConstantEmission tryEmitAsConstant(DeclRefExpr *refExpr);
  2237. RValue EmitPseudoObjectRValue(const PseudoObjectExpr *e,
  2238. AggValueSlot slot = AggValueSlot::ignored());
  2239. LValue EmitPseudoObjectLValue(const PseudoObjectExpr *e);
  2240. llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
  2241. const ObjCIvarDecl *Ivar);
  2242. LValue EmitLValueForField(LValue Base, const FieldDecl* Field);
  2243. LValue EmitLValueForLambdaField(const FieldDecl *Field);
  2244. /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
  2245. /// if the Field is a reference, this will return the address of the reference
  2246. /// and not the address of the value stored in the reference.
  2247. LValue EmitLValueForFieldInitialization(LValue Base,
  2248. const FieldDecl* Field);
  2249. LValue EmitLValueForIvar(QualType ObjectTy,
  2250. llvm::Value* Base, const ObjCIvarDecl *Ivar,
  2251. unsigned CVRQualifiers);
  2252. LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
  2253. LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
  2254. LValue EmitLambdaLValue(const LambdaExpr *E);
  2255. LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);
  2256. LValue EmitCXXUuidofLValue(const CXXUuidofExpr *E);
  2257. LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
  2258. LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
  2259. LValue EmitStmtExprLValue(const StmtExpr *E);
  2260. LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
  2261. LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);
  2262. void EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init);
  2263. //===--------------------------------------------------------------------===//
  2264. // Scalar Expression Emission
  2265. //===--------------------------------------------------------------------===//
  2266. /// EmitCall - Generate a call of the given function, expecting the given
  2267. /// result type, and using the given argument list which specifies both the
  2268. /// LLVM arguments and the types they were derived from.
  2269. RValue EmitCall(const CGFunctionInfo &FnInfo, llvm::Value *Callee,
  2270. ReturnValueSlot ReturnValue, const CallArgList &Args,
  2271. CGCalleeInfo CalleeInfo = CGCalleeInfo(),
  2272. llvm::Instruction **callOrInvoke = nullptr);
  2273. RValue EmitCall(QualType FnType, llvm::Value *Callee, const CallExpr *E,
  2274. ReturnValueSlot ReturnValue,
  2275. CGCalleeInfo CalleeInfo = CGCalleeInfo(),
  2276. llvm::Value *Chain = nullptr);
  2277. RValue EmitCallExpr(const CallExpr *E,
  2278. ReturnValueSlot ReturnValue = ReturnValueSlot());
  2279. void checkTargetFeatures(const CallExpr *E, const FunctionDecl *TargetDecl);
  2280. llvm::CallInst *EmitRuntimeCall(llvm::Value *callee,
  2281. const Twine &name = "");
  2282. llvm::CallInst *EmitRuntimeCall(llvm::Value *callee,
  2283. ArrayRef<llvm::Value*> args,
  2284. const Twine &name = "");
  2285. llvm::CallInst *EmitNounwindRuntimeCall(llvm::Value *callee,
  2286. const Twine &name = "");
  2287. llvm::CallInst *EmitNounwindRuntimeCall(llvm::Value *callee,
  2288. ArrayRef<llvm::Value*> args,
  2289. const Twine &name = "");
  2290. llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
  2291. ArrayRef<llvm::Value *> Args,
  2292. const Twine &Name = "");
  2293. llvm::CallSite EmitRuntimeCallOrInvoke(llvm::Value *callee,
  2294. ArrayRef<llvm::Value*> args,
  2295. const Twine &name = "");
  2296. llvm::CallSite EmitRuntimeCallOrInvoke(llvm::Value *callee,
  2297. const Twine &name = "");
  2298. void EmitNoreturnRuntimeCallOrInvoke(llvm::Value *callee,
  2299. ArrayRef<llvm::Value*> args);
  2300. llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD,
  2301. NestedNameSpecifier *Qual,
  2302. llvm::Type *Ty);
  2303. llvm::Value *BuildAppleKextVirtualDestructorCall(const CXXDestructorDecl *DD,
  2304. CXXDtorType Type,
  2305. const CXXRecordDecl *RD);
  2306. RValue
  2307. EmitCXXMemberOrOperatorCall(const CXXMethodDecl *MD, llvm::Value *Callee,
  2308. ReturnValueSlot ReturnValue, llvm::Value *This,
  2309. llvm::Value *ImplicitParam,
  2310. QualType ImplicitParamTy, const CallExpr *E);
  2311. RValue EmitCXXDestructorCall(const CXXDestructorDecl *DD, llvm::Value *Callee,
  2312. llvm::Value *This, llvm::Value *ImplicitParam,
  2313. QualType ImplicitParamTy, const CallExpr *E,
  2314. StructorType Type);
  2315. RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
  2316. ReturnValueSlot ReturnValue);
  2317. RValue EmitCXXMemberOrOperatorMemberCallExpr(const CallExpr *CE,
  2318. const CXXMethodDecl *MD,
  2319. ReturnValueSlot ReturnValue,
  2320. bool HasQualifier,
  2321. NestedNameSpecifier *Qualifier,
  2322. bool IsArrow, const Expr *Base);
  2323. // Compute the object pointer.
  2324. Address EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
  2325. llvm::Value *memberPtr,
  2326. const MemberPointerType *memberPtrType,
  2327. AlignmentSource *AlignSource = nullptr);
  2328. RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
  2329. ReturnValueSlot ReturnValue);
  2330. RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
  2331. const CXXMethodDecl *MD,
  2332. ReturnValueSlot ReturnValue);
  2333. RValue EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
  2334. ReturnValueSlot ReturnValue);
  2335. RValue EmitCUDADevicePrintfCallExpr(const CallExpr *E,
  2336. ReturnValueSlot ReturnValue);
  2337. RValue EmitBuiltinExpr(const FunctionDecl *FD,
  2338. unsigned BuiltinID, const CallExpr *E,
  2339. ReturnValueSlot ReturnValue);
  2340. RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);
  2341. /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call
  2342. /// is unhandled by the current target.
  2343. llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2344. llvm::Value *EmitAArch64CompareBuiltinExpr(llvm::Value *Op, llvm::Type *Ty,
  2345. const llvm::CmpInst::Predicate Fp,
  2346. const llvm::CmpInst::Predicate Ip,
  2347. const llvm::Twine &Name = "");
  2348. llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2349. llvm::Value *EmitCommonNeonBuiltinExpr(unsigned BuiltinID,
  2350. unsigned LLVMIntrinsic,
  2351. unsigned AltLLVMIntrinsic,
  2352. const char *NameHint,
  2353. unsigned Modifier,
  2354. const CallExpr *E,
  2355. SmallVectorImpl<llvm::Value *> &Ops,
  2356. Address PtrOp0, Address PtrOp1);
  2357. llvm::Function *LookupNeonLLVMIntrinsic(unsigned IntrinsicID,
  2358. unsigned Modifier, llvm::Type *ArgTy,
  2359. const CallExpr *E);
  2360. llvm::Value *EmitNeonCall(llvm::Function *F,
  2361. SmallVectorImpl<llvm::Value*> &O,
  2362. const char *name,
  2363. unsigned shift = 0, bool rightshift = false);
  2364. llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx);
  2365. llvm::Value *EmitNeonShiftVector(llvm::Value *V, llvm::Type *Ty,
  2366. bool negateForRightShift);
  2367. llvm::Value *EmitNeonRShiftImm(llvm::Value *Vec, llvm::Value *Amt,
  2368. llvm::Type *Ty, bool usgn, const char *name);
  2369. llvm::Value *vectorWrapScalar16(llvm::Value *Op);
  2370. llvm::Value *EmitAArch64BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2371. llvm::Value *BuildVector(ArrayRef<llvm::Value*> Ops);
  2372. llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2373. llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2374. llvm::Value *EmitAMDGPUBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2375. llvm::Value *EmitSystemZBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2376. llvm::Value *EmitNVPTXBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  2377. llvm::Value *EmitWebAssemblyBuiltinExpr(unsigned BuiltinID,
  2378. const CallExpr *E);
  2379. llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
  2380. llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
  2381. llvm::Value *EmitObjCBoxedExpr(const ObjCBoxedExpr *E);
  2382. llvm::Value *EmitObjCArrayLiteral(const ObjCArrayLiteral *E);
  2383. llvm::Value *EmitObjCDictionaryLiteral(const ObjCDictionaryLiteral *E);
  2384. llvm::Value *EmitObjCCollectionLiteral(const Expr *E,
  2385. const ObjCMethodDecl *MethodWithObjects);
  2386. llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
  2387. RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
  2388. ReturnValueSlot Return = ReturnValueSlot());
  2389. /// Retrieves the default cleanup kind for an ARC cleanup.
  2390. /// Except under -fobjc-arc-eh, ARC cleanups are normal-only.
  2391. CleanupKind getARCCleanupKind() {
  2392. return CGM.getCodeGenOpts().ObjCAutoRefCountExceptions
  2393. ? NormalAndEHCleanup : NormalCleanup;
  2394. }
  2395. // ARC primitives.
  2396. void EmitARCInitWeak(Address addr, llvm::Value *value);
  2397. void EmitARCDestroyWeak(Address addr);
  2398. llvm::Value *EmitARCLoadWeak(Address addr);
  2399. llvm::Value *EmitARCLoadWeakRetained(Address addr);
  2400. llvm::Value *EmitARCStoreWeak(Address addr, llvm::Value *value, bool ignored);
  2401. void EmitARCCopyWeak(Address dst, Address src);
  2402. void EmitARCMoveWeak(Address dst, Address src);
  2403. llvm::Value *EmitARCRetainAutorelease(QualType type, llvm::Value *value);
  2404. llvm::Value *EmitARCRetainAutoreleaseNonBlock(llvm::Value *value);
  2405. llvm::Value *EmitARCStoreStrong(LValue lvalue, llvm::Value *value,
  2406. bool resultIgnored);
  2407. llvm::Value *EmitARCStoreStrongCall(Address addr, llvm::Value *value,
  2408. bool resultIgnored);
  2409. llvm::Value *EmitARCRetain(QualType type, llvm::Value *value);
  2410. llvm::Value *EmitARCRetainNonBlock(llvm::Value *value);
  2411. llvm::Value *EmitARCRetainBlock(llvm::Value *value, bool mandatory);
  2412. void EmitARCDestroyStrong(Address addr, ARCPreciseLifetime_t precise);
  2413. void EmitARCRelease(llvm::Value *value, ARCPreciseLifetime_t precise);
  2414. llvm::Value *EmitARCAutorelease(llvm::Value *value);
  2415. llvm::Value *EmitARCAutoreleaseReturnValue(llvm::Value *value);
  2416. llvm::Value *EmitARCRetainAutoreleaseReturnValue(llvm::Value *value);
  2417. llvm::Value *EmitARCRetainAutoreleasedReturnValue(llvm::Value *value);
  2418. llvm::Value *EmitARCUnsafeClaimAutoreleasedReturnValue(llvm::Value *value);
  2419. std::pair<LValue,llvm::Value*>
  2420. EmitARCStoreAutoreleasing(const BinaryOperator *e);
  2421. std::pair<LValue,llvm::Value*>
  2422. EmitARCStoreStrong(const BinaryOperator *e, bool ignored);
  2423. std::pair<LValue,llvm::Value*>
  2424. EmitARCStoreUnsafeUnretained(const BinaryOperator *e, bool ignored);
  2425. llvm::Value *EmitObjCThrowOperand(const Expr *expr);
  2426. llvm::Value *EmitObjCConsumeObject(QualType T, llvm::Value *Ptr);
  2427. llvm::Value *EmitObjCExtendObjectLifetime(QualType T, llvm::Value *Ptr);
  2428. llvm::Value *EmitARCExtendBlockObject(const Expr *expr);
  2429. llvm::Value *EmitARCReclaimReturnedObject(const Expr *e,
  2430. bool allowUnsafeClaim);
  2431. llvm::Value *EmitARCRetainScalarExpr(const Expr *expr);
  2432. llvm::Value *EmitARCRetainAutoreleaseScalarExpr(const Expr *expr);
  2433. llvm::Value *EmitARCUnsafeUnretainedScalarExpr(const Expr *expr);
  2434. void EmitARCIntrinsicUse(ArrayRef<llvm::Value*> values);
  2435. static Destroyer destroyARCStrongImprecise;
  2436. static Destroyer destroyARCStrongPrecise;
  2437. static Destroyer destroyARCWeak;
  2438. void EmitObjCAutoreleasePoolPop(llvm::Value *Ptr);
  2439. llvm::Value *EmitObjCAutoreleasePoolPush();
  2440. llvm::Value *EmitObjCMRRAutoreleasePoolPush();
  2441. void EmitObjCAutoreleasePoolCleanup(llvm::Value *Ptr);
  2442. void EmitObjCMRRAutoreleasePoolPop(llvm::Value *Ptr);
  2443. /// \brief Emits a reference binding to the passed in expression.
  2444. RValue EmitReferenceBindingToExpr(const Expr *E);
  2445. //===--------------------------------------------------------------------===//
  2446. // Expression Emission
  2447. //===--------------------------------------------------------------------===//
  2448. // Expressions are broken into three classes: scalar, complex, aggregate.
  2449. /// EmitScalarExpr - Emit the computation of the specified expression of LLVM
  2450. /// scalar type, returning the result.
  2451. llvm::Value *EmitScalarExpr(const Expr *E , bool IgnoreResultAssign = false);
  2452. /// Emit a conversion from the specified type to the specified destination
  2453. /// type, both of which are LLVM scalar types.
  2454. llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
  2455. QualType DstTy, SourceLocation Loc);
  2456. /// Emit a conversion from the specified complex type to the specified
  2457. /// destination type, where the destination type is an LLVM scalar type.
  2458. llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
  2459. QualType DstTy,
  2460. SourceLocation Loc);
  2461. /// EmitAggExpr - Emit the computation of the specified expression
  2462. /// of aggregate type. The result is computed into the given slot,
  2463. /// which may be null to indicate that the value is not needed.
  2464. void EmitAggExpr(const Expr *E, AggValueSlot AS);
  2465. /// EmitAggExprToLValue - Emit the computation of the specified expression of
  2466. /// aggregate type into a temporary LValue.
  2467. LValue EmitAggExprToLValue(const Expr *E);
  2468. /// EmitExtendGCLifetime - Given a pointer to an Objective-C object,
  2469. /// make sure it survives garbage collection until this point.
  2470. void EmitExtendGCLifetime(llvm::Value *object);
  2471. /// EmitComplexExpr - Emit the computation of the specified expression of
  2472. /// complex type, returning the result.
  2473. ComplexPairTy EmitComplexExpr(const Expr *E,
  2474. bool IgnoreReal = false,
  2475. bool IgnoreImag = false);
  2476. /// EmitComplexExprIntoLValue - Emit the given expression of complex
  2477. /// type and place its result into the specified l-value.
  2478. void EmitComplexExprIntoLValue(const Expr *E, LValue dest, bool isInit);
  2479. /// EmitStoreOfComplex - Store a complex number into the specified l-value.
  2480. void EmitStoreOfComplex(ComplexPairTy V, LValue dest, bool isInit);
  2481. /// EmitLoadOfComplex - Load a complex number from the specified l-value.
  2482. ComplexPairTy EmitLoadOfComplex(LValue src, SourceLocation loc);
  2483. Address emitAddrOfRealComponent(Address complex, QualType complexType);
  2484. Address emitAddrOfImagComponent(Address complex, QualType complexType);
  2485. /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
  2486. /// global variable that has already been created for it. If the initializer
  2487. /// has a different type than GV does, this may free GV and return a different
  2488. /// one. Otherwise it just returns GV.
  2489. llvm::GlobalVariable *
  2490. AddInitializerToStaticVarDecl(const VarDecl &D,
  2491. llvm::GlobalVariable *GV);
  2492. /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
  2493. /// variable with global storage.
  2494. void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr,
  2495. bool PerformInit);
  2496. llvm::Constant *createAtExitStub(const VarDecl &VD, llvm::Constant *Dtor,
  2497. llvm::Constant *Addr);
  2498. /// Call atexit() with a function that passes the given argument to
  2499. /// the given function.
  2500. void registerGlobalDtorWithAtExit(const VarDecl &D, llvm::Constant *fn,
  2501. llvm::Constant *addr);
  2502. /// Emit code in this function to perform a guarded variable
  2503. /// initialization. Guarded initializations are used when it's not
  2504. /// possible to prove that an initialization will be done exactly
  2505. /// once, e.g. with a static local variable or a static data member
  2506. /// of a class template.
  2507. void EmitCXXGuardedInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr,
  2508. bool PerformInit);
  2509. /// GenerateCXXGlobalInitFunc - Generates code for initializing global
  2510. /// variables.
  2511. void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
  2512. ArrayRef<llvm::Function *> CXXThreadLocals,
  2513. Address Guard = Address::invalid());
  2514. /// GenerateCXXGlobalDtorsFunc - Generates code for destroying global
  2515. /// variables.
  2516. void GenerateCXXGlobalDtorsFunc(llvm::Function *Fn,
  2517. const std::vector<std::pair<llvm::WeakVH,
  2518. llvm::Constant*> > &DtorsAndObjects);
  2519. void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
  2520. const VarDecl *D,
  2521. llvm::GlobalVariable *Addr,
  2522. bool PerformInit);
  2523. void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);
  2524. void EmitSynthesizedCXXCopyCtor(Address Dest, Address Src, const Expr *Exp);
  2525. void enterFullExpression(const ExprWithCleanups *E) {
  2526. if (E->getNumObjects() == 0) return;
  2527. enterNonTrivialFullExpression(E);
  2528. }
  2529. void enterNonTrivialFullExpression(const ExprWithCleanups *E);
  2530. void EmitCXXThrowExpr(const CXXThrowExpr *E, bool KeepInsertionPoint = true);
  2531. void EmitLambdaExpr(const LambdaExpr *E, AggValueSlot Dest);
  2532. RValue EmitAtomicExpr(AtomicExpr *E);
  2533. //===--------------------------------------------------------------------===//
  2534. // Annotations Emission
  2535. //===--------------------------------------------------------------------===//
  2536. /// Emit an annotation call (intrinsic or builtin).
  2537. llvm::Value *EmitAnnotationCall(llvm::Value *AnnotationFn,
  2538. llvm::Value *AnnotatedVal,
  2539. StringRef AnnotationStr,
  2540. SourceLocation Location);
  2541. /// Emit local annotations for the local variable V, declared by D.
  2542. void EmitVarAnnotations(const VarDecl *D, llvm::Value *V);
  2543. /// Emit field annotations for the given field & value. Returns the
  2544. /// annotation result.
  2545. Address EmitFieldAnnotations(const FieldDecl *D, Address V);
  2546. //===--------------------------------------------------------------------===//
  2547. // Internal Helpers
  2548. //===--------------------------------------------------------------------===//
  2549. /// ContainsLabel - Return true if the statement contains a label in it. If
  2550. /// this statement is not executed normally, it not containing a label means
  2551. /// that we can just remove the code.
  2552. static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);
  2553. /// containsBreak - Return true if the statement contains a break out of it.
  2554. /// If the statement (recursively) contains a switch or loop with a break
  2555. /// inside of it, this is fine.
  2556. static bool containsBreak(const Stmt *S);
  2557. /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  2558. /// to a constant, or if it does but contains a label, return false. If it
  2559. /// constant folds return true and set the boolean result in Result.
  2560. bool ConstantFoldsToSimpleInteger(const Expr *Cond, bool &Result,
  2561. bool AllowLabels = false);
  2562. /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  2563. /// to a constant, or if it does but contains a label, return false. If it
  2564. /// constant folds return true and set the folded value.
  2565. bool ConstantFoldsToSimpleInteger(const Expr *Cond, llvm::APSInt &Result,
  2566. bool AllowLabels = false);
  2567. /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
  2568. /// if statement) to the specified blocks. Based on the condition, this might
  2569. /// try to simplify the codegen of the conditional based on the branch.
  2570. /// TrueCount should be the number of times we expect the condition to
  2571. /// evaluate to true based on PGO data.
  2572. void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
  2573. llvm::BasicBlock *FalseBlock, uint64_t TrueCount);
  2574. /// \brief Emit a description of a type in a format suitable for passing to
  2575. /// a runtime sanitizer handler.
  2576. llvm::Constant *EmitCheckTypeDescriptor(QualType T);
  2577. /// \brief Convert a value into a format suitable for passing to a runtime
  2578. /// sanitizer handler.
  2579. llvm::Value *EmitCheckValue(llvm::Value *V);
  2580. /// \brief Emit a description of a source location in a format suitable for
  2581. /// passing to a runtime sanitizer handler.
  2582. llvm::Constant *EmitCheckSourceLocation(SourceLocation Loc);
  2583. /// \brief Create a basic block that will call a handler function in a
  2584. /// sanitizer runtime with the provided arguments, and create a conditional
  2585. /// branch to it.
  2586. void EmitCheck(ArrayRef<std::pair<llvm::Value *, SanitizerMask>> Checked,
  2587. StringRef CheckName, ArrayRef<llvm::Constant *> StaticArgs,
  2588. ArrayRef<llvm::Value *> DynamicArgs);
  2589. /// \brief Emit a slow path cross-DSO CFI check which calls __cfi_slowpath
  2590. /// if Cond if false.
  2591. void EmitCfiSlowPathCheck(SanitizerMask Kind, llvm::Value *Cond,
  2592. llvm::ConstantInt *TypeId, llvm::Value *Ptr,
  2593. ArrayRef<llvm::Constant *> StaticArgs);
  2594. /// \brief Create a basic block that will call the trap intrinsic, and emit a
  2595. /// conditional branch to it, for the -ftrapv checks.
  2596. void EmitTrapCheck(llvm::Value *Checked);
  2597. /// \brief Emit a call to trap or debugtrap and attach function attribute
  2598. /// "trap-func-name" if specified.
  2599. llvm::CallInst *EmitTrapCall(llvm::Intrinsic::ID IntrID);
  2600. /// \brief Emit a cross-DSO CFI failure handling function.
  2601. void EmitCfiCheckFail();
  2602. /// \brief Create a check for a function parameter that may potentially be
  2603. /// declared as non-null.
  2604. void EmitNonNullArgCheck(RValue RV, QualType ArgType, SourceLocation ArgLoc,
  2605. const FunctionDecl *FD, unsigned ParmNum);
  2606. /// EmitCallArg - Emit a single call argument.
  2607. void EmitCallArg(CallArgList &args, const Expr *E, QualType ArgType);
  2608. /// EmitDelegateCallArg - We are performing a delegate call; that
  2609. /// is, the current function is delegating to another one. Produce
  2610. /// a r-value suitable for passing the given parameter.
  2611. void EmitDelegateCallArg(CallArgList &args, const VarDecl *param,
  2612. SourceLocation loc);
  2613. /// SetFPAccuracy - Set the minimum required accuracy of the given floating
  2614. /// point operation, expressed as the maximum relative error in ulp.
  2615. void SetFPAccuracy(llvm::Value *Val, float Accuracy);
  2616. private:
  2617. llvm::MDNode *getRangeForLoadFromType(QualType Ty);
  2618. void EmitReturnOfRValue(RValue RV, QualType Ty);
  2619. void deferPlaceholderReplacement(llvm::Instruction *Old, llvm::Value *New);
  2620. llvm::SmallVector<std::pair<llvm::Instruction *, llvm::Value *>, 4>
  2621. DeferredReplacements;
/// Set the address of a local variable.
/// Each declaration may be registered at most once; re-registering an
/// existing VarDecl is a programming error (asserted below).
void setAddrOfLocalVar(const VarDecl *VD, Address Addr) {
  assert(!LocalDeclMap.count(VD) && "Decl already exists in LocalDeclMap!");
  LocalDeclMap.insert({VD, Addr});
}
  2627. /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
  2628. /// from function arguments into \arg Dst. See ABIArgInfo::Expand.
  2629. ///
  2630. /// \param AI - The first function argument of the expansion.
  2631. void ExpandTypeFromArgs(QualType Ty, LValue Dst,
  2632. SmallVectorImpl<llvm::Value *>::iterator &AI);
  2633. /// ExpandTypeToArgs - Expand an RValue \arg RV, with the LLVM type for \arg
  2634. /// Ty, into individual arguments on the provided vector \arg IRCallArgs,
  2635. /// starting at index \arg IRCallArgPos. See ABIArgInfo::Expand.
  2636. void ExpandTypeToArgs(QualType Ty, RValue RV, llvm::FunctionType *IRFuncTy,
  2637. SmallVectorImpl<llvm::Value *> &IRCallArgs,
  2638. unsigned &IRCallArgPos);
  2639. llvm::Value* EmitAsmInput(const TargetInfo::ConstraintInfo &Info,
  2640. const Expr *InputExpr, std::string &ConstraintStr);
  2641. llvm::Value* EmitAsmInputLValue(const TargetInfo::ConstraintInfo &Info,
  2642. LValue InputValue, QualType InputType,
  2643. std::string &ConstraintStr,
  2644. SourceLocation Loc);
  2645. /// \brief Attempts to statically evaluate the object size of E. If that
  2646. /// fails, emits code to figure the size of E out for us. This is
  2647. /// pass_object_size aware.
  2648. llvm::Value *evaluateOrEmitBuiltinObjectSize(const Expr *E, unsigned Type,
  2649. llvm::IntegerType *ResType);
  2650. /// \brief Emits the size of E, as required by __builtin_object_size. This
  2651. /// function is aware of pass_object_size parameters, and will act accordingly
  2652. /// if E is a parameter with the pass_object_size attribute.
  2653. llvm::Value *emitBuiltinObjectSize(const Expr *E, unsigned Type,
  2654. llvm::IntegerType *ResType);
  2655. public:
#ifndef NDEBUG
// Determine whether the given argument is an Objective-C method
// that may have type parameters in its signature.
//
// Debug-only helper used by the assertion in EmitCallArgs: for a
// generic ObjC method, declared parameter types may legitimately
// differ from the canonical argument types, so the strict type-match
// check must be relaxed.
static bool isObjCMethodWithTypeParams(const ObjCMethodDecl *method) {
  const DeclContext *dc = method->getDeclContext();
  // A method declared inside a parameterized class or category may
  // reference that type parameter list; pointer converts to bool.
  if (const ObjCInterfaceDecl *classDecl= dyn_cast<ObjCInterfaceDecl>(dc)) {
    return classDecl->getTypeParamListAsWritten();
  }
  if (const ObjCCategoryDecl *catDecl = dyn_cast<ObjCCategoryDecl>(dc)) {
    return catDecl->getTypeParamList();
  }
  return false;
}

// Fallback overload: any non-ObjC callee never has ObjC type params.
template<typename T>
static bool isObjCMethodWithTypeParams(const T *) { return false; }
#endif
/// EmitCallArgs - Emit call arguments for a function.
///
/// \param CallArgTypeInfo - Prototype-like object providing
///   param_type_begin/end and isVariadic(); may be null, in which case
///   every argument is emitted with its own (promoted) type.
/// \param ArgRange - The call's argument expressions.
/// \param ParamsToSkip - Number of leading parameters of
///   CallArgTypeInfo to ignore (requires CallArgTypeInfo non-null).
template <typename T>
void EmitCallArgs(CallArgList &Args, const T *CallArgTypeInfo,
                  llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
                  const FunctionDecl *CalleeDecl = nullptr,
                  unsigned ParamsToSkip = 0) {
  SmallVector<QualType, 16> ArgTypes;
  CallExpr::const_arg_iterator Arg = ArgRange.begin();

  assert((ParamsToSkip == 0 || CallArgTypeInfo) &&
         "Can't skip parameters if type info is not provided");
  if (CallArgTypeInfo) {
#ifndef NDEBUG
    // Generic ObjC methods get a relaxed type-match check below.
    bool isGenericMethod = isObjCMethodWithTypeParams(CallArgTypeInfo);
#endif

    // First, use the argument types that the type info knows about
    for (auto I = CallArgTypeInfo->param_type_begin() + ParamsToSkip,
              E = CallArgTypeInfo->param_type_end();
         I != E; ++I, ++Arg) {
      assert(Arg != ArgRange.end() && "Running over edge of argument list!");
      // Sanity check: the declared parameter type should canonically
      // match the argument type, except for generic ObjC methods,
      // variably-modified types, and ObjC-retainable references.
      assert((isGenericMethod ||
              ((*I)->isVariablyModifiedType() ||
               (*I).getNonReferenceType()->isObjCRetainableType() ||
               getContext()
                       .getCanonicalType((*I).getNonReferenceType())
                       .getTypePtr() ==
                   getContext()
                       .getCanonicalType((*Arg)->getType())
                       .getTypePtr())) &&
             "type mismatch in call argument!");
      ArgTypes.push_back(*I);
    }
  }

  // Either we've emitted all the call args, or we have a call to variadic
  // function.
  assert((Arg == ArgRange.end() || !CallArgTypeInfo ||
          CallArgTypeInfo->isVariadic()) &&
         "Extra arguments in non-variadic function!");

  // If we still have any arguments, emit them using the type of the argument.
  for (auto *A : llvm::make_range(Arg, ArgRange.end()))
    ArgTypes.push_back(getVarArgType(A));

  // Delegate to the non-template overload that does the actual emission.
  EmitCallArgs(Args, ArgTypes, ArgRange, CalleeDecl, ParamsToSkip);
}
  2714. void EmitCallArgs(CallArgList &Args, ArrayRef<QualType> ArgTypes,
  2715. llvm::iterator_range<CallExpr::const_arg_iterator> ArgRange,
  2716. const FunctionDecl *CalleeDecl = nullptr,
  2717. unsigned ParamsToSkip = 0);
/// EmitPointerWithAlignment - Given an expression with a pointer
/// type, emit the value and compute our best estimate of the
/// alignment of the pointee.
///
/// \param Source - If non-null, this will be initialized with
///   information about the source of the alignment.  Note that this
///   function will conservatively fall back on the type when it
///   doesn't recognize the expression, which means that sometimes
///   the alignment (and its source) will be a worst-case estimate
///   derived from the type alone.  One
///   reasonable way to use this information is when there's a
///   language guarantee that the pointer must be aligned to some
///   stricter value, and we're simply trying to ensure that
///   sufficiently obvious uses of under-aligned objects don't get
///   miscompiled; for example, a placement new into the address of
///   a local variable.  In such a case, it's quite reasonable to
///   just ignore the returned alignment when it isn't from an
///   explicit source.
  2739. Address EmitPointerWithAlignment(const Expr *Addr,
  2740. AlignmentSource *Source = nullptr);
  2741. void EmitSanitizerStatReport(llvm::SanitizerStatKind SSK);
  2742. private:
  2743. QualType getVarArgType(const Expr *Arg);
/// Convenience accessor for the TargetCodeGenInfo held by the
/// CodeGenModule.
const TargetCodeGenInfo &getTargetHooks() const {
  return CGM.getTargetCodeGenInfo();
}
  2747. void EmitDeclMetadata();
  2748. BlockByrefHelpers *buildByrefHelpers(llvm::StructType &byrefType,
  2749. const AutoVarEmission &emission);
  2750. void AddObjCARCExceptionMetadata(llvm::Instruction *Inst);
  2751. llvm::Value *GetValueForARMHint(unsigned BuiltinID);
  2752. };
  2753. /// Helper class with most of the code for saving a value for a
  2754. /// conditional expression cleanup.
  2755. struct DominatingLLVMValue {
  2756. typedef llvm::PointerIntPair<llvm::Value*, 1, bool> saved_type;
  2757. /// Answer whether the given value needs extra work to be saved.
  2758. static bool needsSaving(llvm::Value *value) {
  2759. // If it's not an instruction, we don't need to save.
  2760. if (!isa<llvm::Instruction>(value)) return false;
  2761. // If it's an instruction in the entry block, we don't need to save.
  2762. llvm::BasicBlock *block = cast<llvm::Instruction>(value)->getParent();
  2763. return (block != &block->getParent()->getEntryBlock());
  2764. }
  2765. /// Try to save the given value.
  2766. static saved_type save(CodeGenFunction &CGF, llvm::Value *value) {
  2767. if (!needsSaving(value)) return saved_type(value, false);
  2768. // Otherwise, we need an alloca.
  2769. auto align = CharUnits::fromQuantity(
  2770. CGF.CGM.getDataLayout().getPrefTypeAlignment(value->getType()));
  2771. Address alloca =
  2772. CGF.CreateTempAlloca(value->getType(), align, "cond-cleanup.save");
  2773. CGF.Builder.CreateStore(value, alloca);
  2774. return saved_type(alloca.getPointer(), true);
  2775. }
  2776. static llvm::Value *restore(CodeGenFunction &CGF, saved_type value) {
  2777. // If the value says it wasn't saved, trust that it's still dominating.
  2778. if (!value.getInt()) return value.getPointer();
  2779. // Otherwise, it should be an alloca instruction, as set up in save().
  2780. auto alloca = cast<llvm::AllocaInst>(value.getPointer());
  2781. return CGF.Builder.CreateAlignedLoad(alloca, alloca->getAlignment());
  2782. }
  2783. };
/// A partial specialization of DominatingValue for llvm::Values that
/// might be llvm::Instructions.
template <class T> struct DominatingPointer<T,true> : DominatingLLVMValue {
  typedef T *type;
  // Reuse the generic llvm::Value save/restore machinery (inherited
  // from DominatingLLVMValue), then cast back to the static type.
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return static_cast<T*>(DominatingLLVMValue::restore(CGF, value));
  }
};
/// A specialization of DominatingValue for Address.
template <> struct DominatingValue<Address> {
  typedef Address type;

  // An Address is saved as its pointer (handled by the generic
  // llvm::Value mechanism) plus the alignment, carried alongside.
  struct saved_type {
    DominatingLLVMValue::saved_type SavedValue;
    CharUnits Alignment;
  };

  static bool needsSaving(type value) {
    return DominatingLLVMValue::needsSaving(value.getPointer());
  }
  static saved_type save(CodeGenFunction &CGF, type value) {
    return { DominatingLLVMValue::save(CGF, value.getPointer()),
             value.getAlignment() };
  }
  static type restore(CodeGenFunction &CGF, saved_type value) {
    // Rebuild the Address from the restored pointer and the alignment
    // that was captured at save time.
    return Address(DominatingLLVMValue::restore(CGF, value.SavedValue),
                   value.Alignment);
  }
};
/// A specialization of DominatingValue for RValue.
template <> struct DominatingValue<RValue> {
  typedef RValue type;
  class saved_type {
    // How the RValue was saved; restore() rebuilds it accordingly.
    enum Kind { ScalarLiteral, ScalarAddress, AggregateLiteral,
                AggregateAddress, ComplexAddress };

    llvm::Value *Value;
    unsigned K : 3;      // a Kind, packed into bitfields with Align
    unsigned Align : 29; // presumably alignment in bytes — see CGCleanup.cpp
    saved_type(llvm::Value *v, Kind k, unsigned a = 0)
      : Value(v), K(k), Align(a) {}

  public:
    static bool needsSaving(RValue value);
    static saved_type save(CodeGenFunction &CGF, RValue value);
    RValue restore(CodeGenFunction &CGF);

    // implementations in CGCleanup.cpp
  };

  // The outer interface simply delegates to saved_type's out-of-line
  // implementations.
  static bool needsSaving(type value) {
    return saved_type::needsSaving(value);
  }
  static saved_type save(CodeGenFunction &CGF, type value) {
    return saved_type::save(CGF, value);
  }
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return value.restore(CGF);
  }
};
  2838. } // end namespace CodeGen
  2839. } // end namespace clang
  2840. #endif