//===--- CGExprConstant.cpp - Emit LLVM Code from Constant Expressions ----===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Constant Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CGRecordLayout.h"
#include "clang/AST/APValue.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Basic/Builtins.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Target/TargetData.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                            ConstStructBuilder
//===----------------------------------------------------------------------===//

namespace {
class ConstStructBuilder {
  CodeGenModule &CGM;
  CodeGenFunction *CGF;

  bool Packed;
  CharUnits NextFieldOffsetInChars;
  CharUnits LLVMStructAlignment;
  SmallVector<llvm::Constant *, 32> Elements;
public:
  static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
                                     InitListExpr *ILE);
  static llvm::Constant *BuildStruct(CodeGenModule &CGM, CodeGenFunction *CGF,
                                     const APValue &Value, QualType ValTy);

private:
  ConstStructBuilder(CodeGenModule &CGM, CodeGenFunction *CGF)
    : CGM(CGM), CGF(CGF), Packed(false),
      NextFieldOffsetInChars(CharUnits::Zero()),
      LLVMStructAlignment(CharUnits::One()) { }

  void AppendVTablePointer(BaseSubobject Base, llvm::Constant *VTable,
                           const CXXRecordDecl *VTableClass);

  void AppendField(const FieldDecl *Field, uint64_t FieldOffset,
                   llvm::Constant *InitExpr);

  void AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst);

  void AppendBitField(const FieldDecl *Field, uint64_t FieldOffset,
                      llvm::ConstantInt *InitExpr);

  void AppendPadding(CharUnits PadSize);

  void AppendTailPadding(CharUnits RecordSize);

  void ConvertStructToPacked();

  bool Build(InitListExpr *ILE);
  void Build(const APValue &Val, const RecordDecl *RD, bool IsPrimaryBase,
             llvm::Constant *VTable, const CXXRecordDecl *VTableClass,
             CharUnits BaseOffset);
  llvm::Constant *Finalize(QualType Ty);

  CharUnits getAlignment(const llvm::Constant *C) const {
    if (Packed) return CharUnits::One();
    return CharUnits::fromQuantity(
        CGM.getTargetData().getABITypeAlignment(C->getType()));
  }

  CharUnits getSizeInChars(const llvm::Constant *C) const {
    return CharUnits::fromQuantity(
        CGM.getTargetData().getTypeAllocSize(C->getType()));
  }
};

void ConstStructBuilder::AppendVTablePointer(BaseSubobject Base,
                                             llvm::Constant *VTable,
                                             const CXXRecordDecl *VTableClass) {
  // Find the appropriate vtable within the vtable group.
  uint64_t AddressPoint =
    CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
  llvm::Value *Indices[] = {
    llvm::ConstantInt::get(CGM.Int64Ty, 0),
    llvm::ConstantInt::get(CGM.Int64Ty, AddressPoint)
  };

  llvm::Constant *VTableAddressPoint =
    llvm::ConstantExpr::getInBoundsGetElementPtr(VTable, Indices);

  // Add the vtable at the start of the object.
  AppendBytes(CharUnits::Zero(), VTableAddressPoint);
}
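
/// Append a non-bitfield value at a field offset given in bits, after
/// converting the offset to CharUnits.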
void ConstStructBuilder::
AppendField(const FieldDecl *Field, uint64_t FieldOffset,
            llvm::Constant *InitCst) {
  const ASTContext &Context = CGM.getContext();
  CharUnits FieldOffsetInChars = Context.toCharUnitsFromBits(FieldOffset);
  AppendBytes(FieldOffsetInChars, InitCst);
}
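
/// Append a constant at the given byte offset, inserting padding as needed and
/// converting the struct to a packed layout if the constant's natural
/// alignment would place it past the requested offset.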
void ConstStructBuilder::
AppendBytes(CharUnits FieldOffsetInChars, llvm::Constant *InitCst) {
  assert(NextFieldOffsetInChars <= FieldOffsetInChars
         && "Field offset mismatch!");
  CharUnits FieldAlignment = getAlignment(InitCst);
  // Round up the field offset to the alignment of the field type.
  CharUnits AlignedNextFieldOffsetInChars =
    NextFieldOffsetInChars.RoundUpToAlignment(FieldAlignment);
  if (AlignedNextFieldOffsetInChars > FieldOffsetInChars) {
    assert(!Packed && "Alignment is wrong even with a packed struct!");
    // Convert the struct to a packed struct.
    ConvertStructToPacked();
    AlignedNextFieldOffsetInChars = NextFieldOffsetInChars;
  }
  if (AlignedNextFieldOffsetInChars < FieldOffsetInChars) {
    // We need to append padding.
    AppendPadding(FieldOffsetInChars - NextFieldOffsetInChars);
    assert(NextFieldOffsetInChars == FieldOffsetInChars &&
           "Did not add enough padding!");
    AlignedNextFieldOffsetInChars = NextFieldOffsetInChars;
  }
  // Add the field.
  Elements.push_back(InitCst);
  NextFieldOffsetInChars = AlignedNextFieldOffsetInChars +
                           getSizeInChars(InitCst);
  if (Packed)
    assert(LLVMStructAlignment == CharUnits::One() &&
           "Packed struct not byte-aligned!");
  else
    LLVMStructAlignment = std::max(LLVMStructAlignment, FieldAlignment);
}
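
/// Append the value of a bitfield, merging its bits into the partially filled
/// trailing byte (if any) and then emitting the remaining bits one char at a
/// time, honoring the target's endianness.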
void ConstStructBuilder::AppendBitField(const FieldDecl *Field,
                                        uint64_t FieldOffset,
                                        llvm::ConstantInt *CI) {
  const ASTContext &Context = CGM.getContext();
  const uint64_t CharWidth = Context.getCharWidth();
  uint64_t NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
  if (FieldOffset > NextFieldOffsetInBits) {
    // We need to add padding.
    CharUnits PadSize = Context.toCharUnitsFromBits(
      llvm::RoundUpToAlignment(FieldOffset - NextFieldOffsetInBits,
                               Context.getTargetInfo().getCharAlign()));
    AppendPadding(PadSize);
  }
  uint64_t FieldSize = Field->getBitWidthValue(Context);
  llvm::APInt FieldValue = CI->getValue();
  // Promote the size of FieldValue if necessary.
  // FIXME: This should never occur, but currently it can because initializer
  // constants are cast to bool, and because clang is not enforcing bitfield
  // width limits.
  if (FieldSize > FieldValue.getBitWidth())
    FieldValue = FieldValue.zext(FieldSize);
  // Truncate the size of FieldValue to the bit field size.
  if (FieldSize < FieldValue.getBitWidth())
    FieldValue = FieldValue.trunc(FieldSize);
  NextFieldOffsetInBits = Context.toBits(NextFieldOffsetInChars);
  if (FieldOffset < NextFieldOffsetInBits) {
    // Either part of the field or the entire field can go into the previous
    // byte.
    assert(!Elements.empty() && "Elements can't be empty!");
    unsigned BitsInPreviousByte = NextFieldOffsetInBits - FieldOffset;
    bool FitsCompletelyInPreviousByte =
      BitsInPreviousByte >= FieldValue.getBitWidth();
    llvm::APInt Tmp = FieldValue;
    if (!FitsCompletelyInPreviousByte) {
      unsigned NewFieldWidth = FieldSize - BitsInPreviousByte;
      if (CGM.getTargetData().isBigEndian()) {
        Tmp = Tmp.lshr(NewFieldWidth);
        Tmp = Tmp.trunc(BitsInPreviousByte);
        // We want the remaining high bits.
        FieldValue = FieldValue.trunc(NewFieldWidth);
      } else {
        Tmp = Tmp.trunc(BitsInPreviousByte);
        // We want the remaining low bits.
        FieldValue = FieldValue.lshr(BitsInPreviousByte);
        FieldValue = FieldValue.trunc(NewFieldWidth);
      }
    }
    Tmp = Tmp.zext(CharWidth);
    if (CGM.getTargetData().isBigEndian()) {
      if (FitsCompletelyInPreviousByte)
        Tmp = Tmp.shl(BitsInPreviousByte - FieldValue.getBitWidth());
    } else {
      Tmp = Tmp.shl(CharWidth - BitsInPreviousByte);
    }
    // 'or' in the bits that go into the previous byte.
    llvm::Value *LastElt = Elements.back();
    if (llvm::ConstantInt *Val = dyn_cast<llvm::ConstantInt>(LastElt))
      Tmp |= Val->getValue();
    else {
      assert(isa<llvm::UndefValue>(LastElt));
      // If there is an undef field that we're adding to, it can either be a
      // scalar undef (in which case, we just replace it with our field) or it
      // is an array. If it is an array, we have to pull one byte off the
      // array so that the other undef bytes stay around.
      if (!isa<llvm::IntegerType>(LastElt->getType())) {
        // The undef padding will be a multibyte array, create a new smaller
        // padding and then a hole for our i8 to get plopped into.
        assert(isa<llvm::ArrayType>(LastElt->getType()) &&
               "Expected array padding of undefs");
        llvm::ArrayType *AT = cast<llvm::ArrayType>(LastElt->getType());
        assert(AT->getElementType()->isIntegerTy(CharWidth) &&
               AT->getNumElements() != 0 &&
               "Expected non-empty array padding of undefs");
        // Remove the padding array.
        NextFieldOffsetInChars -= CharUnits::fromQuantity(AT->getNumElements());
        Elements.pop_back();
        // Add the padding back in two chunks.
        AppendPadding(CharUnits::fromQuantity(AT->getNumElements()-1));
        AppendPadding(CharUnits::One());
        assert(isa<llvm::UndefValue>(Elements.back()) &&
               Elements.back()->getType()->isIntegerTy(CharWidth) &&
               "Padding addition didn't work right");
      }
    }
    Elements.back() = llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp);
    if (FitsCompletelyInPreviousByte)
      return;
  }
  while (FieldValue.getBitWidth() > CharWidth) {
    llvm::APInt Tmp;
    if (CGM.getTargetData().isBigEndian()) {
      // We want the high bits.
      Tmp =
        FieldValue.lshr(FieldValue.getBitWidth() - CharWidth).trunc(CharWidth);
    } else {
      // We want the low bits.
      Tmp = FieldValue.trunc(CharWidth);
      FieldValue = FieldValue.lshr(CharWidth);
    }
    Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(), Tmp));
    ++NextFieldOffsetInChars;
    FieldValue = FieldValue.trunc(FieldValue.getBitWidth() - CharWidth);
  }
  assert(FieldValue.getBitWidth() > 0 &&
         "Should have at least one bit left!");
  assert(FieldValue.getBitWidth() <= CharWidth &&
         "Should not have more than a byte left!");
  if (FieldValue.getBitWidth() < CharWidth) {
    if (CGM.getTargetData().isBigEndian()) {
      unsigned BitWidth = FieldValue.getBitWidth();
      FieldValue = FieldValue.zext(CharWidth) << (CharWidth - BitWidth);
    } else
      FieldValue = FieldValue.zext(CharWidth);
  }
  // Append the last element.
  Elements.push_back(llvm::ConstantInt::get(CGM.getLLVMContext(),
                                            FieldValue));
  ++NextFieldOffsetInChars;
}
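
/// Append PadSize bytes of undef padding, as a single i8 or an array of i8.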
void ConstStructBuilder::AppendPadding(CharUnits PadSize) {
  if (PadSize.isZero())
    return;
  llvm::Type *Ty = CGM.Int8Ty;
  if (PadSize > CharUnits::One())
    Ty = llvm::ArrayType::get(Ty, PadSize.getQuantity());
  llvm::Constant *C = llvm::UndefValue::get(Ty);
  Elements.push_back(C);
  assert(getAlignment(C) == CharUnits::One() &&
         "Padding must have 1 byte alignment!");
  NextFieldOffsetInChars += getSizeInChars(C);
}

void ConstStructBuilder::AppendTailPadding(CharUnits RecordSize) {
  assert(NextFieldOffsetInChars <= RecordSize &&
         "Size mismatch!");
  AppendPadding(RecordSize - NextFieldOffsetInChars);
}
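
/// Rebuild the element list assuming a packed (byte-aligned) layout,
/// materializing explicit undef padding where the unpacked layout relied on
/// natural alignment.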
void ConstStructBuilder::ConvertStructToPacked() {
  SmallVector<llvm::Constant *, 16> PackedElements;
  CharUnits ElementOffsetInChars = CharUnits::Zero();
  for (unsigned i = 0, e = Elements.size(); i != e; ++i) {
    llvm::Constant *C = Elements[i];
    CharUnits ElementAlign = CharUnits::fromQuantity(
      CGM.getTargetData().getABITypeAlignment(C->getType()));
    CharUnits AlignedElementOffsetInChars =
      ElementOffsetInChars.RoundUpToAlignment(ElementAlign);
    if (AlignedElementOffsetInChars > ElementOffsetInChars) {
      // We need some padding.
      CharUnits NumChars =
        AlignedElementOffsetInChars - ElementOffsetInChars;
      llvm::Type *Ty = CGM.Int8Ty;
      if (NumChars > CharUnits::One())
        Ty = llvm::ArrayType::get(Ty, NumChars.getQuantity());
      llvm::Constant *Padding = llvm::UndefValue::get(Ty);
      PackedElements.push_back(Padding);
      ElementOffsetInChars += getSizeInChars(Padding);
    }
    PackedElements.push_back(C);
    ElementOffsetInChars += getSizeInChars(C);
  }
  assert(ElementOffsetInChars == NextFieldOffsetInChars &&
         "Packing the struct changed its size!");
  Elements.swap(PackedElements);
  LLVMStructAlignment = CharUnits::One();
  Packed = true;
}
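
/// Build the constant for a record from an init list, emitting each field's
/// initializer (or a null constant for fields without one). Returns false if
/// any initializer cannot be emitted as a constant.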
bool ConstStructBuilder::Build(InitListExpr *ILE) {
  if (ILE->initializesStdInitializerList()) {
    //CGM.ErrorUnsupported(ILE, "global std::initializer_list");
    return false;
  }
  RecordDecl *RD = ILE->getType()->getAs<RecordType>()->getDecl();
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  unsigned FieldNo = 0;
  unsigned ElementNo = 0;
  const FieldDecl *LastFD = 0;
  bool IsMsStruct = RD->hasAttr<MsStructAttr>();
  for (RecordDecl::field_iterator Field = RD->field_begin(),
       FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
    if (IsMsStruct) {
      // Zero-length bitfields following non-bitfield members are
      // ignored:
      if (CGM.getContext().ZeroBitfieldFollowsNonBitfield((*Field), LastFD)) {
        --FieldNo;
        continue;
      }
      LastFD = (*Field);
    }
    // If this is a union, skip all the fields that aren't being initialized.
    if (RD->isUnion() && ILE->getInitializedFieldInUnion() != *Field)
      continue;
    // Don't emit anonymous bitfields, they just affect layout.
    if (Field->isUnnamedBitfield()) {
      LastFD = (*Field);
      continue;
    }
    // Get the initializer. A struct can include fields without initializers,
    // we just use explicit null values for them.
    llvm::Constant *EltInit;
    if (ElementNo < ILE->getNumInits())
      EltInit = CGM.EmitConstantExpr(ILE->getInit(ElementNo++),
                                     Field->getType(), CGF);
    else
      EltInit = CGM.EmitNullConstant(Field->getType());
    if (!EltInit)
      return false;
    if (!Field->isBitField()) {
      // Handle non-bitfield members.
      AppendField(*Field, Layout.getFieldOffset(FieldNo), EltInit);
    } else {
      // Otherwise we have a bitfield.
      AppendBitField(*Field, Layout.getFieldOffset(FieldNo),
                     cast<llvm::ConstantInt>(EltInit));
    }
  }
  return true;
}

namespace {
struct BaseInfo {
  BaseInfo(const CXXRecordDecl *Decl, CharUnits Offset, unsigned Index)
    : Decl(Decl), Offset(Offset), Index(Index) {
  }

  const CXXRecordDecl *Decl;
  CharUnits Offset;
  unsigned Index;

  bool operator<(const BaseInfo &O) const { return Offset < O.Offset; }
};
}

void ConstStructBuilder::Build(const APValue &Val, const RecordDecl *RD,
                               bool IsPrimaryBase, llvm::Constant *VTable,
                               const CXXRecordDecl *VTableClass,
                               CharUnits Offset) {
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  if (const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
    // Add a vtable pointer, if we need one and it hasn't already been added.
    if (CD->isDynamicClass() && !IsPrimaryBase)
      AppendVTablePointer(BaseSubobject(CD, Offset), VTable, VTableClass);
    // Accumulate and sort bases, in order to visit them in address order,
    // which may not be the same as declaration order.
    llvm::SmallVector<BaseInfo, 8> Bases;
    Bases.reserve(CD->getNumBases());
    unsigned BaseNo = 0;
    for (CXXRecordDecl::base_class_const_iterator Base = CD->bases_begin(),
         BaseEnd = CD->bases_end(); Base != BaseEnd; ++Base, ++BaseNo) {
      assert(!Base->isVirtual() && "should not have virtual bases here");
      const CXXRecordDecl *BD = Base->getType()->getAsCXXRecordDecl();
      CharUnits BaseOffset = Layout.getBaseClassOffset(BD);
      Bases.push_back(BaseInfo(BD, BaseOffset, BaseNo));
    }
    std::stable_sort(Bases.begin(), Bases.end());
    for (unsigned I = 0, N = Bases.size(); I != N; ++I) {
      BaseInfo &Base = Bases[I];
      // Build the base class subobject at the appropriately-offset location
      // within this object.
      NextFieldOffsetInChars -= Base.Offset;
      bool IsPrimaryBase = Layout.getPrimaryBase() == Base.Decl;
      Build(Val.getStructBase(Base.Index), Base.Decl, IsPrimaryBase,
            VTable, VTableClass, Offset + Base.Offset);
      NextFieldOffsetInChars += Base.Offset;
    }
  }
  unsigned FieldNo = 0;
  const FieldDecl *LastFD = 0;
  bool IsMsStruct = RD->hasAttr<MsStructAttr>();
  for (RecordDecl::field_iterator Field = RD->field_begin(),
       FieldEnd = RD->field_end(); Field != FieldEnd; ++Field, ++FieldNo) {
    if (IsMsStruct) {
      // Zero-length bitfields following non-bitfield members are
      // ignored:
      if (CGM.getContext().ZeroBitfieldFollowsNonBitfield((*Field), LastFD)) {
        --FieldNo;
        continue;
      }
      LastFD = (*Field);
    }
    // If this is a union, skip all the fields that aren't being initialized.
    if (RD->isUnion() && Val.getUnionField() != *Field)
      continue;
    // Don't emit anonymous bitfields, they just affect layout.
    if (Field->isUnnamedBitfield()) {
      LastFD = (*Field);
      continue;
    }
    // Emit the value of the initializer.
    const APValue &FieldValue =
      RD->isUnion() ? Val.getUnionValue() : Val.getStructField(FieldNo);
    llvm::Constant *EltInit =
      CGM.EmitConstantValue(FieldValue, Field->getType(), CGF);
    assert(EltInit && "EmitConstantValue can't fail");
    if (!Field->isBitField()) {
      // Handle non-bitfield members.
      AppendField(*Field, Layout.getFieldOffset(FieldNo), EltInit);
    } else {
      // Otherwise we have a bitfield.
      AppendBitField(*Field, Layout.getFieldOffset(FieldNo),
                     cast<llvm::ConstantInt>(EltInit));
    }
  }
}
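
/// Finish the struct: add any required tail padding, re-pack if the LLVM
/// layout would otherwise be too large, and build the final ConstantStruct.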
llvm::Constant *ConstStructBuilder::Finalize(QualType Ty) {
  RecordDecl *RD = Ty->getAs<RecordType>()->getDecl();
  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  CharUnits LayoutSizeInChars = Layout.getSize();
  if (NextFieldOffsetInChars > LayoutSizeInChars) {
    // If the struct is bigger than the size of the record type,
    // we must have a flexible array member at the end.
    assert(RD->hasFlexibleArrayMember() &&
           "Must have flexible array member if struct is bigger than type!");
    // No tail padding is necessary.
  } else {
    // Append tail padding if necessary.
    AppendTailPadding(LayoutSizeInChars);
    CharUnits LLVMSizeInChars =
      NextFieldOffsetInChars.RoundUpToAlignment(LLVMStructAlignment);
    // Check if we need to convert the struct to a packed struct.
    if (NextFieldOffsetInChars <= LayoutSizeInChars &&
        LLVMSizeInChars > LayoutSizeInChars) {
      assert(!Packed && "Size mismatch!");
      ConvertStructToPacked();
      assert(NextFieldOffsetInChars <= LayoutSizeInChars &&
             "Converting to packed did not help!");
    }
    assert(LayoutSizeInChars == NextFieldOffsetInChars &&
           "Tail padding mismatch!");
  }
  // Pick the type to use. If the type is layout identical to the ConvertType
  // type then use it, otherwise use whatever the builder produced for us.
  llvm::StructType *STy =
    llvm::ConstantStruct::getTypeForElements(CGM.getLLVMContext(),
                                             Elements, Packed);
  llvm::Type *ValTy = CGM.getTypes().ConvertType(Ty);
  if (llvm::StructType *ValSTy = dyn_cast<llvm::StructType>(ValTy)) {
    if (ValSTy->isLayoutIdentical(STy))
      STy = ValSTy;
  }
  llvm::Constant *Result = llvm::ConstantStruct::get(STy, Elements);
  assert(NextFieldOffsetInChars.RoundUpToAlignment(getAlignment(Result)) ==
         getSizeInChars(Result) && "Size mismatch!");
  return Result;
}

llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
                                                CodeGenFunction *CGF,
                                                InitListExpr *ILE) {
  ConstStructBuilder Builder(CGM, CGF);
  if (!Builder.Build(ILE))
    return 0;
  return Builder.Finalize(ILE->getType());
}

llvm::Constant *ConstStructBuilder::BuildStruct(CodeGenModule &CGM,
                                                CodeGenFunction *CGF,
                                                const APValue &Val,
                                                QualType ValTy) {
  ConstStructBuilder Builder(CGM, CGF);
  const RecordDecl *RD = ValTy->castAs<RecordType>()->getDecl();
  const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD);
  llvm::Constant *VTable = 0;
  if (CD && CD->isDynamicClass())
    VTable = CGM.getVTables().GetAddrOfVTable(CD);
  Builder.Build(Val, RD, false, VTable, CD, CharUnits::Zero());
  return Builder.Finalize(ValTy);
}

//===----------------------------------------------------------------------===//
//                             ConstExprEmitter
//===----------------------------------------------------------------------===//

/// This class only needs to handle two cases:
/// 1) Literals (this is used by APValue emission to emit literals).
/// 2) Arrays, structs and unions (outside C++11 mode, we don't currently
///    constant fold these types).
class ConstExprEmitter :
  public StmtVisitor<ConstExprEmitter, llvm::Constant*> {
  CodeGenModule &CGM;
  CodeGenFunction *CGF;
  llvm::LLVMContext &VMContext;
public:
  ConstExprEmitter(CodeGenModule &cgm, CodeGenFunction *cgf)
    : CGM(cgm), CGF(cgf), VMContext(cgm.getLLVMContext()) {
  }

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  llvm::Constant *VisitStmt(Stmt *S) {
    return 0;
  }

  llvm::Constant *VisitParenExpr(ParenExpr *PE) {
    return Visit(PE->getSubExpr());
  }

  llvm::Constant *
  VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *PE) {
    return Visit(PE->getReplacement());
  }

  llvm::Constant *VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
    return Visit(GE->getResultExpr());
  }

  llvm::Constant *VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
    return Visit(E->getInitializer());
  }

  llvm::Constant *VisitCastExpr(CastExpr* E) {
    Expr *subExpr = E->getSubExpr();
    llvm::Constant *C = CGM.EmitConstantExpr(subExpr, subExpr->getType(), CGF);
    if (!C) return 0;
    llvm::Type *destType = ConvertType(E->getType());
    switch (E->getCastKind()) {
    case CK_ToUnion: {
      // GCC cast to union extension
      assert(E->getType()->isUnionType() &&
             "Destination type is not union type!");
      // Build a struct with the union sub-element as the first member,
      // and padded to the appropriate size
      SmallVector<llvm::Constant*, 2> Elts;
      SmallVector<llvm::Type*, 2> Types;
      Elts.push_back(C);
      Types.push_back(C->getType());
      unsigned CurSize = CGM.getTargetData().getTypeAllocSize(C->getType());
      unsigned TotalSize = CGM.getTargetData().getTypeAllocSize(destType);
      assert(CurSize <= TotalSize && "Union size mismatch!");
      if (unsigned NumPadBytes = TotalSize - CurSize) {
        llvm::Type *Ty = CGM.Int8Ty;
        if (NumPadBytes > 1)
          Ty = llvm::ArrayType::get(Ty, NumPadBytes);
        Elts.push_back(llvm::UndefValue::get(Ty));
        Types.push_back(Ty);
      }
      llvm::StructType* STy =
        llvm::StructType::get(C->getType()->getContext(), Types, false);
      return llvm::ConstantStruct::get(STy, Elts);
    }
    case CK_LValueToRValue:
    case CK_AtomicToNonAtomic:
    case CK_NonAtomicToAtomic:
    case CK_NoOp:
      return C;
    case CK_Dependent: llvm_unreachable("saw dependent cast!");
    case CK_ReinterpretMemberPointer:
    case CK_DerivedToBaseMemberPointer:
    case CK_BaseToDerivedMemberPointer:
      return CGM.getCXXABI().EmitMemberPointerConversion(E, C);
    // These will never be supported.
    case CK_ObjCObjectLValueCast:
    case CK_ARCProduceObject:
    case CK_ARCConsumeObject:
    case CK_ARCReclaimReturnedObject:
    case CK_ARCExtendBlockObject:
    case CK_CopyAndAutoreleaseBlockObject:
      return 0;
    // These don't need to be handled here because Evaluate knows how to
    // evaluate them in the cases where they can be folded.
    case CK_BitCast:
    case CK_ToVoid:
    case CK_Dynamic:
    case CK_LValueBitCast:
    case CK_NullToMemberPointer:
    case CK_UserDefinedConversion:
    case CK_ConstructorConversion:
    case CK_CPointerToObjCPointerCast:
    case CK_BlockPointerToObjCPointerCast:
    case CK_AnyPointerToBlockPointerCast:
    case CK_ArrayToPointerDecay:
    case CK_FunctionToPointerDecay:
    case CK_BaseToDerived:
    case CK_DerivedToBase:
    case CK_UncheckedDerivedToBase:
    case CK_MemberPointerToBoolean:
    case CK_VectorSplat:
    case CK_FloatingRealToComplex:
    case CK_FloatingComplexToReal:
    case CK_FloatingComplexToBoolean:
    case CK_FloatingComplexCast:
    case CK_FloatingComplexToIntegralComplex:
    case CK_IntegralRealToComplex:
    case CK_IntegralComplexToReal:
    case CK_IntegralComplexToBoolean:
    case CK_IntegralComplexCast:
    case CK_IntegralComplexToFloatingComplex:
    case CK_PointerToIntegral:
    case CK_PointerToBoolean:
    case CK_NullToPointer:
    case CK_IntegralCast:
    case CK_IntegralToPointer:
    case CK_IntegralToBoolean:
    case CK_IntegralToFloating:
    case CK_FloatingToIntegral:
    case CK_FloatingToBoolean:
    case CK_FloatingCast:
      return 0;
    }
    llvm_unreachable("Invalid CastKind");
  }

  llvm::Constant *VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    return Visit(DAE->getExpr());
  }

  llvm::Constant *VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E) {
    return Visit(E->GetTemporaryExpr());
  }
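
  /// Emit a constant for an array init list, filling uninitialized elements
  /// with the array filler (or a null constant) and falling back to a packed
  /// struct if the element constants do not all share the array element type.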
  llvm::Constant *EmitArrayInitialization(InitListExpr *ILE) {
    unsigned NumInitElements = ILE->getNumInits();
    if (NumInitElements == 1 && ILE->getType() == ILE->getInit(0)->getType() &&
        (isa<StringLiteral>(ILE->getInit(0)) ||
         isa<ObjCEncodeExpr>(ILE->getInit(0))))
      return Visit(ILE->getInit(0));
    llvm::ArrayType *AType =
      cast<llvm::ArrayType>(ConvertType(ILE->getType()));
    llvm::Type *ElemTy = AType->getElementType();
    unsigned NumElements = AType->getNumElements();
    // Initialising an array requires us to automatically
    // initialise any elements that have not been initialised explicitly.
    unsigned NumInitableElts = std::min(NumInitElements, NumElements);
    // Copy initializer elements.
    std::vector<llvm::Constant*> Elts;
    Elts.reserve(NumInitableElts + NumElements);
    bool RewriteType = false;
    for (unsigned i = 0; i < NumInitableElts; ++i) {
      Expr *Init = ILE->getInit(i);
      llvm::Constant *C = CGM.EmitConstantExpr(Init, Init->getType(), CGF);
      if (!C)
        return 0;
      RewriteType |= (C->getType() != ElemTy);
      Elts.push_back(C);
    }
    // Initialize remaining array elements.
    // FIXME: This doesn't handle member pointers correctly!
    llvm::Constant *fillC;
    if (Expr *filler = ILE->getArrayFiller())
      fillC = CGM.EmitConstantExpr(filler, filler->getType(), CGF);
    else
      fillC = llvm::Constant::getNullValue(ElemTy);
    if (!fillC)
      return 0;
    RewriteType |= (fillC->getType() != ElemTy);
    Elts.resize(NumElements, fillC);
    if (RewriteType) {
      // FIXME: Try to avoid packing the array
      std::vector<llvm::Type*> Types;
      Types.reserve(NumInitableElts + NumElements);
      for (unsigned i = 0, e = Elts.size(); i < e; ++i)
        Types.push_back(Elts[i]->getType());
      llvm::StructType *SType = llvm::StructType::get(AType->getContext(),
                                                      Types, true);
      return llvm::ConstantStruct::get(SType, Elts);
    }
    return llvm::ConstantArray::get(AType, Elts);
  }

  llvm::Constant *EmitStructInitialization(InitListExpr *ILE) {
    return ConstStructBuilder::BuildStruct(CGM, CGF, ILE);
  }

  llvm::Constant *EmitUnionInitialization(InitListExpr *ILE) {
    return ConstStructBuilder::BuildStruct(CGM, CGF, ILE);
  }

  llvm::Constant *VisitImplicitValueInitExpr(ImplicitValueInitExpr* E) {
    return CGM.EmitNullConstant(E->getType());
  }

  llvm::Constant *VisitInitListExpr(InitListExpr *ILE) {
    if (ILE->getType()->isArrayType())
      return EmitArrayInitialization(ILE);
    if (ILE->getType()->isRecordType())
      return EmitStructInitialization(ILE);
    if (ILE->getType()->isUnionType())
      return EmitUnionInitialization(ILE);
    return 0;
  }

  llvm::Constant *VisitCXXConstructExpr(CXXConstructExpr *E) {
    if (!E->getConstructor()->isTrivial())
      return 0;
    QualType Ty = E->getType();
    // FIXME: We should not have to call getBaseElementType here.
    const RecordType *RT =
      CGM.getContext().getBaseElementType(Ty)->getAs<RecordType>();
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    // If the class doesn't have a trivial destructor, we can't emit it as a
    // constant expr.
    if (!RD->hasTrivialDestructor())
      return 0;
    // Only copy and default constructors can be trivial.
    if (E->getNumArgs()) {
      assert(E->getNumArgs() == 1 && "trivial ctor with > 1 argument");
      assert(E->getConstructor()->isCopyOrMoveConstructor() &&
             "trivial ctor has argument but isn't a copy/move ctor");
      Expr *Arg = E->getArg(0);
      assert(CGM.getContext().hasSameUnqualifiedType(Ty, Arg->getType()) &&
             "argument to copy ctor is of wrong type");
      return Visit(Arg);
    }
    return CGM.EmitNullConstant(Ty);
  }

  llvm::Constant *VisitStringLiteral(StringLiteral *E) {
    return CGM.GetConstantArrayFromStringLiteral(E);
  }

  llvm::Constant *VisitObjCEncodeExpr(ObjCEncodeExpr *E) {
    // This must be an @encode initializing an array in a static initializer.
    // Don't emit it as the address of the string, emit the string data itself
    // as an inline array.
    std::string Str;
    CGM.getContext().getObjCEncodingForType(E->getEncodedType(), Str);
    const ConstantArrayType *CAT = cast<ConstantArrayType>(E->getType());
    // Resize the string to the right size, adding zeros at the end, or
    // truncating as needed.
    Str.resize(CAT->getSize().getZExtValue(), '\0');
    return llvm::ConstantDataArray::getString(VMContext, Str, false);
  }

  llvm::Constant *VisitUnaryExtension(const UnaryOperator *E) {
    return Visit(E->getSubExpr());
  }

  // Utility methods
  llvm::Type *ConvertType(QualType T) {
    return CGM.getTypes().ConvertType(T);
  }

public:
  llvm::Constant *EmitLValue(APValue::LValueBase LVBase) {
    if (const ValueDecl *Decl = LVBase.dyn_cast<const ValueDecl*>()) {
      if (Decl->hasAttr<WeakRefAttr>())
        return CGM.GetWeakRefReference(Decl);
      if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Decl))
        return CGM.GetAddrOfFunction(FD);
      if (const VarDecl* VD = dyn_cast<VarDecl>(Decl)) {
        // We can never refer to a variable with local storage.
        if (!VD->hasLocalStorage()) {
          if (VD->isFileVarDecl() || VD->hasExternalStorage())
            return CGM.GetAddrOfGlobalVar(VD);
          else if (VD->isLocalVarDecl()) {
            assert(CGF && "Can't access static local vars without CGF");
            return CGF->GetAddrOfStaticLocalVar(VD);
          }
        }
      }
      return 0;
    }

    Expr *E = const_cast<Expr*>(LVBase.get<const Expr*>());
    switch (E->getStmtClass()) {
    default: break;
    case Expr::CompoundLiteralExprClass: {
      // Note that due to the nature of compound literals, this is guaranteed
      // to be the only use of the variable, so we just generate it here.
      CompoundLiteralExpr *CLE = cast<CompoundLiteralExpr>(E);
      llvm::Constant* C = CGM.EmitConstantExpr(CLE->getInitializer(),
                                               CLE->getType(), CGF);
      // FIXME: "Leaked" on failure.
      if (C)
        C = new llvm::GlobalVariable(CGM.getModule(), C->getType(),
                                     E->getType().isConstant(CGM.getContext()),
                                     llvm::GlobalValue::InternalLinkage,
                                     C, ".compoundliteral", 0, false,
                          CGM.getContext().getTargetAddressSpace(E->getType()));
      return C;
    }
    case Expr::StringLiteralClass:
      return CGM.GetAddrOfConstantStringFromLiteral(cast<StringLiteral>(E));
    case Expr::ObjCEncodeExprClass:
      return CGM.GetAddrOfConstantStringFromObjCEncode(cast<ObjCEncodeExpr>(E));
    case Expr::ObjCStringLiteralClass: {
      ObjCStringLiteral* SL = cast<ObjCStringLiteral>(E);
      llvm::Constant *C =
        CGM.getObjCRuntime().GenerateConstantString(SL->getString());
      return llvm::ConstantExpr::getBitCast(C, ConvertType(E->getType()));
    }
    case Expr::PredefinedExprClass: {
      unsigned Type = cast<PredefinedExpr>(E)->getIdentType();
      if (CGF) {
        LValue Res = CGF->EmitPredefinedLValue(cast<PredefinedExpr>(E));
        return cast<llvm::Constant>(Res.getAddress());
      } else if (Type == PredefinedExpr::PrettyFunction) {
        return CGM.GetAddrOfConstantCString("top level", ".tmp");
      }
      return CGM.GetAddrOfConstantCString("", ".tmp");
    }
    case Expr::AddrLabelExprClass: {
      assert(CGF && "Invalid address of label expression outside function.");
      llvm::Constant *Ptr =
        CGF->GetAddrOfLabel(cast<AddrLabelExpr>(E)->getLabel());
      return llvm::ConstantExpr::getBitCast(Ptr, ConvertType(E->getType()));
    }
    case Expr::CallExprClass: {
      CallExpr* CE = cast<CallExpr>(E);
      unsigned builtin = CE->isBuiltinCall();
      if (builtin !=
            Builtin::BI__builtin___CFStringMakeConstantString &&
          builtin !=
            Builtin::BI__builtin___NSStringMakeConstantString)
        break;
      const Expr *Arg = CE->getArg(0)->IgnoreParenCasts();
      const StringLiteral *Literal = cast<StringLiteral>(Arg);
      if (builtin ==
            Builtin::BI__builtin___NSStringMakeConstantString) {
        return CGM.getObjCRuntime().GenerateConstantString(Literal);
      }
      // FIXME: need to deal with UCN conversion issues.
      return CGM.GetAddrOfConstantCFString(Literal);
    }
    case Expr::BlockExprClass: {
      std::string FunctionName;
      if (CGF)
        FunctionName = CGF->CurFn->getName();
      else
        FunctionName = "global";
      return CGM.GetAddrOfGlobalBlock(cast<BlockExpr>(E), FunctionName.c_str());
    }
    case Expr::CXXTypeidExprClass: {
      CXXTypeidExpr *Typeid = cast<CXXTypeidExpr>(E);
      QualType T;
      if (Typeid->isTypeOperand())
        T = Typeid->getTypeOperand();
      else
        T = Typeid->getExprOperand()->getType();
      return CGM.GetAddrOfRTTIDescriptor(T);
    }
    }
    return 0;
  }
};

} // end anonymous namespace.
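
/// Try to emit a constant initializer for the given variable, preferring an
/// already-evaluated APValue and falling back to ConstExprEmitter.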
llvm::Constant *CodeGenModule::EmitConstantInit(const VarDecl &D,
                                                CodeGenFunction *CGF) {
  if (const APValue *Value = D.evaluateValue())
    return EmitConstantValue(*Value, D.getType(), CGF);
  // FIXME: Implement C++11 [basic.start.init]p2: if the initializer of a
  // reference is a constant expression, and the reference binds to a temporary,
  // then constant initialization is performed. ConstExprEmitter will
  // incorrectly emit a prvalue constant in this case, and the calling code
  // interprets that as the (pointer) value of the reference, rather than the
  // desired value of the referee.
  if (D.getType()->isReferenceType())
    return 0;
  const Expr *E = D.getInit();
  assert(E && "No initializer to emit");
  llvm::Constant* C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));
  if (C && C->getType()->isIntegerTy(1)) {
    llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
    C = llvm::ConstantExpr::getZExt(C, BoolTy);
  }
  return C;
}

llvm::Constant *CodeGenModule::EmitConstantExpr(const Expr *E,
                                                QualType DestType,
                                                CodeGenFunction *CGF) {
  Expr::EvalResult Result;
  bool Success = false;
  if (DestType->isReferenceType())
    Success = E->EvaluateAsLValue(Result, Context);
  else
    Success = E->EvaluateAsRValue(Result, Context);
  if (Success && !Result.HasSideEffects)
    return EmitConstantValue(Result.Val, DestType, CGF);
  llvm::Constant* C = ConstExprEmitter(*this, CGF).Visit(const_cast<Expr*>(E));
  if (C && C->getType()->isIntegerTy(1)) {
    llvm::Type *BoolTy = getTypes().ConvertTypeForMem(E->getType());
    C = llvm::ConstantExpr::getZExt(C, BoolTy);
  }
  return C;
}
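
/// Emit an llvm::Constant for an already-evaluated APValue of the given
/// destination type.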
llvm::Constant *CodeGenModule::EmitConstantValue(const APValue &Value,
                                                 QualType DestType,
                                                 CodeGenFunction *CGF) {
  switch (Value.getKind()) {
  case APValue::Uninitialized:
    llvm_unreachable("Constant expressions should be initialized.");
  case APValue::LValue: {
    llvm::Type *DestTy = getTypes().ConvertTypeForMem(DestType);
    llvm::Constant *Offset =
      llvm::ConstantInt::get(Int64Ty, Value.getLValueOffset().getQuantity());
    llvm::Constant *C;
    if (APValue::LValueBase LVBase = Value.getLValueBase()) {
      // An array can be represented as an lvalue referring to the base.
      if (isa<llvm::ArrayType>(DestTy)) {
        assert(Offset->isNullValue() && "offset on array initializer");
        return ConstExprEmitter(*this, CGF).Visit(
          const_cast<Expr*>(LVBase.get<const Expr*>()));
      }
      C = ConstExprEmitter(*this, CGF).EmitLValue(LVBase);
      // Apply offset if necessary.
      if (!Offset->isNullValue()) {
        llvm::Constant *Casted = llvm::ConstantExpr::getBitCast(C, Int8PtrTy);
        Casted = llvm::ConstantExpr::getGetElementPtr(Casted, Offset);
        C = llvm::ConstantExpr::getBitCast(Casted, C->getType());
      }
      // Convert to the appropriate type; this could be an lvalue for
      // an integer.
      if (isa<llvm::PointerType>(DestTy))
        return llvm::ConstantExpr::getBitCast(C, DestTy);
      return llvm::ConstantExpr::getPtrToInt(C, DestTy);
    } else {
      C = Offset;
      // Convert to the appropriate type; this could be an lvalue for
      // an integer.
      if (isa<llvm::PointerType>(DestTy))
        return llvm::ConstantExpr::getIntToPtr(C, DestTy);
      // If the types don't match this should only be a truncate.
      if (C->getType() != DestTy)
        return llvm::ConstantExpr::getTrunc(C, DestTy);
      return C;
    }
  }
  case APValue::Int: {
    llvm::Constant *C = llvm::ConstantInt::get(VMContext,
                                               Value.getInt());
    if (C->getType()->isIntegerTy(1)) {
      llvm::Type *BoolTy = getTypes().ConvertTypeForMem(DestType);
      C = llvm::ConstantExpr::getZExt(C, BoolTy);
    }
    return C;
  }
  case APValue::ComplexInt: {
    llvm::Constant *Complex[2];
    Complex[0] = llvm::ConstantInt::get(VMContext,
                                        Value.getComplexIntReal());
    Complex[1] = llvm::ConstantInt::get(VMContext,
                                        Value.getComplexIntImag());
    // FIXME: the target may want to specify that this is packed.
    llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
                                                  Complex[1]->getType(),
                                                  NULL);
    return llvm::ConstantStruct::get(STy, Complex);
  }
  case APValue::Float: {
    const llvm::APFloat &Init = Value.getFloat();
    if (&Init.getSemantics() == &llvm::APFloat::IEEEhalf)
      return llvm::ConstantInt::get(VMContext, Init.bitcastToAPInt());
    else
      return llvm::ConstantFP::get(VMContext, Init);
  }
  case APValue::ComplexFloat: {
    llvm::Constant *Complex[2];
    Complex[0] = llvm::ConstantFP::get(VMContext,
                                       Value.getComplexFloatReal());
    Complex[1] = llvm::ConstantFP::get(VMContext,
                                       Value.getComplexFloatImag());
    // FIXME: the target may want to specify that this is packed.
    llvm::StructType *STy = llvm::StructType::get(Complex[0]->getType(),
                                                  Complex[1]->getType(),
                                                  NULL);
    return llvm::ConstantStruct::get(STy, Complex);
  }
  case APValue::Vector: {
    SmallVector<llvm::Constant *, 4> Inits;
    unsigned NumElts = Value.getVectorLength();
    for (unsigned i = 0; i != NumElts; ++i) {
      const APValue &Elt = Value.getVectorElt(i);
      if (Elt.isInt())
        Inits.push_back(llvm::ConstantInt::get(VMContext, Elt.getInt()));
      else
        Inits.push_back(llvm::ConstantFP::get(VMContext, Elt.getFloat()));
    }
    return llvm::ConstantVector::get(Inits);
  }
  case APValue::AddrLabelDiff: {
    const AddrLabelExpr *LHSExpr = Value.getAddrLabelDiffLHS();
    const AddrLabelExpr *RHSExpr = Value.getAddrLabelDiffRHS();
    llvm::Constant *LHS = EmitConstantExpr(LHSExpr, LHSExpr->getType(), CGF);
    llvm::Constant *RHS = EmitConstantExpr(RHSExpr, RHSExpr->getType(), CGF);
    // Compute difference
    llvm::Type *ResultType = getTypes().ConvertType(DestType);
    LHS = llvm::ConstantExpr::getPtrToInt(LHS, IntPtrTy);
    RHS = llvm::ConstantExpr::getPtrToInt(RHS, IntPtrTy);
    llvm::Constant *AddrLabelDiff = llvm::ConstantExpr::getSub(LHS, RHS);
    // LLVM is a bit sensitive about the exact format of the
    // address-of-label difference; make sure to truncate after
    // the subtraction.
    return llvm::ConstantExpr::getTruncOrBitCast(AddrLabelDiff, ResultType);
  }
  case APValue::Struct:
  case APValue::Union:
    return ConstStructBuilder::BuildStruct(*this, CGF, Value, DestType);
  case APValue::Array: {
    const ArrayType *CAT = Context.getAsArrayType(DestType);
    unsigned NumElements = Value.getArraySize();
    unsigned NumInitElts = Value.getArrayInitializedElts();
    std::vector<llvm::Constant*> Elts;
    Elts.reserve(NumElements);
    // Emit array filler, if there is one.
    llvm::Constant *Filler = 0;
    if (Value.hasArrayFiller())
      Filler = EmitConstantValue(Value.getArrayFiller(),
                                 CAT->getElementType(), CGF);
    // Emit initializer elements.
    llvm::Type *CommonElementType = 0;
    for (unsigned I = 0; I < NumElements; ++I) {
      llvm::Constant *C = Filler;
      if (I < NumInitElts)
        C = EmitConstantValue(Value.getArrayInitializedElt(I),
                              CAT->getElementType(), CGF);
      if (I == 0)
        CommonElementType = C->getType();
      else if (C->getType() != CommonElementType)
        CommonElementType = 0;
      Elts.push_back(C);
    }
    if (!CommonElementType) {
      // FIXME: Try to avoid packing the array
      std::vector<llvm::Type*> Types;
      Types.reserve(NumElements);
      for (unsigned i = 0, e = Elts.size(); i < e; ++i)
        Types.push_back(Elts[i]->getType());
      llvm::StructType *SType = llvm::StructType::get(VMContext, Types, true);
      return llvm::ConstantStruct::get(SType, Elts);
    }
    llvm::ArrayType *AType =
      llvm::ArrayType::get(CommonElementType, NumElements);
    return llvm::ConstantArray::get(AType, Elts);
  }
  case APValue::MemberPointer:
    return getCXXABI().EmitMemberPointer(Value, DestType);
  }
  llvm_unreachable("Unknown APValue kind");
}

llvm::Constant *
CodeGenModule::GetAddrOfConstantCompoundLiteral(const CompoundLiteralExpr *E) {
  assert(E->isFileScope() && "not a file-scope compound literal expr");
  return ConstExprEmitter(*this, 0).EmitLValue(E);
}

llvm::Constant *
CodeGenModule::getMemberPointerConstant(const UnaryOperator *uo) {
  // Member pointer constants always have a very particular form.
  const MemberPointerType *type = cast<MemberPointerType>(uo->getType());
  const ValueDecl *decl = cast<DeclRefExpr>(uo->getSubExpr())->getDecl();
  // A member function pointer.
  if (const CXXMethodDecl *method = dyn_cast<CXXMethodDecl>(decl))
    return getCXXABI().EmitMemberPointer(method);
  // Otherwise, a member data pointer.
  uint64_t fieldOffset = getContext().getFieldOffset(decl);
  CharUnits chars = getContext().toCharUnitsFromBits((int64_t) fieldOffset);
  return getCXXABI().EmitMemberDataPointer(type, chars);
}
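
/// Recursively fill in the bytes of Elements that correspond to null data
/// member pointers within T, which are represented as -1 rather than 0 in the
/// Itanium ABI.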
static void
FillInNullDataMemberPointers(CodeGenModule &CGM, QualType T,
                             SmallVectorImpl<llvm::Constant *> &Elements,
                             uint64_t StartOffset) {
  assert(StartOffset % CGM.getContext().getCharWidth() == 0 &&
         "StartOffset not byte aligned!");
  if (CGM.getTypes().isZeroInitializable(T))
    return;
  if (const ConstantArrayType *CAT =
        CGM.getContext().getAsConstantArrayType(T)) {
    QualType ElementTy = CAT->getElementType();
    uint64_t ElementSize = CGM.getContext().getTypeSize(ElementTy);
    for (uint64_t I = 0, E = CAT->getSize().getZExtValue(); I != E; ++I) {
      FillInNullDataMemberPointers(CGM, ElementTy, Elements,
                                   StartOffset + I * ElementSize);
    }
  } else if (const RecordType *RT = T->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
    // Go through all bases and fill in any null pointer to data members.
    for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
         E = RD->bases_end(); I != E; ++I) {
      if (I->isVirtual()) {
        // Ignore virtual bases.
        continue;
      }
      const CXXRecordDecl *BaseDecl =
        cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
      // Ignore empty bases.
      if (BaseDecl->isEmpty())
        continue;
      // Ignore bases that don't have any pointer to data members.
      if (CGM.getTypes().isZeroInitializable(BaseDecl))
        continue;
      uint64_t BaseOffset = Layout.getBaseClassOffsetInBits(BaseDecl);
      FillInNullDataMemberPointers(CGM, I->getType(),
                                   Elements, StartOffset + BaseOffset);
    }
    // Visit all fields.
    unsigned FieldNo = 0;
    for (RecordDecl::field_iterator I = RD->field_begin(),
         E = RD->field_end(); I != E; ++I, ++FieldNo) {
      QualType FieldType = I->getType();
      if (CGM.getTypes().isZeroInitializable(FieldType))
        continue;
      uint64_t FieldOffset = StartOffset + Layout.getFieldOffset(FieldNo);
      FillInNullDataMemberPointers(CGM, FieldType, Elements, FieldOffset);
    }
  } else {
    assert(T->isMemberPointerType() && "Should only see member pointers here!");
    assert(!T->getAs<MemberPointerType>()->getPointeeType()->isFunctionType() &&
           "Should only see pointers to data members here!");
    CharUnits StartIndex = CGM.getContext().toCharUnitsFromBits(StartOffset);
    CharUnits EndIndex = StartIndex + CGM.getContext().getTypeSizeInChars(T);
    // FIXME: hardcodes Itanium member pointer representation!
    llvm::Constant *NegativeOne =
      llvm::ConstantInt::get(CGM.Int8Ty, -1ULL, /*isSigned*/true);
    // Fill in the null data member pointer.
    for (CharUnits I = StartIndex; I != EndIndex; ++I)
      Elements[I.getQuantity()] = NegativeOne;
  }
}

static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
                                               llvm::Type *baseType,
                                               const CXXRecordDecl *base);
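
/// Emit the null constant for a record, either as a complete object or as a
/// base subobject, recursing into bases and fields that are not trivially
/// zero-initializable.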
static llvm::Constant *EmitNullConstant(CodeGenModule &CGM,
                                        const CXXRecordDecl *record,
                                        bool asCompleteObject) {
  const CGRecordLayout &layout = CGM.getTypes().getCGRecordLayout(record);
  llvm::StructType *structure =
    (asCompleteObject ? layout.getLLVMType()
                      : layout.getBaseSubobjectLLVMType());
  unsigned numElements = structure->getNumElements();
  std::vector<llvm::Constant *> elements(numElements);
  // Fill in all the bases.
  for (CXXRecordDecl::base_class_const_iterator
         I = record->bases_begin(), E = record->bases_end(); I != E; ++I) {
    if (I->isVirtual()) {
      // Ignore virtual bases; if we're laying out for a complete
      // object, we'll lay these out later.
      continue;
    }
    const CXXRecordDecl *base =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    // Ignore empty bases.
    if (base->isEmpty())
      continue;
    unsigned fieldIndex = layout.getNonVirtualBaseLLVMFieldNo(base);
    llvm::Type *baseType = structure->getElementType(fieldIndex);
    elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
  }
  // Fill in all the fields.
  for (RecordDecl::field_iterator I = record->field_begin(),
       E = record->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    // Fill in non-bitfields. (Bitfields always use a zero pattern, which we
    // will fill in later.)
    if (!field->isBitField()) {
      unsigned fieldIndex = layout.getLLVMFieldNo(field);
      elements[fieldIndex] = CGM.EmitNullConstant(field->getType());
    }
    // For unions, stop after the first named field.
    if (record->isUnion() && field->getDeclName())
      break;
  }
  // Fill in the virtual bases, if we're working with the complete object.
  if (asCompleteObject) {
    for (CXXRecordDecl::base_class_const_iterator
           I = record->vbases_begin(), E = record->vbases_end(); I != E; ++I) {
      const CXXRecordDecl *base =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      // Ignore empty bases.
      if (base->isEmpty())
        continue;
      unsigned fieldIndex = layout.getVirtualBaseIndex(base);
      // We might have already laid this field out.
      if (elements[fieldIndex]) continue;
      llvm::Type *baseType = structure->getElementType(fieldIndex);
      elements[fieldIndex] = EmitNullConstantForBase(CGM, baseType, base);
    }
  }
  // Now go through all other fields and zero them out.
  for (unsigned i = 0; i != numElements; ++i) {
    if (!elements[i])
      elements[i] = llvm::Constant::getNullValue(structure->getElementType(i));
  }
  return llvm::ConstantStruct::get(structure, elements);
}

/// Emit the null constant for a base subobject.
static llvm::Constant *EmitNullConstantForBase(CodeGenModule &CGM,
                                               llvm::Type *baseType,
                                               const CXXRecordDecl *base) {
  const CGRecordLayout &baseLayout = CGM.getTypes().getCGRecordLayout(base);
  // Just zero out bases that don't have any pointer to data members.
  if (baseLayout.isZeroInitializableAsBase())
    return llvm::Constant::getNullValue(baseType);
  // If the base type is a struct, we can just use its null constant.
  if (isa<llvm::StructType>(baseType)) {
    return EmitNullConstant(CGM, base, /*complete*/ false);
  }
  // Otherwise, some bases are represented as arrays of i8 if the size
  // of the base is smaller than its corresponding LLVM type. Figure
  // out how many elements this base array has.
  llvm::ArrayType *baseArrayType = cast<llvm::ArrayType>(baseType);
  unsigned numBaseElements = baseArrayType->getNumElements();
  // Fill in null data member pointers.
  SmallVector<llvm::Constant *, 16> baseElements(numBaseElements);
  FillInNullDataMemberPointers(CGM, CGM.getContext().getTypeDeclType(base),
                               baseElements, 0);
  // Now go through all other elements and zero them out.
  if (numBaseElements) {
    llvm::Constant *i8_zero = llvm::Constant::getNullValue(CGM.Int8Ty);
    for (unsigned i = 0; i != numBaseElements; ++i) {
      if (!baseElements[i])
        baseElements[i] = i8_zero;
    }
  }
  return llvm::ConstantArray::get(baseArrayType, baseElements);
}
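
/// Return the constant used to zero-initialize a value of type T, using an
/// all-zero aggregate when possible and an explicit constant otherwise (for
/// example, for types containing pointers to data members).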
llvm::Constant *CodeGenModule::EmitNullConstant(QualType T) {
  if (getTypes().isZeroInitializable(T))
    return llvm::Constant::getNullValue(getTypes().ConvertTypeForMem(T));
  if (const ConstantArrayType *CAT = Context.getAsConstantArrayType(T)) {
    llvm::ArrayType *ATy =
      cast<llvm::ArrayType>(getTypes().ConvertTypeForMem(T));
    QualType ElementTy = CAT->getElementType();
    llvm::Constant *Element = EmitNullConstant(ElementTy);
    unsigned NumElements = CAT->getSize().getZExtValue();
    if (Element->isNullValue())
      return llvm::ConstantAggregateZero::get(ATy);
    SmallVector<llvm::Constant *, 8> Array(NumElements, Element);
    return llvm::ConstantArray::get(ATy, Array);
  }
  if (const RecordType *RT = T->getAs<RecordType>()) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    return ::EmitNullConstant(*this, RD, /*complete object*/ true);
  }
  assert(T->isMemberPointerType() && "Should only see member pointers here!");
  assert(!T->getAs<MemberPointerType>()->getPointeeType()->isFunctionType() &&
         "Should only see pointers to data members here!");
  // Itanium C++ ABI 2.3:
  //   A NULL pointer is represented as -1.
  return getCXXABI().EmitNullMemberPointer(T->castAs<MemberPointerType>());
}

llvm::Constant *
CodeGenModule::EmitNullConstantForBase(const CXXRecordDecl *Record) {
  return ::EmitNullConstant(*this, Record, false);
}