//===- MachineFunction.cpp ------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Collect native machine code information for a function. This allows
// target-specific information about the generated code to be stored with each
// function.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/WinEHFuncInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSlotTracker.h"
#include "llvm/IR/Value.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCSymbol.h"
#include "llvm/MC/SectionKind.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DOTGraphTraits.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <string>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "codegen"

static cl::opt<unsigned>
    AlignAllFunctions("align-all-functions",
                      cl::desc("Force the alignment of all functions."),
                      cl::init(0), cl::Hidden);

static const char *getPropertyName(MachineFunctionProperties::Property Prop) {
  using P = MachineFunctionProperties::Property;
  switch(Prop) {
  case P::FailedISel: return "FailedISel";
  case P::IsSSA: return "IsSSA";
  case P::Legalized: return "Legalized";
  case P::NoPHIs: return "NoPHIs";
  case P::NoVRegs: return "NoVRegs";
  case P::RegBankSelected: return "RegBankSelected";
  case P::Selected: return "Selected";
  case P::TracksLiveness: return "TracksLiveness";
  }
  llvm_unreachable("Invalid machine function property");
}

void MachineFunctionProperties::print(raw_ostream &OS) const {
  const char *Separator = "";
  for (BitVector::size_type I = 0; I < Properties.size(); ++I) {
    if (!Properties[I])
      continue;
    OS << Separator << getPropertyName(static_cast<Property>(I));
    Separator = ", ";
  }
}

//===----------------------------------------------------------------------===//
// MachineFunction implementation
//===----------------------------------------------------------------------===//

// Out-of-line virtual method.
MachineFunctionInfo::~MachineFunctionInfo() = default;

void ilist_alloc_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  MBB->getParent()->DeleteMachineBasicBlock(MBB);
}

static inline unsigned getFnStackAlignment(const TargetSubtargetInfo *STI,
                                           const Function &F) {
  if (F.hasFnAttribute(Attribute::StackAlignment))
    return F.getFnStackAlignment();
  return STI->getFrameLowering()->getStackAlignment();
}
MachineFunction::MachineFunction(const Function &F, const TargetMachine &Target,
                                 const TargetSubtargetInfo &STI,
                                 unsigned FunctionNum, MachineModuleInfo &mmi)
    : F(F), Target(Target), STI(&STI), Ctx(mmi.getContext()), MMI(mmi) {
  FunctionNumber = FunctionNum;
  init();
}

void MachineFunction::init() {
  // Assume the function starts in SSA form with correct liveness.
  Properties.set(MachineFunctionProperties::Property::IsSSA);
  Properties.set(MachineFunctionProperties::Property::TracksLiveness);
  if (STI->getRegisterInfo())
    RegInfo = new (Allocator) MachineRegisterInfo(this);
  else
    RegInfo = nullptr;

  MFInfo = nullptr;

  // We can realign the stack if the target supports it and the user hasn't
  // explicitly asked us not to.
  bool CanRealignSP = STI->getFrameLowering()->isStackRealignable() &&
                      !F.hasFnAttribute("no-realign-stack");
  FrameInfo = new (Allocator) MachineFrameInfo(
      getFnStackAlignment(STI, F), /*StackRealignable=*/CanRealignSP,
      /*ForceRealign=*/CanRealignSP &&
          F.hasFnAttribute(Attribute::StackAlignment));

  if (F.hasFnAttribute(Attribute::StackAlignment))
    FrameInfo->ensureMaxAlignment(F.getFnStackAlignment());

  ConstantPool = new (Allocator) MachineConstantPool(getDataLayout());
  Alignment = STI->getTargetLowering()->getMinFunctionAlignment();

  // FIXME: Shouldn't use pref alignment if explicit alignment is set on F.
  // FIXME: Use Function::optForSize().
  if (!F.hasFnAttribute(Attribute::OptimizeForSize))
    Alignment = std::max(Alignment,
                         STI->getTargetLowering()->getPrefFunctionAlignment());

  if (AlignAllFunctions)
    Alignment = AlignAllFunctions;

  JumpTableInfo = nullptr;

  if (isFuncletEHPersonality(classifyEHPersonality(
          F.hasPersonalityFn() ? F.getPersonalityFn() : nullptr))) {
    WinEHInfo = new (Allocator) WinEHFuncInfo();
  }

  assert(Target.isCompatibleDataLayout(getDataLayout()) &&
         "Can't create a MachineFunction using a Module with a "
         "Target-incompatible DataLayout attached\n");

  PSVManager =
      llvm::make_unique<PseudoSourceValueManager>(*(getSubtarget().
                                                        getInstrInfo()));
}

MachineFunction::~MachineFunction() {
  clear();
}

void MachineFunction::clear() {
  Properties.reset();
  // Don't call destructors on MachineInstr and MachineOperand. All of their
  // memory comes from the BumpPtrAllocator which is about to be purged.
  //
  // Do call MachineBasicBlock destructors, it contains std::vectors.
  for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
    I->Insts.clearAndLeakNodesUnsafely();

  InstructionRecycler.clear(Allocator);
  OperandRecycler.clear(Allocator);
  BasicBlockRecycler.clear(Allocator);
  CodeViewAnnotations.clear();
  VariableDbgInfos.clear();
  if (RegInfo) {
    RegInfo->~MachineRegisterInfo();
    Allocator.Deallocate(RegInfo);
  }
  if (MFInfo) {
    MFInfo->~MachineFunctionInfo();
    Allocator.Deallocate(MFInfo);
  }

  FrameInfo->~MachineFrameInfo();
  Allocator.Deallocate(FrameInfo);

  ConstantPool->~MachineConstantPool();
  Allocator.Deallocate(ConstantPool);

  if (JumpTableInfo) {
    JumpTableInfo->~MachineJumpTableInfo();
    Allocator.Deallocate(JumpTableInfo);
  }

  if (WinEHInfo) {
    WinEHInfo->~WinEHFuncInfo();
    Allocator.Deallocate(WinEHInfo);
  }
}

const DataLayout &MachineFunction::getDataLayout() const {
  return F.getParent()->getDataLayout();
}

/// Get the JumpTableInfo for this function.
/// If it does not already exist, allocate one.
MachineJumpTableInfo *MachineFunction::
getOrCreateJumpTableInfo(unsigned EntryKind) {
  if (JumpTableInfo) return JumpTableInfo;

  JumpTableInfo = new (Allocator)
      MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  return JumpTableInfo;
}

/// Should we be emitting segmented stack stuff for the function
bool MachineFunction::shouldSplitStack() const {
  return getFunction().hasFnAttribute("split-stack");
}
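// Illustrative IR for the check above (the function name is a placeholder): a
// string attribute on the IR function enables segmented stacks, e.g.
//
//   define void @f() "split-stack" {
//     ret void
//   }
//
// For such a function shouldSplitStack() returns true and the target emits a
// segmented-stack prologue.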
/// This discards all of the MachineBasicBlock numbers and recomputes them.
/// This guarantees that the MBB numbers are sequential, dense, and match the
/// ordering of the blocks within the function. If a specific MachineBasicBlock
/// is specified, only that block and those after it are renumbered.
void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  if (empty()) { MBBNumbering.clear(); return; }
  MachineFunction::iterator MBBI, E = end();
  if (MBB == nullptr)
    MBBI = begin();
  else
    MBBI = MBB->getIterator();

  // Figure out the block number this should have.
  unsigned BlockNo = 0;
  if (MBBI != begin())
    BlockNo = std::prev(MBBI)->getNumber() + 1;

  for (; MBBI != E; ++MBBI, ++BlockNo) {
    if (MBBI->getNumber() != (int)BlockNo) {
      // Remove use of the old number.
      if (MBBI->getNumber() != -1) {
        assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
               "MBB number mismatch!");
        MBBNumbering[MBBI->getNumber()] = nullptr;
      }

      // If BlockNo is already taken, set that block's number to -1.
      if (MBBNumbering[BlockNo])
        MBBNumbering[BlockNo]->setNumber(-1);

      MBBNumbering[BlockNo] = &*MBBI;
      MBBI->setNumber(BlockNo);
    }
  }

  // Okay, all the blocks are renumbered. If we have compactified the block
  // numbering, shrink MBBNumbering now.
  assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  MBBNumbering.resize(BlockNo);
}

/// Allocate a new MachineInstr. Use this instead of `new MachineInstr'.
MachineInstr *MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID,
                                                  const DebugLoc &DL,
                                                  bool NoImp) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, MCID, DL, NoImp);
}

/// Create a new MachineInstr which is a copy of the 'Orig' instruction,
/// identical in all ways except the instruction has no parent, prev, or next.
MachineInstr *
MachineFunction::CloneMachineInstr(const MachineInstr *Orig) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, *Orig);
}

MachineInstr &MachineFunction::CloneMachineInstrBundle(MachineBasicBlock &MBB,
    MachineBasicBlock::iterator InsertBefore, const MachineInstr &Orig) {
  MachineInstr *FirstClone = nullptr;
  MachineBasicBlock::const_instr_iterator I = Orig.getIterator();
  while (true) {
    MachineInstr *Cloned = CloneMachineInstr(&*I);
    MBB.insert(InsertBefore, Cloned);
    if (FirstClone == nullptr) {
      FirstClone = Cloned;
    } else {
      Cloned->bundleWithPred();
    }

    if (!I->isBundledWithSucc())
      break;
    ++I;
  }
  return *FirstClone;
}
/// Delete the given MachineInstr.
///
/// This function also serves as the MachineInstr destructor - the real
/// ~MachineInstr() destructor must be empty.
void
MachineFunction::DeleteMachineInstr(MachineInstr *MI) {
  // Strip it for parts. The operand array and the MI object itself are
  // independently recyclable.
  if (MI->Operands)
    deallocateOperandArray(MI->CapOperands, MI->Operands);
  // Don't call ~MachineInstr() which must be trivial anyway because
  // ~MachineFunction drops whole lists of MachineInstrs without calling their
  // destructors.
  InstructionRecycler.Deallocate(Allocator, MI);
}
/// Allocate a new MachineBasicBlock. Use this instead of
/// `new MachineBasicBlock'.
MachineBasicBlock *
MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) {
  return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator))
      MachineBasicBlock(*this, bb);
}

/// Delete the given MachineBasicBlock.
void
MachineFunction::DeleteMachineBasicBlock(MachineBasicBlock *MBB) {
  assert(MBB->getParent() == this && "MBB parent mismatch!");
  MBB->~MachineBasicBlock();
  BasicBlockRecycler.Deallocate(Allocator, MBB);
}

MachineMemOperand *MachineFunction::getMachineMemOperand(
    MachinePointerInfo PtrInfo, MachineMemOperand::Flags f, uint64_t s,
    unsigned base_alignment, const AAMDNodes &AAInfo, const MDNode *Ranges,
    SyncScope::ID SSID, AtomicOrdering Ordering,
    AtomicOrdering FailureOrdering) {
  return new (Allocator)
      MachineMemOperand(PtrInfo, f, s, base_alignment, AAInfo, Ranges,
                        SSID, Ordering, FailureOrdering);
}
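// Illustrative use (FrameIdx is a placeholder frame index): a backend loading
// from a fixed stack slot typically builds its memory operand with this
// overload, letting the trailing AA/ordering parameters take their defaults:
//
//   MachineFrameInfo &MFI = MF.getFrameInfo();
//   MachineMemOperand *MMO = MF.getMachineMemOperand(
//       MachinePointerInfo::getFixedStack(MF, FrameIdx),
//       MachineMemOperand::MOLoad, MFI.getObjectSize(FrameIdx),
//       MFI.getObjectAlignment(FrameIdx));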
MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      int64_t Offset, uint64_t Size) {
  if (MMO->getValue())
    return new (Allocator)
        MachineMemOperand(MachinePointerInfo(MMO->getValue(),
                                             MMO->getOffset()+Offset),
                          MMO->getFlags(), Size, MMO->getBaseAlignment(),
                          AAMDNodes(), nullptr, MMO->getSyncScopeID(),
                          MMO->getOrdering(), MMO->getFailureOrdering());
  return new (Allocator)
      MachineMemOperand(MachinePointerInfo(MMO->getPseudoValue(),
                                           MMO->getOffset()+Offset),
                        MMO->getFlags(), Size, MMO->getBaseAlignment(),
                        AAMDNodes(), nullptr, MMO->getSyncScopeID(),
                        MMO->getOrdering(), MMO->getFailureOrdering());
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      const AAMDNodes &AAInfo) {
  MachinePointerInfo MPI = MMO->getValue() ?
      MachinePointerInfo(MMO->getValue(), MMO->getOffset()) :
      MachinePointerInfo(MMO->getPseudoValue(), MMO->getOffset());

  return new (Allocator)
      MachineMemOperand(MPI, MMO->getFlags(), MMO->getSize(),
                        MMO->getBaseAlignment(), AAInfo,
                        MMO->getRanges(), MMO->getSyncScopeID(),
                        MMO->getOrdering(), MMO->getFailureOrdering());
}

MachineInstr::mmo_iterator
MachineFunction::allocateMemRefsArray(unsigned long Num) {
  return Allocator.Allocate<MachineMemOperand *>(Num);
}

std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractLoadMemRefs(MachineInstr::mmo_iterator Begin,
                                    MachineInstr::mmo_iterator End) {
  // Count the number of load mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isLoad())
      ++Num;

  // Allocate a new array and populate it with the load information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isLoad()) {
      if (!(*I)->isStore())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the store flag.
        MachineMemOperand *JustLoad =
            getMachineMemOperand((*I)->getPointerInfo(),
                                 (*I)->getFlags() & ~MachineMemOperand::MOStore,
                                 (*I)->getSize(), (*I)->getBaseAlignment(),
                                 (*I)->getAAInfo(), nullptr,
                                 (*I)->getSyncScopeID(), (*I)->getOrdering(),
                                 (*I)->getFailureOrdering());
        Result[Index] = JustLoad;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}
std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractStoreMemRefs(MachineInstr::mmo_iterator Begin,
                                     MachineInstr::mmo_iterator End) {
  // Count the number of store mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isStore())
      ++Num;

  // Allocate a new array and populate it with the store information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isStore()) {
      if (!(*I)->isLoad())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the load flag.
        MachineMemOperand *JustStore =
            getMachineMemOperand((*I)->getPointerInfo(),
                                 (*I)->getFlags() & ~MachineMemOperand::MOLoad,
                                 (*I)->getSize(), (*I)->getBaseAlignment(),
                                 (*I)->getAAInfo(), nullptr,
                                 (*I)->getSyncScopeID(), (*I)->getOrdering(),
                                 (*I)->getFailureOrdering());
        Result[Index] = JustStore;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}
const char *MachineFunction::createExternalSymbolName(StringRef Name) {
  char *Dest = Allocator.Allocate<char>(Name.size() + 1);
  std::copy(Name.begin(), Name.end(), Dest);
  Dest[Name.size()] = 0;
  return Dest;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void MachineFunction::dump() const {
  print(dbgs());
}
#endif

StringRef MachineFunction::getName() const {
  return getFunction().getName();
}

void MachineFunction::print(raw_ostream &OS, const SlotIndexes *Indexes) const {
  OS << "# Machine code for function " << getName() << ": ";
  getProperties().print(OS);
  OS << '\n';

  // Print Frame Information
  FrameInfo->print(*this, OS);

  // Print JumpTable Information
  if (JumpTableInfo)
    JumpTableInfo->print(OS);

  // Print Constant Pool
  ConstantPool->print(OS);

  const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();

  if (RegInfo && !RegInfo->livein_empty()) {
    OS << "Function Live Ins: ";
    for (MachineRegisterInfo::livein_iterator
         I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
      OS << printReg(I->first, TRI);
      if (I->second)
        OS << " in " << printReg(I->second, TRI);
      if (std::next(I) != E)
        OS << ", ";
    }
    OS << '\n';
  }

  ModuleSlotTracker MST(getFunction().getParent());
  MST.incorporateFunction(getFunction());
  for (const auto &BB : *this) {
    OS << '\n';
    // If we print the whole function, print it at its most verbose level.
    BB.print(OS, MST, Indexes, /*IsStandalone=*/true);
  }

  OS << "\n# End machine code for function " << getName() << ".\n\n";
}

namespace llvm {

  template<>
  struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {
    DOTGraphTraits(bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {}

    static std::string getGraphName(const MachineFunction *F) {
      return ("CFG for '" + F->getName() + "' function").str();
    }

    std::string getNodeLabel(const MachineBasicBlock *Node,
                             const MachineFunction *Graph) {
      std::string OutStr;
      {
        raw_string_ostream OSS(OutStr);

        if (isSimple()) {
          OSS << printMBBReference(*Node);
          if (const BasicBlock *BB = Node->getBasicBlock())
            OSS << ": " << BB->getName();
        } else
          Node->print(OSS);
      }

      if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());

      // Process string output to make it nicer...
      for (unsigned i = 0; i != OutStr.length(); ++i)
        if (OutStr[i] == '\n') { // Left justify
          OutStr[i] = '\\';
          OutStr.insert(OutStr.begin()+i+1, 'l');
        }

      return OutStr;
    }
  };

} // end namespace llvm

void MachineFunction::viewCFG() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName());
#else
  errs() << "MachineFunction::viewCFG is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

void MachineFunction::viewCFGOnly() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName(), true);
#else
  errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}
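// Both viewers are convenient to call from a debugger while stopped inside a
// machine pass, for example:
//
//   (gdb) call MF->viewCFGOnly()
//
// As the error messages above note, this requires a debug (!NDEBUG) build and
// an installed graph viewer such as Graphviz.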
/// Add the specified physical register as a live-in value and
/// create a corresponding virtual register for it.
unsigned MachineFunction::addLiveIn(unsigned PReg,
                                    const TargetRegisterClass *RC) {
  MachineRegisterInfo &MRI = getRegInfo();
  unsigned VReg = MRI.getLiveInVirtReg(PReg);
  if (VReg) {
    const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
    (void)VRegRC;
    // A physical register can be added several times.
    // Between two calls, the register class of the related virtual register
    // may have been constrained to match some operation constraints.
    // In that case, check that the current register class includes the
    // physical register and is a sub class of the specified RC.
    assert((VRegRC == RC || (VRegRC->contains(PReg) &&
                             RC->hasSubClassEq(VRegRC))) &&
           "Register class mismatch!");
    return VReg;
  }
  VReg = MRI.createVirtualRegister(RC);
  MRI.addLiveIn(PReg, VReg);
  return VReg;
}
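// Illustrative call site (the register and class names are placeholders): a
// target's argument-lowering code typically does something like
//
//   unsigned VReg = MF.addLiveIn(IncomingPhysReg, &TargetRegClass);
//   SDValue ArgValue = DAG.getCopyFromReg(Chain, DL, VReg, RegVT);
//
// Calling addLiveIn again with the same physical register returns the same
// virtual register via the early return above.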
/// Return the MCSymbol for the specified non-empty jump table.
/// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
/// normal 'L' label is returned.
MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                        bool isLinkerPrivate) const {
  const DataLayout &DL = getDataLayout();
  assert(JumpTableInfo && "No jump tables");
  assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");

  StringRef Prefix = isLinkerPrivate ? DL.getLinkerPrivateGlobalPrefix()
                                     : DL.getPrivateGlobalPrefix();
  SmallString<60> Name;
  raw_svector_ostream(Name)
      << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  return Ctx.getOrCreateSymbol(Name);
}
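// For example, with a private prefix of ".L", function number 7 and jump
// table index 3, this produces the symbol ".LJTI7_3".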
/// Return a function-local symbol to represent the PIC base.
MCSymbol *MachineFunction::getPICBaseSymbol() const {
  const DataLayout &DL = getDataLayout();
  return Ctx.getOrCreateSymbol(Twine(DL.getPrivateGlobalPrefix()) +
                               Twine(getFunctionNumber()) + "$pb");
}
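// For example, with a private prefix of "L" and function number 1 this yields
// the symbol "L1$pb", used e.g. by 32-bit x86 PIC lowering as its picbase
// label.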
/// \name Exception Handling
/// \{

LandingPadInfo &
MachineFunction::getOrCreateLandingPadInfo(MachineBasicBlock *LandingPad) {
  unsigned N = LandingPads.size();
  for (unsigned i = 0; i < N; ++i) {
    LandingPadInfo &LP = LandingPads[i];
    if (LP.LandingPadBlock == LandingPad)
      return LP;
  }

  LandingPads.push_back(LandingPadInfo(LandingPad));
  return LandingPads[N];
}

void MachineFunction::addInvoke(MachineBasicBlock *LandingPad,
                                MCSymbol *BeginLabel, MCSymbol *EndLabel) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.BeginLabels.push_back(BeginLabel);
  LP.EndLabels.push_back(EndLabel);
}

MCSymbol *MachineFunction::addLandingPad(MachineBasicBlock *LandingPad) {
  MCSymbol *LandingPadLabel = Ctx.createTempSymbol();
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.LandingPadLabel = LandingPadLabel;
  return LandingPadLabel;
}

void MachineFunction::addCatchTypeInfo(MachineBasicBlock *LandingPad,
                                       ArrayRef<const GlobalValue *> TyInfo) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  for (unsigned N = TyInfo.size(); N; --N)
    LP.TypeIds.push_back(getTypeIDFor(TyInfo[N - 1]));
}

void MachineFunction::addFilterTypeInfo(MachineBasicBlock *LandingPad,
                                        ArrayRef<const GlobalValue *> TyInfo) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  std::vector<unsigned> IdsInFilter(TyInfo.size());
  for (unsigned I = 0, E = TyInfo.size(); I != E; ++I)
    IdsInFilter[I] = getTypeIDFor(TyInfo[I]);
  LP.TypeIds.push_back(getFilterIDFor(IdsInFilter));
}

void MachineFunction::tidyLandingPads(DenseMap<MCSymbol*, uintptr_t> *LPMap) {
  for (unsigned i = 0; i != LandingPads.size(); ) {
    LandingPadInfo &LandingPad = LandingPads[i];
    if (LandingPad.LandingPadLabel &&
        !LandingPad.LandingPadLabel->isDefined() &&
        (!LPMap || (*LPMap)[LandingPad.LandingPadLabel] == 0))
      LandingPad.LandingPadLabel = nullptr;

    // Special case: we *should* emit LPs with null LP MBB. This indicates
    // "nounwind" case.
    if (!LandingPad.LandingPadLabel && LandingPad.LandingPadBlock) {
      LandingPads.erase(LandingPads.begin() + i);
      continue;
    }

    for (unsigned j = 0, e = LandingPads[i].BeginLabels.size(); j != e; ++j) {
      MCSymbol *BeginLabel = LandingPad.BeginLabels[j];
      MCSymbol *EndLabel = LandingPad.EndLabels[j];
      if ((BeginLabel->isDefined() ||
           (LPMap && (*LPMap)[BeginLabel] != 0)) &&
          (EndLabel->isDefined() ||
           (LPMap && (*LPMap)[EndLabel] != 0))) continue;

      LandingPad.BeginLabels.erase(LandingPad.BeginLabels.begin() + j);
      LandingPad.EndLabels.erase(LandingPad.EndLabels.begin() + j);
      --j;
      --e;
    }

    // Remove landing pads with no try-ranges.
    if (LandingPads[i].BeginLabels.empty()) {
      LandingPads.erase(LandingPads.begin() + i);
      continue;
    }

    // If there is no landing pad, ensure that the list of typeids is empty.
    // If the only typeid is a cleanup, this is the same as having no typeids.
    if (!LandingPad.LandingPadBlock ||
        (LandingPad.TypeIds.size() == 1 && !LandingPad.TypeIds[0]))
      LandingPad.TypeIds.clear();

    ++i;
  }
}

void MachineFunction::addCleanup(MachineBasicBlock *LandingPad) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.TypeIds.push_back(0);
}

void MachineFunction::addSEHCatchHandler(MachineBasicBlock *LandingPad,
                                         const Function *Filter,
                                         const BlockAddress *RecoverBA) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  SEHHandler Handler;
  Handler.FilterOrFinally = Filter;
  Handler.RecoverBA = RecoverBA;
  LP.SEHHandlers.push_back(Handler);
}

void MachineFunction::addSEHCleanupHandler(MachineBasicBlock *LandingPad,
                                           const Function *Cleanup) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  SEHHandler Handler;
  Handler.FilterOrFinally = Cleanup;
  Handler.RecoverBA = nullptr;
  LP.SEHHandlers.push_back(Handler);
}

void MachineFunction::setCallSiteLandingPad(MCSymbol *Sym,
                                            ArrayRef<unsigned> Sites) {
  LPadToCallSiteMap[Sym].append(Sites.begin(), Sites.end());
}

unsigned MachineFunction::getTypeIDFor(const GlobalValue *TI) {
  for (unsigned i = 0, N = TypeInfos.size(); i != N; ++i)
    if (TypeInfos[i] == TI) return i + 1;

  TypeInfos.push_back(TI);
  return TypeInfos.size();
}

int MachineFunction::getFilterIDFor(std::vector<unsigned> &TyIds) {
  // If the new filter coincides with the tail of an existing filter, then
  // re-use the existing filter. Folding filters more than this requires
  // re-ordering filters and/or their elements - probably not worth it.
  for (std::vector<unsigned>::iterator I = FilterEnds.begin(),
       E = FilterEnds.end(); I != E; ++I) {
    unsigned i = *I, j = TyIds.size();

    while (i && j)
      if (FilterIds[--i] != TyIds[--j])
        goto try_next;

    if (!j)
      // The new filter coincides with range [i, end) of the existing filter.
      return -(1 + i);

try_next:;
  }

  // Add the new filter.
  int FilterID = -(1 + FilterIds.size());
  FilterIds.reserve(FilterIds.size() + TyIds.size() + 1);
  FilterIds.insert(FilterIds.end(), TyIds.begin(), TyIds.end());
  FilterEnds.push_back(FilterIds.size());
  FilterIds.push_back(0); // terminator
  return FilterID;
}
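// Worked example of the encoding above: starting from empty tables, requesting
// the filter {2, 3} stores FilterIds = {2, 3, 0}, FilterEnds = {2} and returns
// -1. A later request for {3} coincides with the tail of that filter and
// returns -2 without growing the tables; requesting {2, 3} again returns -1.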
void llvm::addLandingPadInfo(const LandingPadInst &I, MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  if (const auto *PF = dyn_cast<Function>(
          I.getParent()->getParent()->getPersonalityFn()->stripPointerCasts()))
    MF.getMMI().addPersonality(PF);

  if (I.isCleanup())
    MF.addCleanup(&MBB);

  // FIXME: New EH - Add the clauses in reverse order. This isn't 100% correct,
  // but we need to do it this way because of how the DWARF EH emitter
  // processes the clauses.
  for (unsigned i = I.getNumClauses(); i != 0; --i) {
    Value *Val = I.getClause(i - 1);
    if (I.isCatch(i - 1)) {
      MF.addCatchTypeInfo(&MBB,
                          dyn_cast<GlobalValue>(Val->stripPointerCasts()));
    } else {
      // Add filters in a list.
      Constant *CVal = cast<Constant>(Val);
      SmallVector<const GlobalValue *, 4> FilterList;
      for (User::op_iterator II = CVal->op_begin(), IE = CVal->op_end();
           II != IE; ++II)
        FilterList.push_back(cast<GlobalValue>((*II)->stripPointerCasts()));

      MF.addFilterTypeInfo(&MBB, FilterList);
    }
  }
}

/// \}

//===----------------------------------------------------------------------===//
//  MachineJumpTableInfo implementation
//===----------------------------------------------------------------------===//

/// Return the size of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  // The size of a jump table entry is 4 bytes unless the entry is just the
  // address of a block, in which case it is the pointer size.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerSize();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return 8;
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return 4;
  case MachineJumpTableInfo::EK_Inline:
    return 0;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Return the alignment of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  // The alignment of a jump table entry is the alignment of int32 unless the
  // entry is just the address of a block, in which case it is the pointer
  // alignment.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerABIAlignment(0);
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return TD.getABIIntegerTypeAlignment(64);
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return TD.getABIIntegerTypeAlignment(32);
  case MachineJumpTableInfo::EK_Inline:
    return 1;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Create a new jump table entry in the jump table info.
unsigned MachineJumpTableInfo::createJumpTableIndex(
                               const std::vector<MachineBasicBlock*> &DestBBs) {
  assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  return JumpTables.size()-1;
}

/// If Old is the target of any jump tables, update the jump tables to branch
/// to New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old,
                                                  MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  for (size_t i = 0, e = JumpTables.size(); i != e; ++i)
    MadeChange |= ReplaceMBBInJumpTable(i, Old, New);
  return MadeChange;
}
/// If Old is a target of the jump tables, update the jump table to branch to
/// New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx,
                                                 MachineBasicBlock *Old,
                                                 MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  MachineJumpTableEntry &JTE = JumpTables[Idx];
  for (size_t j = 0, e = JTE.MBBs.size(); j != e; ++j)
    if (JTE.MBBs[j] == Old) {
      JTE.MBBs[j] = New;
      MadeChange = true;
    }
  return MadeChange;
}

void MachineJumpTableInfo::print(raw_ostream &OS) const {
  if (JumpTables.empty()) return;

  OS << "Jump Tables:\n";

  for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) {
    OS << printJumpTableEntryReference(i) << ": ";
    for (unsigned j = 0, f = JumpTables[i].MBBs.size(); j != f; ++j)
      OS << ' ' << printMBBReference(*JumpTables[i].MBBs[j]);
  }

  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void MachineJumpTableInfo::dump() const { print(dbgs()); }
#endif

Printable llvm::printJumpTableEntryReference(unsigned Idx) {
  return Printable([Idx](raw_ostream &OS) { OS << "%jump-table." << Idx; });
}

//===----------------------------------------------------------------------===//
//  MachineConstantPool implementation
//===----------------------------------------------------------------------===//

void MachineConstantPoolValue::anchor() {}

Type *MachineConstantPoolEntry::getType() const {
  if (isMachineConstantPoolEntry())
    return Val.MachineCPVal->getType();
  return Val.ConstVal->getType();
}

bool MachineConstantPoolEntry::needsRelocation() const {
  if (isMachineConstantPoolEntry())
    return true;
  return Val.ConstVal->needsRelocation();
}

SectionKind
MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const {
  if (needsRelocation())
    return SectionKind::getReadOnlyWithRel();
  switch (DL->getTypeAllocSize(getType())) {
  case 4:
    return SectionKind::getMergeableConst4();
  case 8:
    return SectionKind::getMergeableConst8();
  case 16:
    return SectionKind::getMergeableConst16();
  case 32:
    return SectionKind::getMergeableConst32();
  default:
    return SectionKind::getReadOnly();
  }
}

MachineConstantPool::~MachineConstantPool() {
  // A constant may be a member of both Constants and MachineCPVsSharingEntries,
  // so keep track of which we've deleted to avoid double deletions.
  DenseSet<MachineConstantPoolValue*> Deleted;
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (Constants[i].isMachineConstantPoolEntry()) {
      Deleted.insert(Constants[i].Val.MachineCPVal);
      delete Constants[i].Val.MachineCPVal;
    }
  for (DenseSet<MachineConstantPoolValue*>::iterator I =
       MachineCPVsSharingEntries.begin(), E = MachineCPVsSharingEntries.end();
       I != E; ++I) {
    if (Deleted.count(*I) == 0)
      delete *I;
  }
}

/// Test whether the given two constants can be allocated the same constant pool
/// entry.
static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
                                      const DataLayout &DL) {
  // Handle the trivial case quickly.
  if (A == B) return true;

  // If they have the same type but weren't the same constant, quickly
  // reject them.
  if (A->getType() == B->getType()) return false;

  // We can't handle structs or arrays.
  if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
      isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
    return false;

  // For now, only support constants with the same size.
  uint64_t StoreSize = DL.getTypeStoreSize(A->getType());
  if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128)
    return false;

  Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);

  // Try constant folding a bitcast of both instructions to an integer. If we
  // get two identical ConstantInt's, then we are good to share them. We use
  // the constant folding APIs to do this so that we get the benefit of
  // DataLayout.
  if (isa<PointerType>(A->getType()))
    A = ConstantFoldCastOperand(Instruction::PtrToInt,
                                const_cast<Constant *>(A), IntTy, DL);
  else if (A->getType() != IntTy)
    A = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(A),
                                IntTy, DL);
  if (isa<PointerType>(B->getType()))
    B = ConstantFoldCastOperand(Instruction::PtrToInt,
                                const_cast<Constant *>(B), IntTy, DL);
  else if (B->getType() != IntTy)
    B = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(B),
                                IntTy, DL);

  return A == B;
}
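// For example, an i32 0 and a float 0.0 have different types but the same
// 4-byte store size; bitcasting the float to i32 folds to the same uniqued
// ConstantInt 0, so the two can share one entry. Two distinct i32 constants
// have the same type and are rejected by the early check above.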
/// Create a new entry in the constant pool or return an existing one.
/// User must specify the log2 of the minimum required alignment for the object.
unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (!Constants[i].isMachineConstantPoolEntry() &&
        CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) {
      if ((unsigned)Constants[i].getAlignment() < Alignment)
        Constants[i].Alignment = Alignment;
      return i;
    }

  Constants.push_back(MachineConstantPoolEntry(C, Alignment));
  return Constants.size()-1;
}

unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  int Idx = V->getExistingMachineCPValue(this, Alignment);
  if (Idx != -1) {
    MachineCPVsSharingEntries.insert(V);
    return (unsigned)Idx;
  }

  Constants.push_back(MachineConstantPoolEntry(V, Alignment));
  return Constants.size()-1;
}

void MachineConstantPool::print(raw_ostream &OS) const {
  if (Constants.empty()) return;

  OS << "Constant Pool:\n";
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    OS << " cp#" << i << ": ";
    if (Constants[i].isMachineConstantPoolEntry())
      Constants[i].Val.MachineCPVal->print(OS);
    else
      Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false);
    OS << ", align=" << Constants[i].getAlignment();
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void MachineConstantPool::dump() const { print(dbgs()); }
#endif