//===-- MachineFunction.cpp -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Collect native machine code information for a function. This allows
// target-specific information about the generated code to be stored with each
// function.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

#define DEBUG_TYPE "codegen"

//===----------------------------------------------------------------------===//
// MachineFunction implementation
//===----------------------------------------------------------------------===//

// Out of line virtual method.
MachineFunctionInfo::~MachineFunctionInfo() {}

void ilist_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  MBB->getParent()->DeleteMachineBasicBlock(MBB);
}

MachineFunction::MachineFunction(const Function *F, const TargetMachine &TM,
                                 unsigned FunctionNum, MachineModuleInfo &mmi,
                                 GCModuleInfo *gmi)
    : Fn(F), Target(TM), STI(TM.getSubtargetImpl()), Ctx(mmi.getContext()),
      MMI(mmi), GMI(gmi) {
  if (TM.getSubtargetImpl()->getRegisterInfo())
    RegInfo = new (Allocator) MachineRegisterInfo(this);
  else
    RegInfo = nullptr;

  MFInfo = nullptr;
  FrameInfo = new (Allocator)
      MachineFrameInfo(TM, !F->hasFnAttribute("no-realign-stack"));

  if (Fn->getAttributes().hasAttribute(AttributeSet::FunctionIndex,
                                       Attribute::StackAlignment))
    FrameInfo->ensureMaxAlignment(
        Fn->getAttributes().getStackAlignment(AttributeSet::FunctionIndex));

  ConstantPool = new (Allocator) MachineConstantPool(TM);
  Alignment =
      TM.getSubtargetImpl()->getTargetLowering()->getMinFunctionAlignment();

  // FIXME: Shouldn't use pref alignment if explicit alignment is set on Fn.
  if (!Fn->getAttributes().hasAttribute(AttributeSet::FunctionIndex,
                                        Attribute::OptimizeForSize))
    Alignment = std::max(
        Alignment,
        TM.getSubtargetImpl()->getTargetLowering()->getPrefFunctionAlignment());

  FunctionNumber = FunctionNum;
  JumpTableInfo = nullptr;
}

MachineFunction::~MachineFunction() {
  // Don't call destructors on MachineInstr and MachineOperand. All of their
  // memory comes from the BumpPtrAllocator which is about to be purged.
  //
  // Do call MachineBasicBlock destructors; they contain std::vectors.
  for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
    I->Insts.clearAndLeakNodesUnsafely();

  InstructionRecycler.clear(Allocator);
  OperandRecycler.clear(Allocator);
  BasicBlockRecycler.clear(Allocator);
  if (RegInfo) {
    RegInfo->~MachineRegisterInfo();
    Allocator.Deallocate(RegInfo);
  }
  if (MFInfo) {
    MFInfo->~MachineFunctionInfo();
    Allocator.Deallocate(MFInfo);
  }

  FrameInfo->~MachineFrameInfo();
  Allocator.Deallocate(FrameInfo);

  ConstantPool->~MachineConstantPool();
  Allocator.Deallocate(ConstantPool);

  if (JumpTableInfo) {
    JumpTableInfo->~MachineJumpTableInfo();
    Allocator.Deallocate(JumpTableInfo);
  }
}

/// getOrCreateJumpTableInfo - Get the JumpTableInfo for this function; if it
/// does not already exist, allocate one.
MachineJumpTableInfo *MachineFunction::
getOrCreateJumpTableInfo(unsigned EntryKind) {
  if (JumpTableInfo) return JumpTableInfo;

  JumpTableInfo = new (Allocator)
      MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  return JumpTableInfo;
}

/// Should we be emitting segmented stack stuff for the function
bool MachineFunction::shouldSplitStack() {
  return getFunction()->hasFnAttribute("split-stack");
}

/// RenumberBlocks - This discards all of the MachineBasicBlock numbers and
/// recomputes them. This guarantees that the MBB numbers are sequential,
/// dense, and match the ordering of the blocks within the function. If a
/// specific MachineBasicBlock is specified, only that block and those after
/// it are renumbered.
void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  if (empty()) { MBBNumbering.clear(); return; }
  MachineFunction::iterator MBBI, E = end();
  if (MBB == nullptr)
    MBBI = begin();
  else
    MBBI = MBB;

  // Figure out the block number this should have.
  unsigned BlockNo = 0;
  if (MBBI != begin())
    BlockNo = std::prev(MBBI)->getNumber() + 1;

  for (; MBBI != E; ++MBBI, ++BlockNo) {
    if (MBBI->getNumber() != (int)BlockNo) {
      // Remove use of the old number.
      if (MBBI->getNumber() != -1) {
        assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
               "MBB number mismatch!");
        MBBNumbering[MBBI->getNumber()] = nullptr;
      }

      // If BlockNo is already taken, set that block's number to -1.
      if (MBBNumbering[BlockNo])
        MBBNumbering[BlockNo]->setNumber(-1);

      MBBNumbering[BlockNo] = MBBI;
      MBBI->setNumber(BlockNo);
    }
  }

  // Okay, all the blocks are renumbered. If we have compactified the block
  // numbering, shrink MBBNumbering now.
  assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  MBBNumbering.resize(BlockNo);
}

/// CreateMachineInstr - Allocate a new MachineInstr. Use this instead
/// of `new MachineInstr'.
///
MachineInstr *
MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID,
                                    DebugLoc DL, bool NoImp) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, MCID, DL, NoImp);
}

/// CloneMachineInstr - Create a new MachineInstr which is a copy of the
/// 'Orig' instruction, identical in all ways except the instruction
/// has no parent, prev, or next.
///
MachineInstr *
MachineFunction::CloneMachineInstr(const MachineInstr *Orig) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, *Orig);
}

/// DeleteMachineInstr - Delete the given MachineInstr.
///
/// This function also serves as the MachineInstr destructor - the real
/// ~MachineInstr() destructor must be empty.
void
MachineFunction::DeleteMachineInstr(MachineInstr *MI) {
  // Strip it for parts. The operand array and the MI object itself are
  // independently recyclable.
  if (MI->Operands)
    deallocateOperandArray(MI->CapOperands, MI->Operands);
  // Don't call ~MachineInstr() which must be trivial anyway because
  // ~MachineFunction drops whole lists of MachineInstrs without calling their
  // destructors.
  InstructionRecycler.Deallocate(Allocator, MI);
}

/// CreateMachineBasicBlock - Allocate a new MachineBasicBlock. Use this
/// instead of `new MachineBasicBlock'.
///
MachineBasicBlock *
MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) {
  return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator))
      MachineBasicBlock(*this, bb);
}

/// DeleteMachineBasicBlock - Delete the given MachineBasicBlock.
///
void
MachineFunction::DeleteMachineBasicBlock(MachineBasicBlock *MBB) {
  assert(MBB->getParent() == this && "MBB parent mismatch!");
  MBB->~MachineBasicBlock();
  BasicBlockRecycler.Deallocate(Allocator, MBB);
}

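/// getMachineMemOperand - Allocate a new MachineMemOperand with the given
/// pointer info, flags, size, alignment and optional AA/range metadata. The
/// object lives in the function's allocator and is freed with the function.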
MachineMemOperand *
MachineFunction::getMachineMemOperand(MachinePointerInfo PtrInfo, unsigned f,
                                      uint64_t s, unsigned base_alignment,
                                      const AAMDNodes &AAInfo,
                                      const MDNode *Ranges) {
  return new (Allocator) MachineMemOperand(PtrInfo, f, s, base_alignment,
                                           AAInfo, Ranges);
}

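/// getMachineMemOperand - Allocate a new MachineMemOperand that is a copy of
/// MMO with its offset adjusted by Offset and its size replaced by Size;
/// operands backed by a pseudo source value keep that pseudo value.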
MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      int64_t Offset, uint64_t Size) {
  if (MMO->getValue())
    return new (Allocator)
        MachineMemOperand(MachinePointerInfo(MMO->getValue(),
                                             MMO->getOffset()+Offset),
                          MMO->getFlags(), Size,
                          MMO->getBaseAlignment(), nullptr);
  return new (Allocator)
      MachineMemOperand(MachinePointerInfo(MMO->getPseudoValue(),
                                           MMO->getOffset()+Offset),
                        MMO->getFlags(), Size,
                        MMO->getBaseAlignment(), nullptr);
}

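/// allocateMemRefsArray - Allocate an array of Num MachineMemOperand pointers
/// (the memory-reference list attached to a MachineInstr) from the function's
/// allocator.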
MachineInstr::mmo_iterator
MachineFunction::allocateMemRefsArray(unsigned long Num) {
  return Allocator.Allocate<MachineMemOperand *>(Num);
}

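/// extractLoadMemRefs - Return a new memory-operand range containing only the
/// operands in [Begin, End) that describe loads. Operands that are both loads
/// and stores are cloned with the store flag cleared.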
std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractLoadMemRefs(MachineInstr::mmo_iterator Begin,
                                    MachineInstr::mmo_iterator End) {
  // Count the number of load mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isLoad())
      ++Num;

  // Allocate a new array and populate it with the load information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isLoad()) {
      if (!(*I)->isStore())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the store flag.
        MachineMemOperand *JustLoad =
            getMachineMemOperand((*I)->getPointerInfo(),
                                 (*I)->getFlags() & ~MachineMemOperand::MOStore,
                                 (*I)->getSize(), (*I)->getBaseAlignment(),
                                 (*I)->getAAInfo());
        Result[Index] = JustLoad;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

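/// extractStoreMemRefs - Return a new memory-operand range containing only the
/// operands in [Begin, End) that describe stores. Operands that are both loads
/// and stores are cloned with the load flag cleared.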
std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractStoreMemRefs(MachineInstr::mmo_iterator Begin,
                                     MachineInstr::mmo_iterator End) {
  // Count the number of store mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isStore())
      ++Num;

  // Allocate a new array and populate it with the store information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isStore()) {
      if (!(*I)->isLoad())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the load flag.
        MachineMemOperand *JustStore =
            getMachineMemOperand((*I)->getPointerInfo(),
                                 (*I)->getFlags() & ~MachineMemOperand::MOLoad,
                                 (*I)->getSize(), (*I)->getBaseAlignment(),
                                 (*I)->getAAInfo());
        Result[Index] = JustStore;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFunction::dump() const {
  print(dbgs());
}
#endif

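/// getName - Return the name of the corresponding LLVM IR function.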
StringRef MachineFunction::getName() const {
  assert(getFunction() && "No function!");
  return getFunction()->getName();
}

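/// print - Write a human-readable listing of the whole machine function to OS:
/// frame objects, jump tables, the constant pool, live-in registers and every
/// basic block (with slot indexes when Indexes is provided).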
void MachineFunction::print(raw_ostream &OS, SlotIndexes *Indexes) const {
  OS << "# Machine code for function " << getName() << ": ";
  if (RegInfo) {
    OS << (RegInfo->isSSA() ? "SSA" : "Post SSA");
    if (!RegInfo->tracksLiveness())
      OS << ", not tracking liveness";
  }
  OS << '\n';

  // Print Frame Information
  FrameInfo->print(*this, OS);

  // Print JumpTable Information
  if (JumpTableInfo)
    JumpTableInfo->print(OS);

  // Print Constant Pool
  ConstantPool->print(OS);

  const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();

  if (RegInfo && !RegInfo->livein_empty()) {
    OS << "Function Live Ins: ";
    for (MachineRegisterInfo::livein_iterator
         I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
      OS << PrintReg(I->first, TRI);
      if (I->second)
        OS << " in " << PrintReg(I->second, TRI);
      if (std::next(I) != E)
        OS << ", ";
    }
    OS << '\n';
  }

  for (const auto &BB : *this) {
    OS << '\n';
    BB.print(OS, Indexes);
  }

  OS << "\n# End machine code for function " << getName() << ".\n\n";
}

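// Graph traits used by viewCFG()/viewCFGOnly() below: node labels are either
// the block number plus IR block name (simple mode) or the full printout of
// the MachineBasicBlock.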
namespace llvm {
  template<>
  struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {

    DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}

    static std::string getGraphName(const MachineFunction *F) {
      return "CFG for '" + F->getName().str() + "' function";
    }

    std::string getNodeLabel(const MachineBasicBlock *Node,
                             const MachineFunction *Graph) {
      std::string OutStr;
      {
        raw_string_ostream OSS(OutStr);

        if (isSimple()) {
          OSS << "BB#" << Node->getNumber();
          if (const BasicBlock *BB = Node->getBasicBlock())
            OSS << ": " << BB->getName();
        } else
          Node->print(OSS);
      }

      if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());

      // Process string output to make it nicer...
      for (unsigned i = 0; i != OutStr.length(); ++i)
        if (OutStr[i] == '\n') {                            // Left justify
          OutStr[i] = '\\';
          OutStr.insert(OutStr.begin()+i+1, 'l');
        }
      return OutStr;
    }
  };
}

void MachineFunction::viewCFG() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName());
#else
  errs() << "MachineFunction::viewCFG is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

void MachineFunction::viewCFGOnly() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName(), true);
#else
  errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

/// addLiveIn - Add the specified physical register as a live-in value and
/// create a corresponding virtual register for it.
unsigned MachineFunction::addLiveIn(unsigned PReg,
                                    const TargetRegisterClass *RC) {
  MachineRegisterInfo &MRI = getRegInfo();
  unsigned VReg = MRI.getLiveInVirtReg(PReg);
  if (VReg) {
    const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
    (void)VRegRC;
    // A physical register can be added several times.
    // Between two calls, the register class of the related virtual register
    // may have been constrained to match some operation constraints.
    // In that case, check that the current register class includes the
    // physical register and is a sub class of the specified RC.
    assert((VRegRC == RC || (VRegRC->contains(PReg) &&
                             RC->hasSubClassEq(VRegRC))) &&
           "Register class mismatch!");
    return VReg;
  }
  VReg = MRI.createVirtualRegister(RC);
  MRI.addLiveIn(PReg, VReg);
  return VReg;
}

/// getJTISymbol - Return the MCSymbol for the specified non-empty jump table.
/// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
/// normal 'L' label is returned.
MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                        bool isLinkerPrivate) const {
  const DataLayout *DL = getSubtarget().getDataLayout();
  assert(JumpTableInfo && "No jump tables");
  assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");

  const char *Prefix = isLinkerPrivate ? DL->getLinkerPrivateGlobalPrefix() :
                                         DL->getPrivateGlobalPrefix();
  SmallString<60> Name;
  raw_svector_ostream(Name)
      << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  return Ctx.GetOrCreateSymbol(Name.str());
}

/// getPICBaseSymbol - Return a function-local symbol to represent the PIC
/// base.
MCSymbol *MachineFunction::getPICBaseSymbol() const {
  const DataLayout *DL = getSubtarget().getDataLayout();
  return Ctx.GetOrCreateSymbol(Twine(DL->getPrivateGlobalPrefix())+
                               Twine(getFunctionNumber())+"$pb");
}

//===----------------------------------------------------------------------===//
// MachineFrameInfo implementation
//===----------------------------------------------------------------------===//

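/// getFrameLowering - Return the target's frame lowering implementation,
/// which the methods below consult for stack alignment and realignment rules.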
const TargetFrameLowering *MachineFrameInfo::getFrameLowering() const {
  return TM.getSubtargetImpl()->getFrameLowering();
}

/// ensureMaxAlignment - Make sure the function is at least Align bytes
/// aligned.
void MachineFrameInfo::ensureMaxAlignment(unsigned Align) {
  if (!getFrameLowering()->isStackRealignable() || !RealignOption)
    assert(Align <= getFrameLowering()->getStackAlignment() &&
           "For targets without stack realignment, Align is out of limit!");
  if (MaxAlignment < Align) MaxAlignment = Align;
}

/// clampStackAlignment - Clamp the alignment if requested and emit a warning.
static inline unsigned clampStackAlignment(bool ShouldClamp, unsigned Align,
                                           unsigned StackAlign) {
  if (!ShouldClamp || Align <= StackAlign)
    return Align;
  DEBUG(dbgs() << "Warning: requested alignment " << Align
               << " exceeds the stack alignment " << StackAlign
               << " when stack realignment is off" << '\n');
  return StackAlign;
}

/// CreateStackObject - Create a new statically sized stack object, returning
/// a nonnegative identifier to represent it.
///
int MachineFrameInfo::CreateStackObject(uint64_t Size, unsigned Alignment,
                                        bool isSS, const AllocaInst *Alloca) {
  assert(Size != 0 && "Cannot allocate zero size stack objects!");
  Alignment =
      clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
                              !RealignOption,
                          Alignment, getFrameLowering()->getStackAlignment());
  Objects.push_back(StackObject(Size, Alignment, 0, false, isSS, Alloca,
                                !isSS));
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  assert(Index >= 0 && "Bad frame index!");
  ensureMaxAlignment(Alignment);
  return Index;
}

/// CreateSpillStackObject - Create a new statically sized stack object that
/// represents a spill slot, returning a nonnegative identifier to represent
/// it.
///
int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
                                             unsigned Alignment) {
  Alignment = clampStackAlignment(
      !getFrameLowering()->isStackRealignable() || !RealignOption, Alignment,
      getFrameLowering()->getStackAlignment());
  CreateStackObject(Size, Alignment, true);
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  ensureMaxAlignment(Alignment);
  return Index;
}

/// CreateVariableSizedObject - Notify the MachineFrameInfo object that a
/// variable sized object has been created. This must be created whenever a
/// variable sized object is created, whether or not the index returned is
/// actually used.
///
int MachineFrameInfo::CreateVariableSizedObject(unsigned Alignment,
                                                const AllocaInst *Alloca) {
  HasVarSizedObjects = true;
  Alignment = clampStackAlignment(
      !getFrameLowering()->isStackRealignable() || !RealignOption, Alignment,
      getFrameLowering()->getStackAlignment());
  Objects.push_back(StackObject(0, Alignment, 0, false, false, Alloca, true));
  ensureMaxAlignment(Alignment);
  return (int)Objects.size()-NumFixedObjects-1;
}

/// CreateFixedObject - Create a new object at a fixed location on the stack.
/// All fixed objects should be created before other objects are created for
/// efficiency. By default, fixed objects are immutable. This returns an
/// index with a negative value.
///
int MachineFrameInfo::CreateFixedObject(uint64_t Size, int64_t SPOffset,
                                        bool Immutable, bool isAliased) {
  assert(Size != 0 && "Cannot allocate zero size fixed stack objects!");
  // The alignment of the frame index can be determined from its offset from
  // the incoming frame position. If the frame object is at offset 32 and
  // the stack is guaranteed to be 16-byte aligned, then we know that the
  // object is 16-byte aligned.
  unsigned StackAlign = getFrameLowering()->getStackAlignment();
  unsigned Align = MinAlign(SPOffset, StackAlign);
  Align = clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
                                  !RealignOption,
                              Align, getFrameLowering()->getStackAlignment());
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
                                              /*isSS*/ false,
                                              /*Alloca*/ nullptr, isAliased));
  return -++NumFixedObjects;
}

/// CreateFixedSpillStackObject - Create a spill slot at a fixed location
/// on the stack. Returns an index with a negative value.
int MachineFrameInfo::CreateFixedSpillStackObject(uint64_t Size,
                                                  int64_t SPOffset) {
  unsigned StackAlign = getFrameLowering()->getStackAlignment();
  unsigned Align = MinAlign(SPOffset, StackAlign);
  Align = clampStackAlignment(!getFrameLowering()->isStackRealignable() ||
                                  !RealignOption,
                              Align, getFrameLowering()->getStackAlignment());
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset,
                                              /*Immutable*/ true,
                                              /*isSS*/ true,
                                              /*Alloca*/ nullptr,
                                              /*isAliased*/ false));
  return -++NumFixedObjects;
}

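/// getPristineRegs - Compute the set of callee-saved registers that are
/// "pristine" on entry to MBB: before callee-saved info is valid none are
/// pristine, in the entry block every CSR is pristine, and in any other block
/// the CSRs that are actually saved by the prologue are excluded.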
BitVector
MachineFrameInfo::getPristineRegs(const MachineBasicBlock *MBB) const {
  assert(MBB && "MBB must be valid");
  const MachineFunction *MF = MBB->getParent();
  assert(MF && "MBB must be part of a MachineFunction");
  const TargetMachine &TM = MF->getTarget();
  const TargetRegisterInfo *TRI = TM.getSubtargetImpl()->getRegisterInfo();
  BitVector BV(TRI->getNumRegs());

  // Before CSI is calculated, no registers are considered pristine. They can be
  // freely used and PEI will make sure they are saved.
  if (!isCalleeSavedInfoValid())
    return BV;

  for (const MCPhysReg *CSR = TRI->getCalleeSavedRegs(MF); CSR && *CSR; ++CSR)
    BV.set(*CSR);

  // The entry MBB always has all CSRs pristine.
  if (MBB == &MF->front())
    return BV;

  // On other MBBs the saved CSRs are not pristine.
  const std::vector<CalleeSavedInfo> &CSI = getCalleeSavedInfo();
  for (std::vector<CalleeSavedInfo>::const_iterator I = CSI.begin(),
         E = CSI.end(); I != E; ++I)
    BV.reset(I->getReg());

  return BV;
}

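/// estimateStackSize - Conservatively estimate the final stack frame size by
/// summing fixed and allocated objects with their alignment padding, plus any
/// reserved call frame, before PEI has assigned real offsets.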
unsigned MachineFrameInfo::estimateStackSize(const MachineFunction &MF) const {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  unsigned MaxAlign = getMaxAlignment();
  int Offset = 0;

  // This code is very, very similar to PEI::calculateFrameObjectOffsets().
  // It really should be refactored to share code. Until then, changes
  // should keep in mind that there's tight coupling between the two.

  for (int i = getObjectIndexBegin(); i != 0; ++i) {
    int FixedOff = -getObjectOffset(i);
    if (FixedOff > Offset) Offset = FixedOff;
  }
  for (unsigned i = 0, e = getObjectIndexEnd(); i != e; ++i) {
    if (isDeadObjectIndex(i))
      continue;
    Offset += getObjectSize(i);
    unsigned Align = getObjectAlignment(i);
    // Adjust to alignment boundary
    Offset = (Offset+Align-1)/Align*Align;

    MaxAlign = std::max(Align, MaxAlign);
  }

  if (adjustsStack() && TFI->hasReservedCallFrame(MF))
    Offset += getMaxCallFrameSize();

  // Round up the size to a multiple of the alignment. If the function has
  // any calls or alloca's, align to the target's StackAlignment value to
  // ensure that the callee's frame or the alloca data is suitably aligned;
  // otherwise, for leaf functions, align to the TransientStackAlignment
  // value.
  unsigned StackAlign;
  if (adjustsStack() || hasVarSizedObjects() ||
      (RegInfo->needsStackRealignment(MF) && getObjectIndexEnd() != 0))
    StackAlign = TFI->getStackAlignment();
  else
    StackAlign = TFI->getTransientStackAlignment();

  // If the frame pointer is eliminated, all frame offsets will be relative to
  // SP not FP. Align to MaxAlign so this works.
  StackAlign = std::max(StackAlign, MaxAlign);
  unsigned AlignMask = StackAlign - 1;
  Offset = (Offset + AlignMask) & ~uint64_t(AlignMask);

  return (unsigned)Offset;
}

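/// print - List every frame object with its index, size, alignment, and, when
/// known, its location relative to SP (adjusted by the target's offset of the
/// local area).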
void MachineFrameInfo::print(const MachineFunction &MF, raw_ostream &OS) const {
  if (Objects.empty()) return;

  const TargetFrameLowering *FI = MF.getSubtarget().getFrameLowering();
  int ValOffset = (FI ? FI->getOffsetOfLocalArea() : 0);

  OS << "Frame Objects:\n";

  for (unsigned i = 0, e = Objects.size(); i != e; ++i) {
    const StackObject &SO = Objects[i];
    OS << " fi#" << (int)(i-NumFixedObjects) << ": ";
    if (SO.Size == ~0ULL) {
      OS << "dead\n";
      continue;
    }
    if (SO.Size == 0)
      OS << "variable sized";
    else
      OS << "size=" << SO.Size;
    OS << ", align=" << SO.Alignment;

    if (i < NumFixedObjects)
      OS << ", fixed";
    if (i < NumFixedObjects || SO.SPOffset != -1) {
      int64_t Off = SO.SPOffset - ValOffset;
      OS << ", at location [SP";
      if (Off > 0)
        OS << "+" << Off;
      else if (Off < 0)
        OS << Off;
      OS << "]";
    }
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFrameInfo::dump(const MachineFunction &MF) const {
  print(MF, dbgs());
}
#endif

//===----------------------------------------------------------------------===//
// MachineJumpTableInfo implementation
//===----------------------------------------------------------------------===//

/// getEntrySize - Return the size of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  // The size of a jump table entry is 4 bytes unless the entry is just the
  // address of a block, in which case it is the pointer size.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerSize();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return 8;
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return 4;
  case MachineJumpTableInfo::EK_Inline:
    return 0;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// getEntryAlignment - Return the alignment of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  // The alignment of a jump table entry is the alignment of int32 unless the
  // entry is just the address of a block, in which case it is the pointer
  // alignment.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerABIAlignment();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return TD.getABIIntegerTypeAlignment(64);
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return TD.getABIIntegerTypeAlignment(32);
  case MachineJumpTableInfo::EK_Inline:
    return 1;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// createJumpTableIndex - Create a new jump table entry in the jump table info.
///
unsigned MachineJumpTableInfo::createJumpTableIndex(
                               const std::vector<MachineBasicBlock*> &DestBBs) {
  assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  return JumpTables.size()-1;
}

/// ReplaceMBBInJumpTables - If Old is the target of any jump tables, update
/// the jump tables to branch to New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old,
                                                  MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  for (size_t i = 0, e = JumpTables.size(); i != e; ++i)
    MadeChange |= ReplaceMBBInJumpTable(i, Old, New);
  return MadeChange;
}

/// ReplaceMBBInJumpTable - If Old is a target of the jump tables, update
/// the jump table to branch to New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx,
                                                 MachineBasicBlock *Old,
                                                 MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  MachineJumpTableEntry &JTE = JumpTables[Idx];
  for (size_t j = 0, e = JTE.MBBs.size(); j != e; ++j)
    if (JTE.MBBs[j] == Old) {
      JTE.MBBs[j] = New;
      MadeChange = true;
    }
  return MadeChange;
}

void MachineJumpTableInfo::print(raw_ostream &OS) const {
  if (JumpTables.empty()) return;

  OS << "Jump Tables:\n";

  for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) {
    OS << " jt#" << i << ": ";
    for (unsigned j = 0, f = JumpTables[i].MBBs.size(); j != f; ++j)
      OS << " BB#" << JumpTables[i].MBBs[j]->getNumber();
  }

  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineJumpTableInfo::dump() const { print(dbgs()); }
#endif

//===----------------------------------------------------------------------===//
// MachineConstantPool implementation
//===----------------------------------------------------------------------===//

void MachineConstantPoolValue::anchor() { }

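/// getDataLayout - Return the DataLayout of the target this constant pool is
/// being laid out for.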
const DataLayout *MachineConstantPool::getDataLayout() const {
  return TM.getSubtargetImpl()->getDataLayout();
}

Type *MachineConstantPoolEntry::getType() const {
  if (isMachineConstantPoolEntry())
    return Val.MachineCPVal->getType();
  return Val.ConstVal->getType();
}

unsigned MachineConstantPoolEntry::getRelocationInfo() const {
  if (isMachineConstantPoolEntry())
    return Val.MachineCPVal->getRelocationInfo();
  return Val.ConstVal->getRelocationInfo();
}

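/// getSectionKind - Classify this entry for section placement: entries whose
/// constants need relocations go in read-only-with-relocation sections, while
/// relocation-free entries go in mergeable-constant sections chosen by size.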
SectionKind
MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const {
  SectionKind Kind;
  switch (getRelocationInfo()) {
  default:
    llvm_unreachable("Unknown section kind");
  case 2:
    Kind = SectionKind::getReadOnlyWithRel();
    break;
  case 1:
    Kind = SectionKind::getReadOnlyWithRelLocal();
    break;
  case 0:
    switch (DL->getTypeAllocSize(getType())) {
    case 4:
      Kind = SectionKind::getMergeableConst4();
      break;
    case 8:
      Kind = SectionKind::getMergeableConst8();
      break;
    case 16:
      Kind = SectionKind::getMergeableConst16();
      break;
    default:
      Kind = SectionKind::getMergeableConst();
      break;
    }
  }
  return Kind;
}

MachineConstantPool::~MachineConstantPool() {
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (Constants[i].isMachineConstantPoolEntry())
      delete Constants[i].Val.MachineCPVal;
  for (DenseSet<MachineConstantPoolValue*>::iterator I =
       MachineCPVsSharingEntries.begin(), E = MachineCPVsSharingEntries.end();
       I != E; ++I)
    delete *I;
}

/// CanShareConstantPoolEntry - Test whether the given two constants
/// can be allocated the same constant pool entry.
static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
                                      const DataLayout *TD) {
  // Handle the trivial case quickly.
  if (A == B) return true;

  // If they have the same type but weren't the same constant, quickly
  // reject them.
  if (A->getType() == B->getType()) return false;

  // We can't handle structs or arrays.
  if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
      isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
    return false;

  // For now, only support constants with the same size.
  uint64_t StoreSize = TD->getTypeStoreSize(A->getType());
  if (StoreSize != TD->getTypeStoreSize(B->getType()) || StoreSize > 128)
    return false;

  Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);

  // Try constant folding a bitcast of both instructions to an integer. If we
  // get two identical ConstantInt's, then we are good to share them. We use
  // the constant folding APIs to do this so that we get the benefit of
  // DataLayout.
  if (isa<PointerType>(A->getType()))
    A = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy,
                                 const_cast<Constant*>(A), TD);
  else if (A->getType() != IntTy)
    A = ConstantFoldInstOperands(Instruction::BitCast, IntTy,
                                 const_cast<Constant*>(A), TD);

  if (isa<PointerType>(B->getType()))
    B = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy,
                                 const_cast<Constant*>(B), TD);
  else if (B->getType() != IntTy)
    B = ConstantFoldInstOperands(Instruction::BitCast, IntTy,
                                 const_cast<Constant*>(B), TD);

  return A == B;
}

/// getConstantPoolIndex - Create a new entry in the constant pool or return
/// an existing one. User must specify the minimum required alignment (in
/// bytes) for the object.
///
unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (!Constants[i].isMachineConstantPoolEntry() &&
        CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C,
                                  getDataLayout())) {
      if ((unsigned)Constants[i].getAlignment() < Alignment)
        Constants[i].Alignment = Alignment;
      return i;
    }

  Constants.push_back(MachineConstantPoolEntry(C, Alignment));
  return Constants.size()-1;
}

unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  int Idx = V->getExistingMachineCPValue(this, Alignment);
  if (Idx != -1) {
    MachineCPVsSharingEntries.insert(V);
    return (unsigned)Idx;
  }

  Constants.push_back(MachineConstantPoolEntry(V, Alignment));
  return Constants.size()-1;
}

void MachineConstantPool::print(raw_ostream &OS) const {
  if (Constants.empty()) return;

  OS << "Constant Pool:\n";
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    OS << " cp#" << i << ": ";
    if (Constants[i].isMachineConstantPoolEntry())
      Constants[i].Val.MachineCPVal->print(OS);
    else
      Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false);
    OS << ", align=" << Constants[i].getAlignment();
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineConstantPool::dump() const { print(dbgs()); }
#endif