MachineFunction.cpp

  1. //===-- MachineFunction.cpp -----------------------------------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // Collect native machine code information for a function. This allows
  11. // target-specific information about the generated code to be stored with each
  12. // function.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "llvm/CodeGen/MachineFunction.h"
  16. #include "llvm/ADT/STLExtras.h"
  17. #include "llvm/ADT/SmallString.h"
  18. #include "llvm/Analysis/ConstantFolding.h"
  19. #include "llvm/Analysis/EHPersonalities.h"
  20. #include "llvm/CodeGen/MachineConstantPool.h"
  21. #include "llvm/CodeGen/MachineFrameInfo.h"
  22. #include "llvm/CodeGen/MachineFunctionInitializer.h"
  23. #include "llvm/CodeGen/MachineFunctionPass.h"
  24. #include "llvm/CodeGen/MachineInstr.h"
  25. #include "llvm/CodeGen/MachineJumpTableInfo.h"
  26. #include "llvm/CodeGen/MachineModuleInfo.h"
  27. #include "llvm/CodeGen/MachineRegisterInfo.h"
  28. #include "llvm/CodeGen/Passes.h"
  29. #include "llvm/CodeGen/PseudoSourceValue.h"
  30. #include "llvm/CodeGen/WinEHFuncInfo.h"
  31. #include "llvm/IR/DataLayout.h"
  32. #include "llvm/IR/DebugInfo.h"
  33. #include "llvm/IR/Function.h"
  34. #include "llvm/IR/Module.h"
  35. #include "llvm/IR/ModuleSlotTracker.h"
  36. #include "llvm/MC/MCAsmInfo.h"
  37. #include "llvm/MC/MCContext.h"
  38. #include "llvm/Support/Debug.h"
  39. #include "llvm/Support/GraphWriter.h"
  40. #include "llvm/Support/raw_ostream.h"
  41. #include "llvm/Target/TargetFrameLowering.h"
  42. #include "llvm/Target/TargetLowering.h"
  43. #include "llvm/Target/TargetMachine.h"
  44. #include "llvm/Target/TargetSubtargetInfo.h"
  45. using namespace llvm;
  46. #define DEBUG_TYPE "codegen"
  47. static cl::opt<unsigned>
  48. AlignAllFunctions("align-all-functions",
  49. cl::desc("Force the alignment of all functions."),
  50. cl::init(0), cl::Hidden);
  51. void MachineFunctionInitializer::anchor() {}
  52. static const char *getPropertyName(MachineFunctionProperties::Property Prop) {
  53. typedef MachineFunctionProperties::Property P;
  54. switch (Prop) {
  55. case P::FailedISel: return "FailedISel";
  56. case P::IsSSA: return "IsSSA";
  57. case P::Legalized: return "Legalized";
  58. case P::NoPHIs: return "NoPHIs";
  59. case P::NoVRegs: return "NoVRegs";
  60. case P::RegBankSelected: return "RegBankSelected";
  61. case P::Selected: return "Selected";
  62. case P::TracksLiveness: return "TracksLiveness";
  63. }
  64. llvm_unreachable("Invalid machine function property");
  65. }
  66. void MachineFunctionProperties::print(raw_ostream &OS) const {
  67. const char *Separator = "";
  68. for (BitVector::size_type I = 0; I < Properties.size(); ++I) {
  69. if (!Properties[I])
  70. continue;
  71. OS << Separator << getPropertyName(static_cast<Property>(I));
  72. Separator = ", ";
  73. }
  74. }
  75. //===----------------------------------------------------------------------===//
  76. // MachineFunction implementation
  77. //===----------------------------------------------------------------------===//
  78. // Out-of-line virtual method.
  79. MachineFunctionInfo::~MachineFunctionInfo() {}
  80. void ilist_alloc_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  81. MBB->getParent()->DeleteMachineBasicBlock(MBB);
  82. }
  83. static inline unsigned getFnStackAlignment(const TargetSubtargetInfo *STI,
  84. const Function *Fn) {
  85. if (Fn->hasFnAttribute(Attribute::StackAlignment))
  86. return Fn->getFnStackAlignment();
  87. return STI->getFrameLowering()->getStackAlignment();
  88. }
  89. MachineFunction::MachineFunction(const Function *F, const TargetMachine &TM,
  90. unsigned FunctionNum, MachineModuleInfo &mmi)
  91. : Fn(F), Target(TM), STI(TM.getSubtargetImpl(*F)), Ctx(mmi.getContext()),
  92. MMI(mmi) {
  93. FunctionNumber = FunctionNum;
  94. init();
  95. }
  96. void MachineFunction::init() {
  97. // Assume the function starts in SSA form with correct liveness.
  98. Properties.set(MachineFunctionProperties::Property::IsSSA);
  99. Properties.set(MachineFunctionProperties::Property::TracksLiveness);
  100. if (STI->getRegisterInfo())
  101. RegInfo = new (Allocator) MachineRegisterInfo(this);
  102. else
  103. RegInfo = nullptr;
  104. MFInfo = nullptr;
  105. // We can realign the stack if the target supports it and the user hasn't
  106. // explicitly asked us not to.
  107. bool CanRealignSP = STI->getFrameLowering()->isStackRealignable() &&
  108. !Fn->hasFnAttribute("no-realign-stack");
  109. FrameInfo = new (Allocator) MachineFrameInfo(
  110. getFnStackAlignment(STI, Fn), /*StackRealignable=*/CanRealignSP,
  111. /*ForceRealign=*/CanRealignSP &&
  112. Fn->hasFnAttribute(Attribute::StackAlignment));
  113. if (Fn->hasFnAttribute(Attribute::StackAlignment))
  114. FrameInfo->ensureMaxAlignment(Fn->getFnStackAlignment());
  115. ConstantPool = new (Allocator) MachineConstantPool(getDataLayout());
  116. Alignment = STI->getTargetLowering()->getMinFunctionAlignment();
  117. // FIXME: Shouldn't use pref alignment if explicit alignment is set on Fn.
  118. // FIXME: Use Function::optForSize().
  119. if (!Fn->hasFnAttribute(Attribute::OptimizeForSize))
  120. Alignment = std::max(Alignment,
  121. STI->getTargetLowering()->getPrefFunctionAlignment());
  122. if (AlignAllFunctions)
  123. Alignment = AlignAllFunctions;
  124. JumpTableInfo = nullptr;
  125. if (isFuncletEHPersonality(classifyEHPersonality(
  126. Fn->hasPersonalityFn() ? Fn->getPersonalityFn() : nullptr))) {
  127. WinEHInfo = new (Allocator) WinEHFuncInfo();
  128. }
  129. assert(Target.isCompatibleDataLayout(getDataLayout()) &&
  130. "Can't create a MachineFunction using a Module with a "
  131. "Target-incompatible DataLayout attached\n");
  132. PSVManager = llvm::make_unique<PseudoSourceValueManager>();
  133. }
  134. MachineFunction::~MachineFunction() {
  135. clear();
  136. }
  137. void MachineFunction::clear() {
  138. Properties.reset();
  139. // Don't call destructors on MachineInstr and MachineOperand. All of their
  140. // memory comes from the BumpPtrAllocator which is about to be purged.
  141. //
  142. // Do call MachineBasicBlock destructors; they contain std::vectors.
  143. for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
  144. I->Insts.clearAndLeakNodesUnsafely();
  145. InstructionRecycler.clear(Allocator);
  146. OperandRecycler.clear(Allocator);
  147. BasicBlockRecycler.clear(Allocator);
  148. if (RegInfo) {
  149. RegInfo->~MachineRegisterInfo();
  150. Allocator.Deallocate(RegInfo);
  151. }
  152. if (MFInfo) {
  153. MFInfo->~MachineFunctionInfo();
  154. Allocator.Deallocate(MFInfo);
  155. }
  156. FrameInfo->~MachineFrameInfo();
  157. Allocator.Deallocate(FrameInfo);
  158. ConstantPool->~MachineConstantPool();
  159. Allocator.Deallocate(ConstantPool);
  160. if (JumpTableInfo) {
  161. JumpTableInfo->~MachineJumpTableInfo();
  162. Allocator.Deallocate(JumpTableInfo);
  163. }
  164. if (WinEHInfo) {
  165. WinEHInfo->~WinEHFuncInfo();
  166. Allocator.Deallocate(WinEHInfo);
  167. }
  168. }
  169. const DataLayout &MachineFunction::getDataLayout() const {
  170. return Fn->getParent()->getDataLayout();
  171. }
  172. /// Get the JumpTableInfo for this function.
  173. /// If it does not already exist, allocate one.
  174. MachineJumpTableInfo *MachineFunction::
  175. getOrCreateJumpTableInfo(unsigned EntryKind) {
  176. if (JumpTableInfo) return JumpTableInfo;
  177. JumpTableInfo = new (Allocator)
  178. MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  179. return JumpTableInfo;
  180. }
  181. /// Should we be emitting segmented stack stuff for the function?
  182. bool MachineFunction::shouldSplitStack() const {
  183. return getFunction()->hasFnAttribute("split-stack");
  184. }
  185. /// This discards all of the MachineBasicBlock numbers and recomputes them.
  186. /// This guarantees that the MBB numbers are sequential, dense, and match the
  187. /// ordering of the blocks within the function. If a specific MachineBasicBlock
  188. /// is specified, only that block and those after it are renumbered.
  189. void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  190. if (empty()) { MBBNumbering.clear(); return; }
  191. MachineFunction::iterator MBBI, E = end();
  192. if (MBB == nullptr)
  193. MBBI = begin();
  194. else
  195. MBBI = MBB->getIterator();
  196. // Figure out the block number this should have.
  197. unsigned BlockNo = 0;
  198. if (MBBI != begin())
  199. BlockNo = std::prev(MBBI)->getNumber() + 1;
  200. for (; MBBI != E; ++MBBI, ++BlockNo) {
  201. if (MBBI->getNumber() != (int)BlockNo) {
  202. // Remove use of the old number.
  203. if (MBBI->getNumber() != -1) {
  204. assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
  205. "MBB number mismatch!");
  206. MBBNumbering[MBBI->getNumber()] = nullptr;
  207. }
  208. // If BlockNo is already taken, set that block's number to -1.
  209. if (MBBNumbering[BlockNo])
  210. MBBNumbering[BlockNo]->setNumber(-1);
  211. MBBNumbering[BlockNo] = &*MBBI;
  212. MBBI->setNumber(BlockNo);
  213. }
  214. }
  215. // Okay, all the blocks are renumbered. If we have compactified the block
  216. // numbering, shrink MBBNumbering now.
  217. assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  218. MBBNumbering.resize(BlockNo);
  219. }
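// Illustrative sketch (hypothetical caller, not part of this file): a pass that
// splices in a new block typically renumbers from the insertion point so the
// MBB numbers stay dense and in layout order, e.g.
//   MachineBasicBlock *NewMBB = MF.CreateMachineBasicBlock(BB);
//   MF.insert(std::next(OrigMBB->getIterator()), NewMBB);
//   MF.RenumberBlocks(NewMBB);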
  220. /// Allocate a new MachineInstr. Use this instead of `new MachineInstr'.
  221. MachineInstr *MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID,
  222. const DebugLoc &DL,
  223. bool NoImp) {
  224. return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
  225. MachineInstr(*this, MCID, DL, NoImp);
  226. }
  227. /// Create a new MachineInstr which is a copy of the 'Orig' instruction,
  228. /// identical in all ways except the instruction has no parent, prev, or next.
  229. MachineInstr *
  230. MachineFunction::CloneMachineInstr(const MachineInstr *Orig) {
  231. return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
  232. MachineInstr(*this, *Orig);
  233. }
  234. /// Delete the given MachineInstr.
  235. ///
  236. /// This function also serves as the MachineInstr destructor - the real
  237. /// ~MachineInstr() destructor must be empty.
  238. void
  239. MachineFunction::DeleteMachineInstr(MachineInstr *MI) {
  240. // Strip it for parts. The operand array and the MI object itself are
  241. // independently recyclable.
  242. if (MI->Operands)
  243. deallocateOperandArray(MI->CapOperands, MI->Operands);
  244. // Don't call ~MachineInstr() which must be trivial anyway because
  245. // ~MachineFunction drops whole lists of MachineInstrs without calling their
  246. // destructors.
  247. InstructionRecycler.Deallocate(Allocator, MI);
  248. }
  249. /// Allocate a new MachineBasicBlock. Use this instead of
  250. /// `new MachineBasicBlock'.
  251. MachineBasicBlock *
  252. MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) {
  253. return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator))
  254. MachineBasicBlock(*this, bb);
  255. }
  256. /// Delete the given MachineBasicBlock.
  257. void
  258. MachineFunction::DeleteMachineBasicBlock(MachineBasicBlock *MBB) {
  259. assert(MBB->getParent() == this && "MBB parent mismatch!");
  260. MBB->~MachineBasicBlock();
  261. BasicBlockRecycler.Deallocate(Allocator, MBB);
  262. }
  263. MachineMemOperand *MachineFunction::getMachineMemOperand(
  264. MachinePointerInfo PtrInfo, MachineMemOperand::Flags f, uint64_t s,
  265. unsigned base_alignment, const AAMDNodes &AAInfo, const MDNode *Ranges,
  266. SynchronizationScope SynchScope, AtomicOrdering Ordering,
  267. AtomicOrdering FailureOrdering) {
  268. return new (Allocator)
  269. MachineMemOperand(PtrInfo, f, s, base_alignment, AAInfo, Ranges,
  270. SynchScope, Ordering, FailureOrdering);
  271. }
  272. MachineMemOperand *
  273. MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
  274. int64_t Offset, uint64_t Size) {
  275. if (MMO->getValue())
  276. return new (Allocator)
  277. MachineMemOperand(MachinePointerInfo(MMO->getValue(),
  278. MMO->getOffset()+Offset),
  279. MMO->getFlags(), Size, MMO->getBaseAlignment(),
  280. AAMDNodes(), nullptr, MMO->getSynchScope(),
  281. MMO->getOrdering(), MMO->getFailureOrdering());
  282. return new (Allocator)
  283. MachineMemOperand(MachinePointerInfo(MMO->getPseudoValue(),
  284. MMO->getOffset()+Offset),
  285. MMO->getFlags(), Size, MMO->getBaseAlignment(),
  286. AAMDNodes(), nullptr, MMO->getSynchScope(),
  287. MMO->getOrdering(), MMO->getFailureOrdering());
  288. }
  289. MachineInstr::mmo_iterator
  290. MachineFunction::allocateMemRefsArray(unsigned long Num) {
  291. return Allocator.Allocate<MachineMemOperand *>(Num);
  292. }
  293. std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
  294. MachineFunction::extractLoadMemRefs(MachineInstr::mmo_iterator Begin,
  295. MachineInstr::mmo_iterator End) {
  296. // Count the number of load mem refs.
  297. unsigned Num = 0;
  298. for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
  299. if ((*I)->isLoad())
  300. ++Num;
  301. // Allocate a new array and populate it with the load information.
  302. MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  303. unsigned Index = 0;
  304. for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
  305. if ((*I)->isLoad()) {
  306. if (!(*I)->isStore())
  307. // Reuse the MMO.
  308. Result[Index] = *I;
  309. else {
  310. // Clone the MMO and unset the store flag.
  311. MachineMemOperand *JustLoad =
  312. getMachineMemOperand((*I)->getPointerInfo(),
  313. (*I)->getFlags() & ~MachineMemOperand::MOStore,
  314. (*I)->getSize(), (*I)->getBaseAlignment(),
  315. (*I)->getAAInfo(), nullptr,
  316. (*I)->getSynchScope(), (*I)->getOrdering(),
  317. (*I)->getFailureOrdering());
  318. Result[Index] = JustLoad;
  319. }
  320. ++Index;
  321. }
  322. }
  323. return std::make_pair(Result, Result + Num);
  324. }
  325. std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
  326. MachineFunction::extractStoreMemRefs(MachineInstr::mmo_iterator Begin,
  327. MachineInstr::mmo_iterator End) {
  329. // Count the number of store mem refs.
  329. unsigned Num = 0;
  330. for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
  331. if ((*I)->isStore())
  332. ++Num;
  333. // Allocate a new array and populate it with the store information.
  334. MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  335. unsigned Index = 0;
  336. for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
  337. if ((*I)->isStore()) {
  338. if (!(*I)->isLoad())
  339. // Reuse the MMO.
  340. Result[Index] = *I;
  341. else {
  342. // Clone the MMO and unset the load flag.
  343. MachineMemOperand *JustStore =
  344. getMachineMemOperand((*I)->getPointerInfo(),
  345. (*I)->getFlags() & ~MachineMemOperand::MOLoad,
  346. (*I)->getSize(), (*I)->getBaseAlignment(),
  347. (*I)->getAAInfo(), nullptr,
  348. (*I)->getSynchScope(), (*I)->getOrdering(),
  349. (*I)->getFailureOrdering());
  350. Result[Index] = JustStore;
  351. }
  352. ++Index;
  353. }
  354. }
  355. return std::make_pair(Result, Result + Num);
  356. }
  357. const char *MachineFunction::createExternalSymbolName(StringRef Name) {
  358. char *Dest = Allocator.Allocate<char>(Name.size() + 1);
  359. std::copy(Name.begin(), Name.end(), Dest);
  360. Dest[Name.size()] = 0;
  361. return Dest;
  362. }
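// Illustrative sketch (hypothetical use, not from this file): targets use this
// to build symbol names whose storage must outlive instruction selection, e.g.
//   const char *Sym = MF.createExternalSymbolName("__some_runtime_helper");
//   SDValue Callee =
//       DAG.getExternalSymbol(Sym, TLI.getPointerTy(DAG.getDataLayout()));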
  363. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  364. LLVM_DUMP_METHOD void MachineFunction::dump() const {
  365. print(dbgs());
  366. }
  367. #endif
  368. StringRef MachineFunction::getName() const {
  369. assert(getFunction() && "No function!");
  370. return getFunction()->getName();
  371. }
  372. void MachineFunction::print(raw_ostream &OS, const SlotIndexes *Indexes) const {
  373. OS << "# Machine code for function " << getName() << ": ";
  374. getProperties().print(OS);
  375. OS << '\n';
  376. // Print Frame Information
  377. FrameInfo->print(*this, OS);
  378. // Print JumpTable Information
  379. if (JumpTableInfo)
  380. JumpTableInfo->print(OS);
  381. // Print Constant Pool
  382. ConstantPool->print(OS);
  383. const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();
  384. if (RegInfo && !RegInfo->livein_empty()) {
  385. OS << "Function Live Ins: ";
  386. for (MachineRegisterInfo::livein_iterator
  387. I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
  388. OS << PrintReg(I->first, TRI);
  389. if (I->second)
  390. OS << " in " << PrintReg(I->second, TRI);
  391. if (std::next(I) != E)
  392. OS << ", ";
  393. }
  394. OS << '\n';
  395. }
  396. ModuleSlotTracker MST(getFunction()->getParent());
  397. MST.incorporateFunction(*getFunction());
  398. for (const auto &BB : *this) {
  399. OS << '\n';
  400. BB.print(OS, MST, Indexes);
  401. }
  402. OS << "\n# End machine code for function " << getName() << ".\n\n";
  403. }
  404. namespace llvm {
  405. template<>
  406. struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {
  407. DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}
  408. static std::string getGraphName(const MachineFunction *F) {
  409. return ("CFG for '" + F->getName() + "' function").str();
  410. }
  411. std::string getNodeLabel(const MachineBasicBlock *Node,
  412. const MachineFunction *Graph) {
  413. std::string OutStr;
  414. {
  415. raw_string_ostream OSS(OutStr);
  416. if (isSimple()) {
  417. OSS << "BB#" << Node->getNumber();
  418. if (const BasicBlock *BB = Node->getBasicBlock())
  419. OSS << ": " << BB->getName();
  420. } else
  421. Node->print(OSS);
  422. }
  423. if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());
  424. // Process string output to make it nicer...
  425. for (unsigned i = 0; i != OutStr.length(); ++i)
  426. if (OutStr[i] == '\n') { // Left justify
  427. OutStr[i] = '\\';
  428. OutStr.insert(OutStr.begin()+i+1, 'l');
  429. }
  430. return OutStr;
  431. }
  432. };
  433. }
  434. void MachineFunction::viewCFG() const
  435. {
  436. #ifndef NDEBUG
  437. ViewGraph(this, "mf" + getName());
  438. #else
  439. errs() << "MachineFunction::viewCFG is only available in debug builds on "
  440. << "systems with Graphviz or gv!\n";
  441. #endif // NDEBUG
  442. }
  443. void MachineFunction::viewCFGOnly() const
  444. {
  445. #ifndef NDEBUG
  446. ViewGraph(this, "mf" + getName(), true);
  447. #else
  448. errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
  449. << "systems with Graphviz or gv!\n";
  450. #endif // NDEBUG
  451. }
  452. /// Add the specified physical register as a live-in value and
  453. /// create a corresponding virtual register for it.
  454. unsigned MachineFunction::addLiveIn(unsigned PReg,
  455. const TargetRegisterClass *RC) {
  456. MachineRegisterInfo &MRI = getRegInfo();
  457. unsigned VReg = MRI.getLiveInVirtReg(PReg);
  458. if (VReg) {
  459. const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
  460. (void)VRegRC;
  461. // A physical register can be added several times.
  462. // Between two calls, the register class of the related virtual register
  463. // may have been constrained to match some operation constraints.
  464. // In that case, check that the current register class includes the
  465. // physical register and is a sub class of the specified RC.
  466. assert((VRegRC == RC || (VRegRC->contains(PReg) &&
  467. RC->hasSubClassEq(VRegRC))) &&
  468. "Register class mismatch!");
  469. return VReg;
  470. }
  471. VReg = MRI.createVirtualRegister(RC);
  472. MRI.addLiveIn(PReg, VReg);
  473. return VReg;
  474. }
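// Illustrative sketch (hypothetical target code, not part of this file): a
// target's argument lowering might register an incoming physical register and
// then read it through the returned virtual register, e.g.
//   unsigned VReg = MF.addLiveIn(PhysReg, RC); // RC chosen by the target
//   SDValue Arg = DAG.getCopyFromReg(Chain, DL, VReg, VT);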
  475. /// Return the MCSymbol for the specified non-empty jump table.
  476. /// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
  477. /// normal 'L' label is returned.
  478. MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
  479. bool isLinkerPrivate) const {
  480. const DataLayout &DL = getDataLayout();
  481. assert(JumpTableInfo && "No jump tables");
  482. assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");
  483. StringRef Prefix = isLinkerPrivate ? DL.getLinkerPrivateGlobalPrefix()
  484. : DL.getPrivateGlobalPrefix();
  485. SmallString<60> Name;
  486. raw_svector_ostream(Name)
  487. << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  488. return Ctx.getOrCreateSymbol(Name);
  489. }
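// For example, with the usual ELF private prefix (".L"), jump table 2 of
// function 4 gets a symbol named ".LJTI4_2"; the linker-private variant uses
// the target's linker-private prefix instead.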
  490. /// Return a function-local symbol to represent the PIC base.
  491. MCSymbol *MachineFunction::getPICBaseSymbol() const {
  492. const DataLayout &DL = getDataLayout();
  493. return Ctx.getOrCreateSymbol(Twine(DL.getPrivateGlobalPrefix()) +
  494. Twine(getFunctionNumber()) + "$pb");
  495. }
  496. //===----------------------------------------------------------------------===//
  497. // MachineFrameInfo implementation
  498. //===----------------------------------------------------------------------===//
  499. /// Make sure the function is at least Align bytes aligned.
  500. void MachineFrameInfo::ensureMaxAlignment(unsigned Align) {
  501. if (!StackRealignable)
  502. assert(Align <= StackAlignment &&
  503. "For targets without stack realignment, Align is out of limit!");
  504. if (MaxAlignment < Align) MaxAlignment = Align;
  505. }
  506. /// Clamp the alignment if requested and emit a warning.
  507. static inline unsigned clampStackAlignment(bool ShouldClamp, unsigned Align,
  508. unsigned StackAlign) {
  509. if (!ShouldClamp || Align <= StackAlign)
  510. return Align;
  511. DEBUG(dbgs() << "Warning: requested alignment " << Align
  512. << " exceeds the stack alignment " << StackAlign
  513. << " when stack realignment is off" << '\n');
  514. return StackAlign;
  515. }
  516. /// Create a new statically sized stack object, returning a nonnegative
  517. /// identifier to represent it.
  518. int MachineFrameInfo::CreateStackObject(uint64_t Size, unsigned Alignment,
  519. bool isSS, const AllocaInst *Alloca) {
  520. assert(Size != 0 && "Cannot allocate zero size stack objects!");
  521. Alignment = clampStackAlignment(!StackRealignable, Alignment, StackAlignment);
  522. Objects.push_back(StackObject(Size, Alignment, 0, false, isSS, Alloca,
  523. !isSS));
  524. int Index = (int)Objects.size() - NumFixedObjects - 1;
  525. assert(Index >= 0 && "Bad frame index!");
  526. ensureMaxAlignment(Alignment);
  527. return Index;
  528. }
  529. /// Create a new statically sized stack object that represents a spill slot,
  530. /// returning a nonnegative identifier to represent it.
  531. int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
  532. unsigned Alignment) {
  533. Alignment = clampStackAlignment(!StackRealignable, Alignment, StackAlignment);
  534. CreateStackObject(Size, Alignment, true);
  535. int Index = (int)Objects.size() - NumFixedObjects - 1;
  536. ensureMaxAlignment(Alignment);
  537. return Index;
  538. }
  539. /// Notify the MachineFrameInfo object that a variable sized object has been
  540. /// created. This must be created whenever a variable sized object is created,
  541. /// whether or not the index returned is actually used.
  542. int MachineFrameInfo::CreateVariableSizedObject(unsigned Alignment,
  543. const AllocaInst *Alloca) {
  544. HasVarSizedObjects = true;
  545. Alignment = clampStackAlignment(!StackRealignable, Alignment, StackAlignment);
  546. Objects.push_back(StackObject(0, Alignment, 0, false, false, Alloca, true));
  547. ensureMaxAlignment(Alignment);
  548. return (int)Objects.size()-NumFixedObjects-1;
  549. }
  550. /// Create a new object at a fixed location on the stack.
  551. /// All fixed objects should be created before other objects are created for
  552. /// efficiency. By default, fixed objects are immutable. This returns an
  553. /// index with a negative value.
  554. int MachineFrameInfo::CreateFixedObject(uint64_t Size, int64_t SPOffset,
  555. bool Immutable, bool isAliased) {
  556. assert(Size != 0 && "Cannot allocate zero size fixed stack objects!");
  557. // The alignment of the frame index can be determined from its offset from
  558. // the incoming frame position. If the frame object is at offset 32 and
  559. // the stack is guaranteed to be 16-byte aligned, then we know that the
  560. // object is 16-byte aligned. Note that unlike the non-fixed case, if the
  561. // stack needs realignment, we can't assume that the stack will in fact be
  562. // aligned.
  563. unsigned Align = MinAlign(SPOffset, ForcedRealign ? 1 : StackAlignment);
  564. Align = clampStackAlignment(!StackRealignable, Align, StackAlignment);
  565. Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
  566. /*isSS*/ false,
  567. /*Alloca*/ nullptr, isAliased));
  568. return -++NumFixedObjects;
  569. }
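// Illustrative sketch (hypothetical target code, not from this file): incoming
// stack arguments are typically modeled as fixed objects during call lowering,
// e.g.
//   int FI = MFI.CreateFixedObject(ArgSize, ArgStackOffset, /*Immutable=*/true);
//   SDValue FIN = DAG.getFrameIndex(FI, TLI.getPointerTy(DAG.getDataLayout()));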
  570. /// Create a spill slot at a fixed location on the stack.
  571. /// Returns an index with a negative value.
  572. int MachineFrameInfo::CreateFixedSpillStackObject(uint64_t Size,
  573. int64_t SPOffset,
  574. bool Immutable) {
  575. unsigned Align = MinAlign(SPOffset, ForcedRealign ? 1 : StackAlignment);
  576. Align = clampStackAlignment(!StackRealignable, Align, StackAlignment);
  577. Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
  578. /*isSS*/ true,
  579. /*Alloca*/ nullptr,
  580. /*isAliased*/ false));
  581. return -++NumFixedObjects;
  582. }
  583. BitVector MachineFrameInfo::getPristineRegs(const MachineFunction &MF) const {
  584. const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  585. BitVector BV(TRI->getNumRegs());
  586. // Before CSI is calculated, no registers are considered pristine. They can be
  587. // freely used and PEI will make sure they are saved.
  588. if (!isCalleeSavedInfoValid())
  589. return BV;
  590. for (const MCPhysReg *CSR = TRI->getCalleeSavedRegs(&MF); CSR && *CSR; ++CSR)
  591. BV.set(*CSR);
  592. // Saved CSRs are not pristine.
  593. for (auto &I : getCalleeSavedInfo())
  594. for (MCSubRegIterator S(I.getReg(), TRI, true); S.isValid(); ++S)
  595. BV.reset(*S);
  596. return BV;
  597. }
  598. unsigned MachineFrameInfo::estimateStackSize(const MachineFunction &MF) const {
  599. const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  600. const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  601. unsigned MaxAlign = getMaxAlignment();
  602. int Offset = 0;
  603. // This code is very, very similar to PEI::calculateFrameObjectOffsets().
  604. // It really should be refactored to share code. Until then, changes
  605. // should keep in mind that there's tight coupling between the two.
  606. for (int i = getObjectIndexBegin(); i != 0; ++i) {
  607. int FixedOff = -getObjectOffset(i);
  608. if (FixedOff > Offset) Offset = FixedOff;
  609. }
  610. for (unsigned i = 0, e = getObjectIndexEnd(); i != e; ++i) {
  611. if (isDeadObjectIndex(i))
  612. continue;
  613. Offset += getObjectSize(i);
  614. unsigned Align = getObjectAlignment(i);
  615. // Adjust to alignment boundary
  616. Offset = (Offset+Align-1)/Align*Align;
  617. MaxAlign = std::max(Align, MaxAlign);
  618. }
  619. if (adjustsStack() && TFI->hasReservedCallFrame(MF))
  620. Offset += getMaxCallFrameSize();
  621. // Round up the size to a multiple of the alignment. If the function has
  622. // any calls or alloca's, align to the target's StackAlignment value to
  623. // ensure that the callee's frame or the alloca data is suitably aligned;
  624. // otherwise, for leaf functions, align to the TransientStackAlignment
  625. // value.
  626. unsigned StackAlign;
  627. if (adjustsStack() || hasVarSizedObjects() ||
  628. (RegInfo->needsStackRealignment(MF) && getObjectIndexEnd() != 0))
  629. StackAlign = TFI->getStackAlignment();
  630. else
  631. StackAlign = TFI->getTransientStackAlignment();
  632. // If the frame pointer is eliminated, all frame offsets will be relative to
  633. // SP not FP. Align to MaxAlign so this works.
  634. StackAlign = std::max(StackAlign, MaxAlign);
  635. unsigned AlignMask = StackAlign - 1;
  636. Offset = (Offset + AlignMask) & ~uint64_t(AlignMask);
  637. return (unsigned)Offset;
  638. }
  639. void MachineFrameInfo::print(const MachineFunction &MF, raw_ostream &OS) const{
  640. if (Objects.empty()) return;
  641. const TargetFrameLowering *FI = MF.getSubtarget().getFrameLowering();
  642. int ValOffset = (FI ? FI->getOffsetOfLocalArea() : 0);
  643. OS << "Frame Objects:\n";
  644. for (unsigned i = 0, e = Objects.size(); i != e; ++i) {
  645. const StackObject &SO = Objects[i];
  646. OS << " fi#" << (int)(i-NumFixedObjects) << ": ";
  647. if (SO.Size == ~0ULL) {
  648. OS << "dead\n";
  649. continue;
  650. }
  651. if (SO.Size == 0)
  652. OS << "variable sized";
  653. else
  654. OS << "size=" << SO.Size;
  655. OS << ", align=" << SO.Alignment;
  656. if (i < NumFixedObjects)
  657. OS << ", fixed";
  658. if (i < NumFixedObjects || SO.SPOffset != -1) {
  659. int64_t Off = SO.SPOffset - ValOffset;
  660. OS << ", at location [SP";
  661. if (Off > 0)
  662. OS << "+" << Off;
  663. else if (Off < 0)
  664. OS << Off;
  665. OS << "]";
  666. }
  667. OS << "\n";
  668. }
  669. }
  670. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  671. void MachineFrameInfo::dump(const MachineFunction &MF) const {
  672. print(MF, dbgs());
  673. }
  674. #endif
  675. //===----------------------------------------------------------------------===//
  676. // MachineJumpTableInfo implementation
  677. //===----------------------------------------------------------------------===//
  678. /// Return the size of each entry in the jump table.
  679. unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  680. // The size of a jump table entry is 4 bytes unless the entry is just the
  681. // address of a block, in which case it is the pointer size.
  682. switch (getEntryKind()) {
  683. case MachineJumpTableInfo::EK_BlockAddress:
  684. return TD.getPointerSize();
  685. case MachineJumpTableInfo::EK_GPRel64BlockAddress:
  686. return 8;
  687. case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  688. case MachineJumpTableInfo::EK_LabelDifference32:
  689. case MachineJumpTableInfo::EK_Custom32:
  690. return 4;
  691. case MachineJumpTableInfo::EK_Inline:
  692. return 0;
  693. }
  694. llvm_unreachable("Unknown jump table encoding!");
  695. }
  696. /// Return the alignment of each entry in the jump table.
  697. unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  698. // The alignment of a jump table entry is the alignment of int32 unless the
  699. // entry is just the address of a block, in which case it is the pointer
  700. // alignment.
  701. switch (getEntryKind()) {
  702. case MachineJumpTableInfo::EK_BlockAddress:
  703. return TD.getPointerABIAlignment();
  704. case MachineJumpTableInfo::EK_GPRel64BlockAddress:
  705. return TD.getABIIntegerTypeAlignment(64);
  706. case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  707. case MachineJumpTableInfo::EK_LabelDifference32:
  708. case MachineJumpTableInfo::EK_Custom32:
  709. return TD.getABIIntegerTypeAlignment(32);
  710. case MachineJumpTableInfo::EK_Inline:
  711. return 1;
  712. }
  713. llvm_unreachable("Unknown jump table encoding!");
  714. }
  715. /// Create a new jump table entry in the jump table info.
  716. unsigned MachineJumpTableInfo::createJumpTableIndex(
  717. const std::vector<MachineBasicBlock*> &DestBBs) {
  718. assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  719. JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  720. return JumpTables.size()-1;
  721. }
  722. /// If Old is the target of any jump tables, update the jump tables to branch
  723. /// to New instead.
  724. bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old,
  725. MachineBasicBlock *New) {
  726. assert(Old != New && "Not making a change?");
  727. bool MadeChange = false;
  728. for (size_t i = 0, e = JumpTables.size(); i != e; ++i)
  729. ReplaceMBBInJumpTable(i, Old, New);
  730. return MadeChange;
  731. }
  732. /// If Old is a target of the jump tables, update the jump table to branch to
  733. /// New instead.
  734. bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx,
  735. MachineBasicBlock *Old,
  736. MachineBasicBlock *New) {
  737. assert(Old != New && "Not making a change?");
  738. bool MadeChange = false;
  739. MachineJumpTableEntry &JTE = JumpTables[Idx];
  740. for (size_t j = 0, e = JTE.MBBs.size(); j != e; ++j)
  741. if (JTE.MBBs[j] == Old) {
  742. JTE.MBBs[j] = New;
  743. MadeChange = true;
  744. }
  745. return MadeChange;
  746. }
  747. void MachineJumpTableInfo::print(raw_ostream &OS) const {
  748. if (JumpTables.empty()) return;
  749. OS << "Jump Tables:\n";
  750. for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) {
  751. OS << " jt#" << i << ": ";
  752. for (unsigned j = 0, f = JumpTables[i].MBBs.size(); j != f; ++j)
  753. OS << " BB#" << JumpTables[i].MBBs[j]->getNumber();
  754. }
  755. OS << '\n';
  756. }
  757. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  758. LLVM_DUMP_METHOD void MachineJumpTableInfo::dump() const { print(dbgs()); }
  759. #endif
  760. //===----------------------------------------------------------------------===//
  761. // MachineConstantPool implementation
  762. //===----------------------------------------------------------------------===//
  763. void MachineConstantPoolValue::anchor() { }
  764. Type *MachineConstantPoolEntry::getType() const {
  765. if (isMachineConstantPoolEntry())
  766. return Val.MachineCPVal->getType();
  767. return Val.ConstVal->getType();
  768. }
  769. bool MachineConstantPoolEntry::needsRelocation() const {
  770. if (isMachineConstantPoolEntry())
  771. return true;
  772. return Val.ConstVal->needsRelocation();
  773. }
  774. SectionKind
  775. MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const {
  776. if (needsRelocation())
  777. return SectionKind::getReadOnlyWithRel();
  778. switch (DL->getTypeAllocSize(getType())) {
  779. case 4:
  780. return SectionKind::getMergeableConst4();
  781. case 8:
  782. return SectionKind::getMergeableConst8();
  783. case 16:
  784. return SectionKind::getMergeableConst16();
  785. case 32:
  786. return SectionKind::getMergeableConst32();
  787. default:
  788. return SectionKind::getReadOnly();
  789. }
  790. }
  791. MachineConstantPool::~MachineConstantPool() {
  792. // A constant may be a member of both Constants and MachineCPVsSharingEntries,
  793. // so keep track of which we've deleted to avoid double deletions.
  794. DenseSet<MachineConstantPoolValue*> Deleted;
  795. for (unsigned i = 0, e = Constants.size(); i != e; ++i)
  796. if (Constants[i].isMachineConstantPoolEntry()) {
  797. Deleted.insert(Constants[i].Val.MachineCPVal);
  798. delete Constants[i].Val.MachineCPVal;
  799. }
  800. for (DenseSet<MachineConstantPoolValue*>::iterator I =
  801. MachineCPVsSharingEntries.begin(), E = MachineCPVsSharingEntries.end();
  802. I != E; ++I) {
  803. if (Deleted.count(*I) == 0)
  804. delete *I;
  805. }
  806. }
  807. /// Test whether the given two constants can be allocated the same constant pool
  808. /// entry.
  809. static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
  810. const DataLayout &DL) {
  811. // Handle the trivial case quickly.
  812. if (A == B) return true;
  813. // If they have the same type but weren't the same constant, they must have
  814. // different values (constants are uniqued), so quickly reject them.
  815. if (A->getType() == B->getType()) return false;
  816. // We can't handle structs or arrays.
  817. if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
  818. isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
  819. return false;
  820. // For now, only support constants with the same size.
  821. uint64_t StoreSize = DL.getTypeStoreSize(A->getType());
  822. if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128)
  823. return false;
  824. Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);
  825. // Try constant folding a bitcast of both instructions to an integer. If we
  826. // get two identical ConstantInt's, then we are good to share them. We use
  827. // the constant folding APIs to do this so that we get the benefit of
  828. // DataLayout.
  829. if (isa<PointerType>(A->getType()))
  830. A = ConstantFoldCastOperand(Instruction::PtrToInt,
  831. const_cast<Constant *>(A), IntTy, DL);
  832. else if (A->getType() != IntTy)
  833. A = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(A),
  834. IntTy, DL);
  835. if (isa<PointerType>(B->getType()))
  836. B = ConstantFoldCastOperand(Instruction::PtrToInt,
  837. const_cast<Constant *>(B), IntTy, DL);
  838. else if (B->getType() != IntTy)
  839. B = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(B),
  840. IntTy, DL);
  841. return A == B;
  842. }
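// For instance, under this scheme the constants float 1.0 and i32 1065353216
// (the same 0x3F800000 bit pattern) fold to identical i32 values and can share
// one 4-byte pool entry, while two differently-valued i32 constants cannot.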
  843. /// Create a new entry in the constant pool or return an existing one.
  844. /// User must specify the log2 of the minimum required alignment for the object.
  845. unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
  846. unsigned Alignment) {
  847. assert(Alignment && "Alignment must be specified!");
  848. if (Alignment > PoolAlignment) PoolAlignment = Alignment;
  849. // Check to see if we already have this constant.
  850. //
  851. // FIXME, this could be made much more efficient for large constant pools.
  852. for (unsigned i = 0, e = Constants.size(); i != e; ++i)
  853. if (!Constants[i].isMachineConstantPoolEntry() &&
  854. CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) {
  855. if ((unsigned)Constants[i].getAlignment() < Alignment)
  856. Constants[i].Alignment = Alignment;
  857. return i;
  858. }
  859. Constants.push_back(MachineConstantPoolEntry(C, Alignment));
  860. return Constants.size()-1;
  861. }
  862. unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V,
  863. unsigned Alignment) {
  864. assert(Alignment && "Alignment must be specified!");
  865. if (Alignment > PoolAlignment) PoolAlignment = Alignment;
  866. // Check to see if we already have this constant.
  867. //
  868. // FIXME, this could be made much more efficient for large constant pools.
  869. int Idx = V->getExistingMachineCPValue(this, Alignment);
  870. if (Idx != -1) {
  871. MachineCPVsSharingEntries.insert(V);
  872. return (unsigned)Idx;
  873. }
  874. Constants.push_back(MachineConstantPoolEntry(V, Alignment));
  875. return Constants.size()-1;
  876. }
  877. void MachineConstantPool::print(raw_ostream &OS) const {
  878. if (Constants.empty()) return;
  879. OS << "Constant Pool:\n";
  880. for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
  881. OS << " cp#" << i << ": ";
  882. if (Constants[i].isMachineConstantPoolEntry())
  883. Constants[i].Val.MachineCPVal->print(OS);
  884. else
  885. Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false);
  886. OS << ", align=" << Constants[i].getAlignment();
  887. OS << "\n";
  888. }
  889. }
  890. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  891. LLVM_DUMP_METHOD void MachineConstantPool::dump() const { print(dbgs()); }
  892. #endif