LiveIntervalAnalysis.cpp 59 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581
  1. //===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the LiveInterval analysis pass which is used
  11. // by the Linear Scan Register allocator. This pass linearizes the
  12. // basic blocks of the function in DFS order and computes live intervals for
  13. // each virtual and physical register.
  14. //
  15. //===----------------------------------------------------------------------===//
  16. #include "llvm/CodeGen/LiveIntervalAnalysis.h"
  17. #include "LiveRangeCalc.h"
  18. #include "llvm/ADT/STLExtras.h"
  19. #include "llvm/Analysis/AliasAnalysis.h"
  20. #include "llvm/CodeGen/LiveVariables.h"
  21. #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
  22. #include "llvm/CodeGen/MachineDominators.h"
  23. #include "llvm/CodeGen/MachineInstr.h"
  24. #include "llvm/CodeGen/MachineRegisterInfo.h"
  25. #include "llvm/CodeGen/Passes.h"
  26. #include "llvm/CodeGen/VirtRegMap.h"
  27. #include "llvm/IR/Value.h"
  28. #include "llvm/Support/BlockFrequency.h"
  29. #include "llvm/Support/CommandLine.h"
  30. #include "llvm/Support/Debug.h"
  31. #include "llvm/Support/ErrorHandling.h"
  32. #include "llvm/Support/raw_ostream.h"
  33. #include "llvm/Target/TargetInstrInfo.h"
  34. #include "llvm/Target/TargetRegisterInfo.h"
  35. #include "llvm/Target/TargetSubtargetInfo.h"
  36. #include <algorithm>
  37. #include <cmath>
  38. using namespace llvm;
  39. #define DEBUG_TYPE "regalloc"
  40. char LiveIntervals::ID = 0;
  41. char &llvm::LiveIntervalsID = LiveIntervals::ID;
  42. INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
  43. "Live Interval Analysis", false, false)
  44. INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
  45. INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
  46. INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
  47. INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
  48. "Live Interval Analysis", false, false)
  49. #ifndef NDEBUG
  50. static cl::opt<bool> EnablePrecomputePhysRegs(
  51. "precompute-phys-liveness", cl::Hidden,
  52. cl::desc("Eagerly compute live intervals for all physreg units."));
  53. #else
  54. static bool EnablePrecomputePhysRegs = false;
  55. #endif // NDEBUG
  56. namespace llvm {
  57. cl::opt<bool> UseSegmentSetForPhysRegs(
  58. "use-segment-set-for-physregs", cl::Hidden, cl::init(true),
  59. cl::desc(
  60. "Use segment set for the computation of the live ranges of physregs."));
  61. }
  62. void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  63. AU.setPreservesCFG();
  64. AU.addRequired<AAResultsWrapperPass>();
  65. AU.addPreserved<AAResultsWrapperPass>();
  66. AU.addPreserved<LiveVariables>();
  67. AU.addPreservedID(MachineLoopInfoID);
  68. AU.addRequiredTransitiveID(MachineDominatorsID);
  69. AU.addPreservedID(MachineDominatorsID);
  70. AU.addPreserved<SlotIndexes>();
  71. AU.addRequiredTransitive<SlotIndexes>();
  72. MachineFunctionPass::getAnalysisUsage(AU);
  73. }
// Default constructor. LRCalc is allocated lazily on the first call to
// runOnMachineFunction and reused across functions.
LiveIntervals::LiveIntervals() : MachineFunctionPass(ID),
  DomTree(nullptr), LRCalc(nullptr) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
}
LiveIntervals::~LiveIntervals() {
  // LRCalc is owned by this pass. The intervals themselves are freed in
  // releaseMemory(), not here.
  delete LRCalc;
}
  81. void LiveIntervals::releaseMemory() {
  82. // Free the live intervals themselves.
  83. for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
  84. delete VirtRegIntervals[TargetRegisterInfo::index2VirtReg(i)];
  85. VirtRegIntervals.clear();
  86. RegMaskSlots.clear();
  87. RegMaskBits.clear();
  88. RegMaskBlocks.clear();
  89. for (unsigned i = 0, e = RegUnitRanges.size(); i != e; ++i)
  90. delete RegUnitRanges[i];
  91. RegUnitRanges.clear();
  92. // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
  93. VNInfoAllocator.Reset();
  94. }
/// runOnMachineFunction - calculates LiveIntervals
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  // Cache the function and the analyses every later computation step needs.
  MF = &fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  // The range calculator is reused across functions; allocate it only once.
  if (!LRCalc)
    LRCalc = new LiveRangeCalc();

  // Allocate space for all virtual registers.
  VirtRegIntervals.resize(MRI->getNumVirtRegs());

  computeVirtRegs();
  computeRegMasks();
  computeLiveInRegUnits();

  if (EnablePrecomputePhysRegs) {
    // For stress testing, precompute live ranges of all physical register
    // units, including reserved registers.
    for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
      getRegUnit(i);
  }
  DEBUG(dump());
  // Slot indexes and intervals were (re)built: the function is "modified"
  // from the pass manager's point of view.
  return true;
}
  121. /// print - Implement the dump method.
  122. void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
  123. OS << "********** INTERVALS **********\n";
  124. // Dump the regunits.
  125. for (unsigned i = 0, e = RegUnitRanges.size(); i != e; ++i)
  126. if (LiveRange *LR = RegUnitRanges[i])
  127. OS << PrintRegUnit(i, TRI) << ' ' << *LR << '\n';
  128. // Dump the virtregs.
  129. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  130. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  131. if (hasInterval(Reg))
  132. OS << getInterval(Reg) << '\n';
  133. }
  134. OS << "RegMasks:";
  135. for (unsigned i = 0, e = RegMaskSlots.size(); i != e; ++i)
  136. OS << ' ' << RegMaskSlots[i];
  137. OS << '\n';
  138. printInstrs(OS);
  139. }
// Print the machine instructions of the current function, annotated with
// their slot indexes.
void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  MF->print(OS, Indexes);
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debugger helper: dump the indexed machine instructions to stderr.
void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
#endif
  149. LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  150. float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
  151. llvm::huge_valf : 0.0F;
  152. return new LiveInterval(reg, Weight);
  153. }
/// computeVirtRegInterval - Compute the live interval of a virtual register,
/// based on defs and uses.
void LiveIntervals::computeVirtRegInterval(LiveInterval &LI) {
  assert(LRCalc && "LRCalc not initialized.");
  assert(LI.empty() && "Should only compute empty intervals.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // Also build subregister lane ranges when the target requested subreg
  // liveness tracking for this vreg.
  LRCalc->calculate(LI, MRI->shouldTrackSubRegLiveness(LI.reg));
  // Trim dead defs right away; no dead-instruction list is collected here.
  computeDeadValues(LI, nullptr);
}
  163. void LiveIntervals::computeVirtRegs() {
  164. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  165. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  166. if (MRI->reg_nodbg_empty(Reg))
  167. continue;
  168. createAndComputeVirtRegInterval(Reg);
  169. }
  170. }
// Record the slot index and clobber mask of every regmask in the function.
// RegMaskSlots and RegMaskBits are kept parallel: entry i of one corresponds
// to entry i of the other. RegMaskBlocks maps each basic block to its
// [first index, count) slice of those vectors.
void LiveIntervals::computeRegMasks() {
  RegMaskBlocks.resize(MF->getNumBlockIDs());

  // Find all instructions with regmask operands.
  for (MachineBasicBlock &MBB : *MF) {
    std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB.getNumber()];
    RMB.first = RegMaskSlots.size();

    // Some block starts, such as EH funclets, create masks.
    if (const uint32_t *Mask = MBB.getBeginClobberMask(TRI)) {
      RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
      RegMaskBits.push_back(Mask);
    }

    for (MachineInstr &MI : MBB) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isRegMask())
          continue;
        RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
        RegMaskBits.push_back(MO.getRegMask());
      }
    }

    // Some block ends, such as funclet returns, create masks. Put the mask on
    // the last instruction of the block, because MBB slot index intervals are
    // half-open.
    if (const uint32_t *Mask = MBB.getEndClobberMask(TRI)) {
      assert(!MBB.empty() && "empty return block?");
      RegMaskSlots.push_back(
          Indexes->getInstructionIndex(MBB.back()).getRegSlot());
      RegMaskBits.push_back(Mask);
    }

    // Compute the number of register mask instructions in this block.
    RMB.second = RegMaskSlots.size() - RMB.first;
  }
}
  203. //===----------------------------------------------------------------------===//
  204. // Register Unit Liveness
  205. //===----------------------------------------------------------------------===//
  206. //
  207. // Fixed interference typically comes from ABI boundaries: Function arguments
  208. // and return values are passed in fixed registers, and so are exception
  209. // pointers entering landing pads. Certain instructions require values to be
  210. // present in specific registers. That is also represented through fixed
  211. // interference.
  212. //
/// computeRegUnitInterval - Compute the live range of a register unit, based
/// on the uses and defs of aliasing registers. The range should be empty,
/// or contain only dead phi-defs from ABI blocks.
void LiveIntervals::computeRegUnitRange(LiveRange &LR, unsigned Unit) {
  assert(LRCalc && "LRCalc not initialized.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());

  // The physregs aliasing Unit are the roots and their super-registers.
  // Create all values as dead defs before extending to uses. Note that roots
  // may share super-registers. That's OK because createDeadDefs() is
  // idempotent. It is very rare for a register unit to have multiple roots, so
  // uniquing super-registers is probably not worthwhile.
  // Pass 1: seed the range with a dead def at every def of an aliasing reg.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      if (!MRI->reg_empty(*Supers))
        LRCalc->createDeadDefs(LR, *Supers);
    }
  }

  // Pass 2: now extend LR to reach all uses.
  // Ignore uses of reserved registers. We only track defs of those.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      unsigned Reg = *Supers;
      if (!MRI->isReserved(Reg) && !MRI->reg_empty(Reg))
        LRCalc->extendToUses(LR, Reg);
    }
  }

  // Flush the segment set to the segment vector.
  if (UseSegmentSetForPhysRegs)
    LR.flushSegmentSet();
}
/// computeLiveInRegUnits - Precompute the live ranges of any register units
/// that are live-in to an ABI block somewhere. Register values can appear
/// without a corresponding def when entering the entry block or a landing pad.
///
void LiveIntervals::computeLiveInRegUnits() {
  RegUnitRanges.resize(TRI->getNumRegUnits());
  DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");

  // Keep track of the live range sets allocated.
  SmallVector<unsigned, 8> NewRanges;

  // Check all basic blocks for live-ins.
  for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
       MFI != MFE; ++MFI) {
    const MachineBasicBlock *MBB = &*MFI;

    // We only care about ABI blocks: Entry + landing pads.
    if ((MFI != MF->begin() && !MBB->isEHPad()) || MBB->livein_empty())
      continue;

    // Create phi-defs at Begin for all live-in registers.
    SlotIndex Begin = Indexes->getMBBStartIdx(MBB);
    DEBUG(dbgs() << Begin << "\tBB#" << MBB->getNumber());
    for (const auto &LI : MBB->liveins()) {
      // A live-in physreg covers one or more register units; each gets a
      // phi-def value at the block start.
      for (MCRegUnitIterator Units(LI.PhysReg, TRI); Units.isValid(); ++Units) {
        unsigned Unit = *Units;
        LiveRange *LR = RegUnitRanges[Unit];
        if (!LR) {
          // Use segment set to speed-up initial computation of the live range.
          LR = RegUnitRanges[Unit] = new LiveRange(UseSegmentSetForPhysRegs);
          NewRanges.push_back(Unit);
        }
        VNInfo *VNI = LR->createDeadDef(Begin, getVNInfoAllocator());
        (void)VNI;
        DEBUG(dbgs() << ' ' << PrintRegUnit(Unit, TRI) << '#' << VNI->id);
      }
    }
    DEBUG(dbgs() << '\n');
  }
  DEBUG(dbgs() << "Created " << NewRanges.size() << " new intervals.\n");

  // Compute the 'normal' part of the ranges.
  for (unsigned i = 0, e = NewRanges.size(); i != e; ++i) {
    unsigned Unit = NewRanges[i];
    computeRegUnitRange(*RegUnitRanges[Unit], Unit);
  }
}
  287. static void createSegmentsForValues(LiveRange &LR,
  288. iterator_range<LiveInterval::vni_iterator> VNIs) {
  289. for (auto VNI : VNIs) {
  290. if (VNI->isUnused())
  291. continue;
  292. SlotIndex Def = VNI->def;
  293. LR.addSegment(LiveRange::Segment(Def, Def.getDeadSlot(), VNI));
  294. }
  295. }
// Worklist of (use slot, value) pairs still to be made live.
typedef SmallVector<std::pair<SlotIndex, VNInfo*>, 16> ShrinkToUsesWorkList;

// Grow the minimal segments in \p LR until every (Idx, VNI) pair on
// \p WorkList is covered, walking backwards across block boundaries.
// \p OldRange supplies the pre-shrink liveness used to find live-out values
// in predecessor blocks.
static void extendSegmentsToUses(LiveRange &LR, const SlotIndexes &Indexes,
                                 ShrinkToUsesWorkList &WorkList,
                                 const LiveRange &OldRange) {
  // Keep track of the PHIs that are in use.
  SmallPtrSet<VNInfo*, 8> UsedPHIs;
  // Blocks that have already been added to WorkList as live-out.
  SmallPtrSet<MachineBasicBlock*, 16> LiveOut;

  // Extend intervals to reach all uses in WorkList.
  while (!WorkList.empty()) {
    SlotIndex Idx = WorkList.back().first;
    VNInfo *VNI = WorkList.back().second;
    WorkList.pop_back();
    // Idx is a use slot; the instruction lives in the block of the previous
    // slot, so look that block up rather than Idx's block.
    const MachineBasicBlock *MBB = Indexes.getMBBFromIndex(Idx.getPrevSlot());
    SlotIndex BlockStart = Indexes.getMBBStartIdx(MBB);

    // Extend the live range for VNI to be live at Idx.
    if (VNInfo *ExtVNI = LR.extendInBlock(BlockStart, Idx)) {
      assert(ExtVNI == VNI && "Unexpected existing value number");
      (void)ExtVNI;
      // Is this a PHIDef we haven't seen before?
      if (!VNI->isPHIDef() || VNI->def != BlockStart ||
          !UsedPHIs.insert(VNI).second)
        continue;
      // The PHI is live, make sure the predecessors are live-out.
      for (auto &Pred : MBB->predecessors()) {
        if (!LiveOut.insert(Pred).second)
          continue;
        SlotIndex Stop = Indexes.getMBBEndIdx(Pred);
        // A predecessor is not required to have a live-out value for a PHI.
        if (VNInfo *PVNI = OldRange.getVNInfoBefore(Stop))
          WorkList.push_back(std::make_pair(Stop, PVNI));
      }
      continue;
    }

    // VNI is live-in to MBB.
    DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    LR.addSegment(LiveRange::Segment(BlockStart, Idx, VNI));

    // Make sure VNI is live-out from the predecessors.
    for (auto &Pred : MBB->predecessors()) {
      if (!LiveOut.insert(Pred).second)
        continue;
      SlotIndex Stop = Indexes.getMBBEndIdx(Pred);
      assert(OldRange.getVNInfoBefore(Stop) == VNI &&
             "Wrong value out of predecessor");
      WorkList.push_back(std::make_pair(Stop, VNI));
    }
  }
}
// Shrink \p li so it only covers its actual uses, dropping segments that are
// no longer needed. Dead defs are recorded: instructions whose defs all
// became dead are appended to \p dead when it is non-null. Returns true when
// the interval may have split into multiple connected components (a dead PHI
// was removed), in which case the caller should renumber values.
bool LiveIntervals::shrinkToUses(LiveInterval *li,
                                 SmallVectorImpl<MachineInstr*> *dead) {
  DEBUG(dbgs() << "Shrink: " << *li << '\n');
  assert(TargetRegisterInfo::isVirtualRegister(li->reg)
         && "Can only shrink virtual registers");

  // Shrink subregister live ranges.
  bool NeedsCleanup = false;
  for (LiveInterval::SubRange &S : li->subranges()) {
    shrinkToUses(S, li->reg);
    if (S.empty())
      NeedsCleanup = true;
  }
  if (NeedsCleanup)
    li->removeEmptySubRanges();

  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;

  // Visit all instructions reading li->reg. Post-increment the iterator
  // before touching UseMI so list edits cannot invalidate it.
  for (MachineRegisterInfo::reg_instr_iterator
       I = MRI->reg_instr_begin(li->reg), E = MRI->reg_instr_end();
       I != E; ) {
    MachineInstr *UseMI = &*(I++);
    if (UseMI->isDebugValue() || !UseMI->readsVirtualRegister(li->reg))
      continue;
    SlotIndex Idx = getInstructionIndex(*UseMI).getRegSlot();
    LiveQueryResult LRQ = li->Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    if (!VNI) {
      // This shouldn't happen: readsVirtualRegister returns true, but there is
      // no live value. It is likely caused by a target getting <undef> flags
      // wrong.
      DEBUG(dbgs() << Idx << '\t' << *UseMI
                   << "Warning: Instr claims to read non-existent value in "
                   << *li << '\n');
      continue;
    }
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;
    WorkList.push_back(std::make_pair(Idx, VNI));
  }

  // Create new live ranges with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, make_range(li->vni_begin(), li->vni_end()));
  extendSegmentsToUses(NewLR, *Indexes, WorkList, *li);

  // Move the trimmed segments back.
  li->segments.swap(NewLR.segments);

  // Handle dead values.
  bool CanSeparate = computeDeadValues(*li, dead);
  DEBUG(dbgs() << "Shrunk: " << *li << '\n');
  return CanSeparate;
}
// Walk the value numbers of \p LI and handle defs whose value is never used:
// dead PHI values are marked unused and their segment removed; dead
// instruction defs get a <dead> flag (and the instruction is appended to
// \p dead when all of its defs are dead). Also sets read-undef flags on
// subregister defs that do not read the old value. Returns true when a dead
// PHI was removed, i.e. the interval may have separated into components.
bool LiveIntervals::computeDeadValues(LiveInterval &LI,
                                      SmallVectorImpl<MachineInstr*> *dead) {
  bool MayHaveSplitComponents = false;
  for (auto VNI : LI.valnos) {
    if (VNI->isUnused())
      continue;
    SlotIndex Def = VNI->def;
    LiveRange::iterator I = LI.FindSegmentContaining(Def);
    assert(I != LI.end() && "Missing segment for VNI");

    // Is the register live before? Otherwise we may have to add a read-undef
    // flag for subregister defs.
    unsigned VReg = LI.reg;
    if (MRI->shouldTrackSubRegLiveness(VReg)) {
      if ((I == LI.begin() || std::prev(I)->end < Def) && !VNI->isPHIDef()) {
        MachineInstr *MI = getInstructionFromIndex(Def);
        MI->setRegisterDefReadUndef(VReg);
      }
    }

    // A value whose segment ends at its own dead slot is never read.
    if (I->end != Def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      VNI->markUnused();
      LI.removeSegment(I);
      DEBUG(dbgs() << "Dead PHI at " << Def << " may separate interval\n");
      MayHaveSplitComponents = true;
    } else {
      // This is a dead def. Make sure the instruction knows.
      MachineInstr *MI = getInstructionFromIndex(Def);
      assert(MI && "No instruction defining live value");
      MI->addRegisterDead(LI.reg, TRI);
      if (dead && MI->allDefsAreDead()) {
        DEBUG(dbgs() << "All defs dead: " << Def << '\t' << *MI);
        dead->push_back(MI);
      }
    }
  }
  return MayHaveSplitComponents;
}
// Subregister-range variant of shrinkToUses: trim subrange \p SR of register
// \p Reg so it only covers operands whose lane mask overlaps SR.LaneMask.
// Dead PHI values are marked unused; unlike the main-range variant, no dead
// list is collected and nothing is returned.
void LiveIntervals::shrinkToUses(LiveInterval::SubRange &SR, unsigned Reg) {
  DEBUG(dbgs() << "Shrink: " << SR << '\n');
  assert(TargetRegisterInfo::isVirtualRegister(Reg)
         && "Can only shrink virtual registers");

  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;

  // Visit all instructions reading Reg. LastIdx dedupes multiple operands of
  // the same instruction (operands of one instr are visited consecutively).
  SlotIndex LastIdx;
  for (MachineOperand &MO : MRI->use_nodbg_operands(Reg)) {
    // Skip "undef" uses.
    if (!MO.readsReg())
      continue;
    // Maybe the operand is for a subregister we don't care about.
    unsigned SubReg = MO.getSubReg();
    if (SubReg != 0) {
      LaneBitmask LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
      if ((LaneMask & SR.LaneMask) == 0)
        continue;
    }
    // We only need to visit each instruction once.
    MachineInstr *UseMI = MO.getParent();
    SlotIndex Idx = getInstructionIndex(*UseMI).getRegSlot();
    if (Idx == LastIdx)
      continue;
    LastIdx = Idx;
    LiveQueryResult LRQ = SR.Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    // For Subranges it is possible that only undef values are left in that
    // part of the subregister, so there is no real liverange at the use
    if (!VNI)
      continue;
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;
    WorkList.push_back(std::make_pair(Idx, VNI));
  }

  // Create a new live ranges with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, make_range(SR.vni_begin(), SR.vni_end()));
  extendSegmentsToUses(NewLR, *Indexes, WorkList, SR);

  // Move the trimmed ranges back.
  SR.segments.swap(NewLR.segments);

  // Remove dead PHI value numbers
  for (auto VNI : SR.valnos) {
    if (VNI->isUnused())
      continue;
    const LiveRange::Segment *Segment = SR.getSegmentContaining(VNI->def);
    assert(Segment != nullptr && "Missing segment for VNI");
    if (Segment->end != VNI->def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      DEBUG(dbgs() << "Dead PHI at " << VNI->def << " may separate interval\n");
      VNI->markUnused();
      SR.removeSegment(*Segment);
    }
  }

  DEBUG(dbgs() << "Shrunk: " << SR << '\n');
}
  495. void LiveIntervals::extendToIndices(LiveRange &LR,
  496. ArrayRef<SlotIndex> Indices,
  497. ArrayRef<SlotIndex> Undefs) {
  498. assert(LRCalc && "LRCalc not initialized.");
  499. LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  500. for (unsigned i = 0, e = Indices.size(); i != e; ++i)
  501. LRCalc->extend(LR, Indices[i], /*PhysReg=*/0, Undefs);
  502. }
// Remove liveness of the value live at \p Kill from \p Kill to the end of
// its live segment, following the range across successor blocks. Every
// created end point (a position where the value used to be live but no
// longer is) is appended to \p EndPoints when it is non-null, so callers can
// later re-extend the range.
void LiveIntervals::pruneValue(LiveRange &LR, SlotIndex Kill,
                               SmallVectorImpl<SlotIndex> *EndPoints) {
  LiveQueryResult LRQ = LR.Query(Kill);
  VNInfo *VNI = LRQ.valueOutOrDead();
  if (!VNI)
    return;

  MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
  SlotIndex MBBEnd = Indexes->getMBBEndIdx(KillMBB);

  // If VNI isn't live out from KillMBB, the value is trivially pruned.
  if (LRQ.endPoint() < MBBEnd) {
    LR.removeSegment(Kill, LRQ.endPoint());
    if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    return;
  }

  // VNI is live out of KillMBB.
  LR.removeSegment(Kill, MBBEnd);
  if (EndPoints) EndPoints->push_back(MBBEnd);

  // Find all blocks that are reachable from KillMBB without leaving VNI's live
  // range. It is possible that KillMBB itself is reachable, so start a DFS
  // from each successor.
  typedef SmallPtrSet<MachineBasicBlock*, 9> VisitedTy;
  VisitedTy Visited;
  for (MachineBasicBlock::succ_iterator
       SuccI = KillMBB->succ_begin(), SuccE = KillMBB->succ_end();
       SuccI != SuccE; ++SuccI) {
    // Visited is shared across the per-successor DFS walks, so no block is
    // processed twice.
    for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
         I = df_ext_begin(*SuccI, Visited), E = df_ext_end(*SuccI, Visited);
         I != E;) {
      MachineBasicBlock *MBB = *I;

      // Check if VNI is live in to MBB.
      SlotIndex MBBStart, MBBEnd;
      std::tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
      LiveQueryResult LRQ = LR.Query(MBBStart);
      if (LRQ.valueIn() != VNI) {
        // This block isn't part of the VNI segment. Prune the search.
        I.skipChildren();
        continue;
      }

      // Prune the search if VNI is killed in MBB.
      if (LRQ.endPoint() < MBBEnd) {
        LR.removeSegment(MBBStart, LRQ.endPoint());
        if (EndPoints) EndPoints->push_back(LRQ.endPoint());
        I.skipChildren();
        continue;
      }

      // VNI is live through MBB.
      LR.removeSegment(MBBStart, MBBEnd);
      if (EndPoints) EndPoints->push_back(MBBEnd);
      ++I;
    }
  }
}
  555. //===----------------------------------------------------------------------===//
  556. // Register allocator hooks.
  557. //
  558. void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
  559. // Keep track of regunit ranges.
  560. SmallVector<std::pair<const LiveRange*, LiveRange::const_iterator>, 8> RU;
  561. // Keep track of subregister ranges.
  562. SmallVector<std::pair<const LiveInterval::SubRange*,
  563. LiveRange::const_iterator>, 4> SRs;
  564. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  565. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  566. if (MRI->reg_nodbg_empty(Reg))
  567. continue;
  568. const LiveInterval &LI = getInterval(Reg);
  569. if (LI.empty())
  570. continue;
  571. // Find the regunit intervals for the assigned register. They may overlap
  572. // the virtual register live range, cancelling any kills.
  573. RU.clear();
  574. for (MCRegUnitIterator Units(VRM->getPhys(Reg), TRI); Units.isValid();
  575. ++Units) {
  576. const LiveRange &RURange = getRegUnit(*Units);
  577. if (RURange.empty())
  578. continue;
  579. RU.push_back(std::make_pair(&RURange, RURange.find(LI.begin()->end)));
  580. }
  581. if (MRI->subRegLivenessEnabled()) {
  582. SRs.clear();
  583. for (const LiveInterval::SubRange &SR : LI.subranges()) {
  584. SRs.push_back(std::make_pair(&SR, SR.find(LI.begin()->end)));
  585. }
  586. }
  587. // Every instruction that kills Reg corresponds to a segment range end
  588. // point.
  589. for (LiveInterval::const_iterator RI = LI.begin(), RE = LI.end(); RI != RE;
  590. ++RI) {
  591. // A block index indicates an MBB edge.
  592. if (RI->end.isBlock())
  593. continue;
  594. MachineInstr *MI = getInstructionFromIndex(RI->end);
  595. if (!MI)
  596. continue;
  597. // Check if any of the regunits are live beyond the end of RI. That could
  598. // happen when a physreg is defined as a copy of a virtreg:
  599. //
  600. // %EAX = COPY %vreg5
  601. // FOO %vreg5 <--- MI, cancel kill because %EAX is live.
  602. // BAR %EAX<kill>
  603. //
  604. // There should be no kill flag on FOO when %vreg5 is rewritten as %EAX.
  605. for (auto &RUP : RU) {
  606. const LiveRange &RURange = *RUP.first;
  607. LiveRange::const_iterator &I = RUP.second;
  608. if (I == RURange.end())
  609. continue;
  610. I = RURange.advanceTo(I, RI->end);
  611. if (I == RURange.end() || I->start >= RI->end)
  612. continue;
  613. // I is overlapping RI.
  614. goto CancelKill;
  615. }
  616. if (MRI->subRegLivenessEnabled()) {
  617. // When reading a partial undefined value we must not add a kill flag.
  618. // The regalloc might have used the undef lane for something else.
  619. // Example:
  620. // %vreg1 = ... ; R32: %vreg1
  621. // %vreg2:high16 = ... ; R64: %vreg2
  622. // = read %vreg2<kill> ; R64: %vreg2
  623. // = read %vreg1 ; R32: %vreg1
  624. // The <kill> flag is correct for %vreg2, but the register allocator may
  625. // assign R0L to %vreg1, and R0 to %vreg2 because the low 32bits of R0
  626. // are actually never written by %vreg2. After assignment the <kill>
  627. // flag at the read instruction is invalid.
  628. LaneBitmask DefinedLanesMask;
  629. if (!SRs.empty()) {
  630. // Compute a mask of lanes that are defined.
  631. DefinedLanesMask = 0;
  632. for (auto &SRP : SRs) {
  633. const LiveInterval::SubRange &SR = *SRP.first;
  634. LiveRange::const_iterator &I = SRP.second;
  635. if (I == SR.end())
  636. continue;
  637. I = SR.advanceTo(I, RI->end);
  638. if (I == SR.end() || I->start >= RI->end)
  639. continue;
  640. // I is overlapping RI
  641. DefinedLanesMask |= SR.LaneMask;
  642. }
  643. } else
  644. DefinedLanesMask = ~0u;
  645. bool IsFullWrite = false;
  646. for (const MachineOperand &MO : MI->operands()) {
  647. if (!MO.isReg() || MO.getReg() != Reg)
  648. continue;
  649. if (MO.isUse()) {
  650. // Reading any undefined lanes?
  651. LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(MO.getSubReg());
  652. if ((UseMask & ~DefinedLanesMask) != 0)
  653. goto CancelKill;
  654. } else if (MO.getSubReg() == 0) {
  655. // Writing to the full register?
  656. assert(MO.isDef());
  657. IsFullWrite = true;
  658. }
  659. }
  660. // If an instruction writes to a subregister, a new segment starts in
  661. // the LiveInterval. But as this is only overriding part of the register
  662. // adding kill-flags is not correct here after registers have been
  663. // assigned.
  664. if (!IsFullWrite) {
  665. // Next segment has to be adjacent in the subregister write case.
  666. LiveRange::const_iterator N = std::next(RI);
  667. if (N != LI.end() && N->start == RI->end)
  668. goto CancelKill;
  669. }
  670. }
  671. MI->addRegisterKilled(Reg, nullptr);
  672. continue;
  673. CancelKill:
  674. MI->clearRegisterKills(Reg, nullptr);
  675. }
  676. }
  677. }
  678. MachineBasicBlock*
  679. LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
  680. // A local live range must be fully contained inside the block, meaning it is
  681. // defined and killed at instructions, not at block boundaries. It is not
  682. // live in or or out of any block.
  683. //
  684. // It is technically possible to have a PHI-defined live range identical to a
  685. // single block, but we are going to return false in that case.
  686. SlotIndex Start = LI.beginIndex();
  687. if (Start.isBlock())
  688. return nullptr;
  689. SlotIndex Stop = LI.endIndex();
  690. if (Stop.isBlock())
  691. return nullptr;
  692. // getMBBFromIndex doesn't need to search the MBB table when both indexes
  693. // belong to proper instructions.
  694. MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
  695. MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
  696. return MBB1 == MBB2 ? MBB1 : nullptr;
  697. }
  698. bool
  699. LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
  700. for (const VNInfo *PHI : LI.valnos) {
  701. if (PHI->isUnused() || !PHI->isPHIDef())
  702. continue;
  703. const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
  704. // Conservatively return true instead of scanning huge predecessor lists.
  705. if (PHIMBB->pred_size() > 100)
  706. return true;
  707. for (MachineBasicBlock::const_pred_iterator
  708. PI = PHIMBB->pred_begin(), PE = PHIMBB->pred_end(); PI != PE; ++PI)
  709. if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(*PI)))
  710. return true;
  711. }
  712. return false;
  713. }
  714. float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  715. const MachineBlockFrequencyInfo *MBFI,
  716. const MachineInstr &MI) {
  717. BlockFrequency Freq = MBFI->getBlockFreq(MI.getParent());
  718. const float Scale = 1.0f / MBFI->getEntryFreq();
  719. return (isDef + isUse) * (Freq.getFrequency() * Scale);
  720. }
  721. LiveRange::Segment
  722. LiveIntervals::addSegmentToEndOfBlock(unsigned reg, MachineInstr &startInst) {
  723. LiveInterval& Interval = createEmptyInterval(reg);
  724. VNInfo *VN = Interval.getNextValue(
  725. SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  726. getVNInfoAllocator());
  727. LiveRange::Segment S(SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  728. getMBBEndIdx(startInst.getParent()), VN);
  729. Interval.addSegment(S);
  730. return S;
  731. }
  732. //===----------------------------------------------------------------------===//
  733. // Register mask functions
  734. //===----------------------------------------------------------------------===//
// Intersect LI's segments with the recorded register-mask (call-clobber)
// slots. Returns true iff LI overlaps at least one mask; in that case
// UsableRegs is (re)initialized to all-ones and then cleared of every
// register clobbered by any overlapping mask.
bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();

  // Use a smaller arrays for local live ranges.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    // LI is contained in a single block: only that block's masks can matter.
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }

  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI =
    std::lower_bound(Slots.begin(), Slots.end(), LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();

  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;

  bool Found = false;
  // Two-cursor merge: advance the slot cursor and the segment cursor in
  // lock-step; every exit path returns Found.
  for (;;) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      if (!Found) {
        // This is the first overlap. Initialize UsableRegs to all ones.
        UsableRegs.clear();
        UsableRegs.resize(TRI->getNumRegs(), true);
        Found = true;
      }
      // Remove usable registers clobbered by this mask.
      UsableRegs.clearBitsNotInMask(Bits[SlotI-Slots.begin()]);
      if (++SlotI == SlotE)
        return Found;
    }
    // *SlotI is beyond the current LI segment.
    LiveI = LI.advanceTo(LiveI, *SlotI);
    if (LiveI == LiveE)
      return Found;
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}
  785. //===----------------------------------------------------------------------===//
  786. // IntervalUpdate class.
  787. //===----------------------------------------------------------------------===//
  788. // HMEditor is a toolkit used by handleMove to trim or extend live intervals.
class LiveIntervals::HMEditor {
private:
  LiveIntervals& LIS;
  const MachineRegisterInfo& MRI;
  const TargetRegisterInfo& TRI;
  // The instruction's old and new slot indexes; the direction of the move
  // (OldIdx vs. NewIdx) selects handleMoveDown or handleMoveUp.
  SlotIndex OldIdx;
  SlotIndex NewIdx;
  // Live ranges already processed for this move; prevents updating the same
  // range twice when an instruction has several operands on one register.
  SmallPtrSet<LiveRange*, 8> Updated;
  bool UpdateFlags;

public:
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI,
           SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
    : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
      UpdateFlags(UpdateFlags) {}

  // FIXME: UpdateFlags is a workaround that creates live intervals for all
  // physregs, even those that aren't needed for regalloc, in order to update
  // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
  // flags, and postRA passes will use a live register utility instead.
  LiveRange *getRegUnitLI(unsigned Unit) {
    if (UpdateFlags)
      return &LIS.getRegUnit(Unit);
    return LIS.getCachedRegUnit(Unit);
  }

  /// Update all live ranges touched by MI, assuming a move from OldIdx to
  /// NewIdx.
  void updateAllRanges(MachineInstr *MI) {
    DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": " << *MI);
    bool hasRegMask = false;
    for (MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        hasRegMask = true;
      if (!MO.isReg())
        continue;
      if (MO.isUse()) {
        if (!MO.readsReg())
          continue;
        // Aggressively clear all kill flags.
        // They are reinserted by VirtRegRewriter.
        MO.setIsKill(false);
      }

      unsigned Reg = MO.getReg();
      if (!Reg)
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        LiveInterval &LI = LIS.getInterval(Reg);
        if (LI.hasSubRanges()) {
          // Update every subrange whose lanes intersect the operand's lanes.
          unsigned SubReg = MO.getSubReg();
          LaneBitmask LaneMask = SubReg ? TRI.getSubRegIndexLaneMask(SubReg)
                                        : MRI.getMaxLaneMaskForVReg(Reg);
          for (LiveInterval::SubRange &S : LI.subranges()) {
            if ((S.LaneMask & LaneMask) == 0)
              continue;
            updateRange(S, Reg, S.LaneMask);
          }
        }
        updateRange(LI, Reg, 0);
        continue;
      }

      // For physregs, only update the regunits that actually have a
      // precomputed live range.
      for (MCRegUnitIterator Units(Reg, &TRI); Units.isValid(); ++Units)
        if (LiveRange *LR = getRegUnitLI(*Units))
          updateRange(*LR, *Units, 0);
    }
    if (hasRegMask)
      updateRegMaskSlots();
  }

private:
  /// Update a single live range, assuming an instruction has been moved from
  /// OldIdx to NewIdx.
  void updateRange(LiveRange &LR, unsigned Reg, LaneBitmask LaneMask) {
    // Skip ranges we already fixed up for this move.
    if (!Updated.insert(&LR).second)
      return;
    DEBUG({
      dbgs() << "     ";
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        dbgs() << PrintReg(Reg);
        if (LaneMask != 0)
          dbgs() << " L" << PrintLaneMask(LaneMask);
      } else {
        dbgs() << PrintRegUnit(Reg, &TRI);
      }
      dbgs() << ":\t" << LR << '\n';
    });
    if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
      handleMoveDown(LR);
    else
      handleMoveUp(LR, Reg, LaneMask);
    DEBUG(dbgs() << "        -->\t" << LR << '\n');
    LR.verify();
  }

  /// Update LR to reflect an instruction has been moved downwards from OldIdx
  /// to NewIdx (OldIdx < NewIdx).
  void handleMoveDown(LiveRange &LR) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value already extends to NewIdx, there is nothing to do.
      if (SlotIndex::isEarlierEqualInstr(NewIdx, OldIdxIn->end))
        return;
      // Aggressively remove all kill flags from the old kill point.
      // Kill flags shouldn't be used while live intervals exist, they will be
      // reinserted by VirtRegRewriter.
      if (MachineInstr *KillMI = LIS.getInstructionFromIndex(OldIdxIn->end))
        for (MIBundleOperands MO(*KillMI); MO.isValid(); ++MO)
          if (MO->isReg() && MO->isUse())
            MO->setIsKill(false);

      // Is there a def before NewIdx which is not OldIdx?
      LiveRange::iterator Next = std::next(OldIdxIn);
      if (Next != E && !SlotIndex::isSameInstr(OldIdx, Next->start) &&
          SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
        // If we are here then OldIdx was just a use but not a def. We only have
        // to ensure liveness extends to NewIdx.
        LiveRange::iterator NewIdxIn =
          LR.advanceTo(Next, NewIdx.getBaseIndex());
        // Extend the segment before NewIdx if necessary.
        if (NewIdxIn == E ||
            !SlotIndex::isEarlierInstr(NewIdxIn->start, NewIdx)) {
          LiveRange::iterator Prev = std::prev(NewIdxIn);
          Prev->end = NewIdx.getRegSlot();
        }
        // Extend OldIdxIn.
        OldIdxIn->end = Next->start;
        return;
      }

      // Adjust OldIdxIn->end to reach NewIdx. This may temporarily make LR
      // invalid by overlapping ranges.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      OldIdxIn->end = NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber());
      // If this was not a kill, then there was no def and we're done.
      if (!isKill)
        return;

      // Did we have a Def at OldIdx?
      OldIdxOut = Next;
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");

    // If the defined value extends beyond NewIdx, just move the beginning
    // of the segment to NewIdx.
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    if (SlotIndex::isEarlierInstr(NewIdxDef, OldIdxOut->end)) {
      OldIdxVNI->def = NewIdxDef;
      OldIdxOut->start = OldIdxVNI->def;
      return;
    }

    // If we are here then we have a Definition at OldIdx which ends before
    // NewIdx.

    // Is there an existing Def at NewIdx?
    LiveRange::iterator AfterNewIdx
      = LR.advanceTo(OldIdxOut, NewIdx.getRegSlot());
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();
    if (!OldIdxDefIsDead &&
        SlotIndex::isEarlierInstr(OldIdxOut->end, NewIdxDef)) {
      // OldIdx is not a dead def, and NewIdxDef is inside a new interval.
      VNInfo *DefVNI;
      if (OldIdxOut != LR.begin() &&
          !SlotIndex::isEarlierInstr(std::prev(OldIdxOut)->end,
                                     OldIdxOut->start)) {
        // There is no gap between OldIdxOut and its predecessor anymore,
        // merge them.
        LiveRange::iterator IPrev = std::prev(OldIdxOut);
        DefVNI = OldIdxVNI;
        IPrev->end = OldIdxOut->end;
      } else {
        // The value is live in to OldIdx
        LiveRange::iterator INext = std::next(OldIdxOut);
        assert(INext != E && "Must have following segment");
        // We merge OldIdxOut and its successor. As we're dealing with subreg
        // reordering, there is always a successor to OldIdxOut in the same BB
        // We don't need INext->valno anymore and will reuse for the new segment
        // we create later.
        DefVNI = OldIdxVNI;
        INext->start = OldIdxOut->end;
        INext->valno->def = INext->start;
      }

      // If NewIdx is behind the last segment, extend that and append a new one.
      if (AfterNewIdx == E) {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        //    |-  ?/OldIdxOut  -| |- X0 -| ... |- Xn -| end
        // => |- X0/OldIdxOut  -| ... |- Xn -| |- undef/NewS -| end
        std::copy(std::next(OldIdxOut), E, OldIdxOut);
        // The last segment is undefined now, reuse it for a dead def.
        LiveRange::iterator NewSegment = std::prev(E);
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         DefVNI);
        DefVNI->def = NewIdxDef;

        LiveRange::iterator Prev = std::prev(NewSegment);
        Prev->end = NewIdxDef;
      } else {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        //    |-  ?/OldIdxOut -| |- X0 -| ... |- Xn/AfterNewIdx -| |- Next -|
        // => |- X0/OldIdxOut -| ... |- Xn -| |- Xn/AfterNewIdx -| |- Next -|
        std::copy(std::next(OldIdxOut), std::next(AfterNewIdx), OldIdxOut);
        LiveRange::iterator Prev = std::prev(AfterNewIdx);
        // We have two cases:
        if (SlotIndex::isEarlierInstr(Prev->start, NewIdxDef)) {
          // Case 1: NewIdx is inside a liverange. Split this liverange at
          // NewIdxDef into the segment "Prev" followed by "NewSegment".
          LiveRange::iterator NewSegment = AfterNewIdx;
          *NewSegment = LiveRange::Segment(NewIdxDef, Prev->end, Prev->valno);
          Prev->valno->def = NewIdxDef;

          *Prev = LiveRange::Segment(Prev->start, NewIdxDef, DefVNI);
          DefVNI->def = Prev->start;
        } else {
          // Case 2: NewIdx is in a lifetime hole. Keep AfterNewIdx as is and
          // turn Prev into a segment from NewIdx to AfterNewIdx->start.
          *Prev = LiveRange::Segment(NewIdxDef, AfterNewIdx->start, DefVNI);
          DefVNI->def = NewIdxDef;
          assert(DefVNI != AfterNewIdx->valno);
        }
      }
      return;
    }

    if (AfterNewIdx != E &&
        SlotIndex::isSameInstr(AfterNewIdx->start, NewIdxDef)) {
      // There is an existing def at NewIdx. The def at OldIdx is coalesced into
      // that value.
      assert(AfterNewIdx->valno != OldIdxVNI && "Multiple defs of value?");
      LR.removeValNo(OldIdxVNI);
    } else {
      // There was no existing def at NewIdx. We need to create a dead def
      // at NewIdx. Shift segments over the old OldIdxOut segment, this frees
      // a new segment at the place where we want to construct the dead def.
      //    |- OldIdxOut -| |- X0 -| ... |- Xn -| |- AfterNewIdx -|
      // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS. -| |- AfterNewIdx -|
      assert(AfterNewIdx != OldIdxOut && "Inconsistent iterators");
      std::copy(std::next(OldIdxOut), AfterNewIdx, OldIdxOut);
      // We can reuse OldIdxVNI now.
      LiveRange::iterator NewSegment = std::prev(AfterNewIdx);
      VNInfo *NewSegmentVNI = OldIdxVNI;
      NewSegmentVNI->def = NewIdxDef;
      *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                       NewSegmentVNI);
    }
  }

  /// Update LR to reflect an instruction has been moved upwards from OldIdx
  /// to NewIdx (NewIdx < OldIdx).
  void handleMoveUp(LiveRange &LR, unsigned Reg, LaneBitmask LaneMask) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value isn't killed here, then we have no Def at
      // OldIdx, moreover the value must be live at NewIdx so there is nothing
      // to do.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      if (!isKill)
        return;

      // At this point we have to move OldIdxIn->end back to the nearest
      // previous use or (dead-)def but no further than NewIdx.
      SlotIndex DefBeforeOldIdx
        = std::max(OldIdxIn->start.getDeadSlot(),
                   NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber()));
      OldIdxIn->end = findLastUseBefore(DefBeforeOldIdx, Reg, LaneMask);

      // Did we have a Def at OldIdx? If not we are done now.
      OldIdxOut = std::next(OldIdxIn);
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
      // Leave OldIdxIn pointing at the segment before the def (or E if none).
      OldIdxIn = OldIdxOut != LR.begin() ? std::prev(OldIdxOut) : E;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();

    // Is there an existing def at NewIdx?
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    LiveRange::iterator NewIdxOut = LR.find(NewIdx.getRegSlot());
    if (SlotIndex::isSameInstr(NewIdxOut->start, NewIdx)) {
      assert(NewIdxOut->valno != OldIdxVNI &&
             "Same value defined more than once?");
      // If OldIdx was a dead def remove it.
      if (!OldIdxDefIsDead) {
        // Remove segment starting at NewIdx and move begin of OldIdxOut to
        // NewIdx so it can take its place.
        OldIdxVNI->def = NewIdxDef;
        OldIdxOut->start = NewIdxDef;
        LR.removeValNo(NewIdxOut->valno);
      } else {
        // Simply remove the dead def at OldIdx.
        LR.removeValNo(OldIdxVNI);
      }
    } else {
      // Previously nothing was live after NewIdx, so all we have to do now is
      // move the begin of OldIdxOut to NewIdx.
      if (!OldIdxDefIsDead) {
        // Do we have any intermediate Defs between OldIdx and NewIdx?
        if (OldIdxIn != E &&
            SlotIndex::isEarlierInstr(NewIdxDef, OldIdxIn->start)) {
          // OldIdx is not a dead def and NewIdx is before predecessor start.
          LiveRange::iterator NewIdxIn = NewIdxOut;
          assert(NewIdxIn == LR.find(NewIdx.getBaseIndex()));
          const SlotIndex SplitPos = NewIdxDef;

          // Merge the OldIdxIn and OldIdxOut segments into OldIdxOut.
          *OldIdxOut = LiveRange::Segment(OldIdxIn->start, OldIdxOut->end,
                                          OldIdxIn->valno);
          // OldIdxIn and OldIdxVNI are now undef and can be overridden.
          // We Slide [NewIdxIn, OldIdxIn) down one position.
          //    |- X0/NewIdxIn -| ... |- Xn-1 -||- Xn/OldIdxIn -||- OldIdxOut -|
          // => |- undef/NexIdxIn -| |- X0 -| ... |- Xn-1 -| |- Xn/OldIdxOut -|
          std::copy_backward(NewIdxIn, OldIdxIn, OldIdxOut);
          // NewIdxIn is now considered undef so we can reuse it for the moved
          // value.
          LiveRange::iterator NewSegment = NewIdxIn;
          LiveRange::iterator Next = std::next(NewSegment);
          if (SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
            // There is no gap between NewSegment and its predecessor.
            *NewSegment = LiveRange::Segment(Next->start, SplitPos,
                                             Next->valno);
            *Next = LiveRange::Segment(SplitPos, Next->end, OldIdxVNI);
            Next->valno->def = SplitPos;
          } else {
            // There is a gap between NewSegment and its predecessor
            // Value becomes live in.
            *NewSegment = LiveRange::Segment(SplitPos, Next->start, OldIdxVNI);
            NewSegment->valno->def = SplitPos;
          }
        } else {
          // Leave the end point of a live def.
          OldIdxOut->start = NewIdxDef;
          OldIdxVNI->def = NewIdxDef;
          if (OldIdxIn != E && SlotIndex::isEarlierInstr(NewIdx, OldIdxIn->end))
            OldIdxIn->end = NewIdx.getRegSlot();
        }
      } else {
        // OldIdxVNI is a dead def. It may have been moved across other values
        // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
        // down one position.
        //    |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
        // => |- undef/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
        std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));
        // OldIdxVNI can be reused now to build a new dead def segment.
        LiveRange::iterator NewSegment = NewIdxOut;
        VNInfo *NewSegmentVNI = OldIdxVNI;
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         NewSegmentVNI);
        NewSegmentVNI->def = NewIdxDef;
      }
    }
  }

  // Relocate the RegMaskSlots entry for the moved instruction from OldIdx to
  // NewIdx; asserts that the move does not reorder it relative to other
  // regmask (call) slots.
  void updateRegMaskSlots() {
    SmallVectorImpl<SlotIndex>::iterator RI =
      std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
                       OldIdx);
    assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
           "No RegMask at OldIdx.");
    *RI = NewIdx.getRegSlot();
    assert((RI == LIS.RegMaskSlots.begin() ||
            SlotIndex::isEarlierInstr(*std::prev(RI), *RI)) &&
           "Cannot move regmask instruction above another call");
    assert((std::next(RI) == LIS.RegMaskSlots.end() ||
            SlotIndex::isEarlierInstr(*RI, *std::next(RI))) &&
           "Cannot move regmask instruction below another call");
  }

  // Return the last use of reg between NewIdx and OldIdx.
  SlotIndex findLastUseBefore(SlotIndex Before, unsigned Reg,
                              LaneBitmask LaneMask) {
    if (TargetRegisterInfo::isVirtualRegister(Reg)) {
      // Virtual register: walk the (short) use list.
      SlotIndex LastUse = Before;
      for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
        if (MO.isUndef())
          continue;
        unsigned SubReg = MO.getSubReg();
        // Ignore uses whose lanes do not intersect LaneMask (when given).
        if (SubReg != 0 && LaneMask != 0
            && (TRI.getSubRegIndexLaneMask(SubReg) & LaneMask) == 0)
          continue;

        const MachineInstr &MI = *MO.getParent();
        SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
        if (InstSlot > LastUse && InstSlot < OldIdx)
          LastUse = InstSlot.getRegSlot();
      }
      return LastUse;
    }

    // This is a regunit interval, so scanning the use list could be very
    // expensive. Scan upwards from OldIdx instead.
    assert(Before < OldIdx && "Expected upwards move");
    SlotIndexes *Indexes = LIS.getSlotIndexes();
    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Before);

    // OldIdx may not correspond to an instruction any longer, so set MII to
    // point to the next instruction after OldIdx, or MBB->end().
    MachineBasicBlock::iterator MII = MBB->end();
    if (MachineInstr *MI = Indexes->getInstructionFromIndex(
                           Indexes->getNextNonNullIndex(OldIdx)))
      if (MI->getParent() == MBB)
        MII = MI;

    MachineBasicBlock::iterator Begin = MBB->begin();
    while (MII != Begin) {
      if ((--MII)->isDebugValue())
        continue;
      SlotIndex Idx = Indexes->getInstructionIndex(*MII);

      // Stop searching when Before is reached.
      if (!SlotIndex::isEarlierInstr(Before, Idx))
        return Before;

      // Check if MII uses Reg.
      for (MIBundleOperands MO(*MII); MO.isValid(); ++MO)
        if (MO->isReg() && !MO->isUndef() &&
            TargetRegisterInfo::isPhysicalRegister(MO->getReg()) &&
            TRI.hasRegUnit(MO->getReg(), Reg))
          return Idx.getRegSlot();
    }
    // Didn't reach Before. It must be the first instruction in the block.
    return Before;
  }
};
  1219. void LiveIntervals::handleMove(MachineInstr &MI, bool UpdateFlags) {
  1220. assert(!MI.isBundled() && "Can't handle bundled instructions yet.");
  1221. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  1222. Indexes->removeMachineInstrFromMaps(MI);
  1223. SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
  1224. assert(getMBBStartIdx(MI.getParent()) <= OldIndex &&
  1225. OldIndex < getMBBEndIdx(MI.getParent()) &&
  1226. "Cannot handle moves across basic block boundaries.");
  1227. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1228. HME.updateAllRanges(&MI);
  1229. }
  1230. void LiveIntervals::handleMoveIntoBundle(MachineInstr &MI,
  1231. MachineInstr &BundleStart,
  1232. bool UpdateFlags) {
  1233. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  1234. SlotIndex NewIndex = Indexes->getInstructionIndex(BundleStart);
  1235. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1236. HME.updateAllRanges(&MI);
  1237. }
// Repair LR (the main range or one subrange of Reg's interval, restricted to
// LaneMask) over the instruction range [Begin, End) whose indexes were just
// rebuilt. Walks the region backwards, re-anchoring segment start/end points
// at the defs and uses of Reg it finds.
void LiveIntervals::repairOldRegInRange(const MachineBasicBlock::iterator Begin,
                                        const MachineBasicBlock::iterator End,
                                        const SlotIndex endIdx,
                                        LiveRange &LR, const unsigned Reg,
                                        LaneBitmask LaneMask) {
  LiveInterval::iterator LII = LR.find(endIdx);
  SlotIndex lastUseIdx;
  if (LII == LR.begin()) {
    // This happens when the function is called for a subregister that only
    // occurs _after_ the range that is to be repaired.
    return;
  }
  if (LII != LR.end() && LII->start < endIdx)
    lastUseIdx = LII->end;
  else
    --LII;

  // Scan the region backwards so later defs/uses are seen before earlier
  // ones; lastUseIdx tracks the most recent (later) use slot encountered.
  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr &MI = *I;
    if (MI.isDebugValue())
      continue;

    SlotIndex instrIdx = getInstructionIndex(MI);
    // Whether the current segment's endpoints still map to live instructions;
    // stale endpoints need to be re-anchored at real defs/uses below.
    bool isStartValid = getInstructionFromIndex(LII->start);
    bool isEndValid = getInstructionFromIndex(LII->end);

    // FIXME: This doesn't currently handle early-clobber or multiple removed
    // defs inside of the region to repair.
    for (MachineInstr::mop_iterator OI = MI.operands_begin(),
         OE = MI.operands_end();
         OI != OE; ++OI) {
      const MachineOperand &MO = *OI;
      if (!MO.isReg() || MO.getReg() != Reg)
        continue;

      // Skip operands whose lanes do not intersect the (sub)range's lanes.
      unsigned SubReg = MO.getSubReg();
      LaneBitmask Mask = TRI->getSubRegIndexLaneMask(SubReg);
      if ((Mask & LaneMask) == 0)
        continue;

      if (MO.isDef()) {
        if (!isStartValid) {
          if (LII->end.isDead()) {
            // The segment was a dead def at a removed instruction: drop it
            // and step back to the previous segment (if any).
            SlotIndex prevStart;
            if (LII != LR.begin())
              prevStart = std::prev(LII)->start;

            // FIXME: This could be more efficient if there was a
            // removeSegment method that returned an iterator.
            LR.removeSegment(*LII, true);
            if (prevStart.isValid())
              LII = LR.find(prevStart);
            else
              LII = LR.begin();
          } else {
            // Re-anchor the segment (and its value number) at this def.
            LII->start = instrIdx.getRegSlot();
            LII->valno->def = instrIdx.getRegSlot();
            if (MO.getSubReg() && !MO.isUndef())
              lastUseIdx = instrIdx.getRegSlot();
            else
              lastUseIdx = SlotIndex();
            continue;
          }
        }

        if (!lastUseIdx.isValid()) {
          // No later use: create a dead-def segment for this def.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(),
                               instrIdx.getDeadSlot(), VNI);
          LII = LR.addSegment(S);
        } else if (LII->start != instrIdx.getRegSlot()) {
          // Later use exists: the new segment runs from this def to it.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(), lastUseIdx, VNI);
          LII = LR.addSegment(S);
        }

        // A subregister def also reads the register, so it acts as a use for
        // the segment being built below it; a full def does not.
        if (MO.getSubReg() && !MO.isUndef())
          lastUseIdx = instrIdx.getRegSlot();
        else
          lastUseIdx = SlotIndex();
      } else if (MO.isUse()) {
        // FIXME: This should probably be handled outside of this branch,
        // either as part of the def case (for defs inside of the region) or
        // after the loop over the region.
        if (!isEndValid && !LII->end.isBlock())
          LII->end = instrIdx.getRegSlot();
        if (!lastUseIdx.isValid())
          lastUseIdx = instrIdx.getRegSlot();
      }
    }
  }
}
  1323. void
  1324. LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
  1325. MachineBasicBlock::iterator Begin,
  1326. MachineBasicBlock::iterator End,
  1327. ArrayRef<unsigned> OrigRegs) {
  1328. // Find anchor points, which are at the beginning/end of blocks or at
  1329. // instructions that already have indexes.
  1330. while (Begin != MBB->begin() && !Indexes->hasIndex(*Begin))
  1331. --Begin;
  1332. while (End != MBB->end() && !Indexes->hasIndex(*End))
  1333. ++End;
  1334. SlotIndex endIdx;
  1335. if (End == MBB->end())
  1336. endIdx = getMBBEndIdx(MBB).getPrevSlot();
  1337. else
  1338. endIdx = getInstructionIndex(*End);
  1339. Indexes->repairIndexesInRange(MBB, Begin, End);
  1340. for (MachineBasicBlock::iterator I = End; I != Begin;) {
  1341. --I;
  1342. MachineInstr &MI = *I;
  1343. if (MI.isDebugValue())
  1344. continue;
  1345. for (MachineInstr::const_mop_iterator MOI = MI.operands_begin(),
  1346. MOE = MI.operands_end();
  1347. MOI != MOE; ++MOI) {
  1348. if (MOI->isReg() &&
  1349. TargetRegisterInfo::isVirtualRegister(MOI->getReg()) &&
  1350. !hasInterval(MOI->getReg())) {
  1351. createAndComputeVirtRegInterval(MOI->getReg());
  1352. }
  1353. }
  1354. }
  1355. for (unsigned i = 0, e = OrigRegs.size(); i != e; ++i) {
  1356. unsigned Reg = OrigRegs[i];
  1357. if (!TargetRegisterInfo::isVirtualRegister(Reg))
  1358. continue;
  1359. LiveInterval &LI = getInterval(Reg);
  1360. // FIXME: Should we support undefs that gain defs?
  1361. if (!LI.hasAtLeastOneValue())
  1362. continue;
  1363. for (LiveInterval::SubRange &S : LI.subranges()) {
  1364. repairOldRegInRange(Begin, End, endIdx, S, Reg, S.LaneMask);
  1365. }
  1366. repairOldRegInRange(Begin, End, endIdx, LI, Reg);
  1367. }
  1368. }
  1369. void LiveIntervals::removePhysRegDefAt(unsigned Reg, SlotIndex Pos) {
  1370. for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
  1371. if (LiveRange *LR = getCachedRegUnit(*Units))
  1372. if (VNInfo *VNI = LR->getVNInfoAt(Pos))
  1373. LR->removeValNo(VNI);
  1374. }
  1375. }
  1376. void LiveIntervals::removeVRegDefAt(LiveInterval &LI, SlotIndex Pos) {
  1377. // LI may not have the main range computed yet, but its subranges may
  1378. // be present.
  1379. VNInfo *VNI = LI.getVNInfoAt(Pos);
  1380. if (VNI != nullptr) {
  1381. assert(VNI->def.getBaseIndex() == Pos.getBaseIndex());
  1382. LI.removeValNo(VNI);
  1383. }
  1384. // Also remove the value defined in subranges.
  1385. for (LiveInterval::SubRange &S : LI.subranges()) {
  1386. if (VNInfo *SVNI = S.getVNInfoAt(Pos))
  1387. if (SVNI->def.getBaseIndex() == Pos.getBaseIndex())
  1388. S.removeValNo(SVNI);
  1389. }
  1390. LI.removeEmptySubRanges();
  1391. }
  1392. void LiveIntervals::splitSeparateComponents(LiveInterval &LI,
  1393. SmallVectorImpl<LiveInterval*> &SplitLIs) {
  1394. ConnectedVNInfoEqClasses ConEQ(*this);
  1395. unsigned NumComp = ConEQ.Classify(LI);
  1396. if (NumComp <= 1)
  1397. return;
  1398. DEBUG(dbgs() << " Split " << NumComp << " components: " << LI << '\n');
  1399. unsigned Reg = LI.reg;
  1400. const TargetRegisterClass *RegClass = MRI->getRegClass(Reg);
  1401. for (unsigned I = 1; I < NumComp; ++I) {
  1402. unsigned NewVReg = MRI->createVirtualRegister(RegClass);
  1403. LiveInterval &NewLI = createEmptyInterval(NewVReg);
  1404. SplitLIs.push_back(&NewLI);
  1405. }
  1406. ConEQ.Distribute(LI, SplitLIs.data(), *MRI);
  1407. }
  1408. void LiveIntervals::constructMainRangeFromSubranges(LiveInterval &LI) {
  1409. assert(LRCalc && "LRCalc not initialized.");
  1410. LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  1411. LRCalc->constructMainRangeFromSubranges(LI);
  1412. }