LiveIntervalAnalysis.cpp 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177
  1. //===-- LiveIntervalAnalysis.cpp - Live Interval Analysis -----------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the LiveInterval analysis pass which is used
  11. // by the Linear Scan Register allocator. This pass linearizes the
  12. // basic blocks of the function in DFS order and uses the
  13. // LiveVariables pass to conservatively compute live intervals for
  14. // each virtual and physical register.
  15. //
  16. //===----------------------------------------------------------------------===//
  17. #define DEBUG_TYPE "regalloc"
  18. #include "llvm/CodeGen/LiveIntervalAnalysis.h"
  19. #include "LiveRangeCalc.h"
  20. #include "llvm/ADT/DenseSet.h"
  21. #include "llvm/ADT/STLExtras.h"
  22. #include "llvm/Analysis/AliasAnalysis.h"
  23. #include "llvm/CodeGen/LiveVariables.h"
  24. #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
  25. #include "llvm/CodeGen/MachineDominators.h"
  26. #include "llvm/CodeGen/MachineInstr.h"
  27. #include "llvm/CodeGen/MachineRegisterInfo.h"
  28. #include "llvm/CodeGen/Passes.h"
  29. #include "llvm/CodeGen/VirtRegMap.h"
  30. #include "llvm/IR/Value.h"
  31. #include "llvm/Support/BlockFrequency.h"
  32. #include "llvm/Support/CommandLine.h"
  33. #include "llvm/Support/Debug.h"
  34. #include "llvm/Support/ErrorHandling.h"
  35. #include "llvm/Support/raw_ostream.h"
  36. #include "llvm/Target/TargetInstrInfo.h"
  37. #include "llvm/Target/TargetMachine.h"
  38. #include "llvm/Target/TargetRegisterInfo.h"
  39. #include <algorithm>
  40. #include <cmath>
  41. #include <limits>
  42. using namespace llvm;
// Pass identification. The address of ID uniquely identifies the pass;
// LiveIntervalsID is the public handle other passes use to require/preserve it.
char LiveIntervals::ID = 0;
char &llvm::LiveIntervalsID = LiveIntervals::ID;
// Register the pass and its analysis dependencies with the PassRegistry.
INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
                "Live Interval Analysis", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(LiveVariables)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
                "Live Interval Analysis", false, false)
#ifndef NDEBUG
// Debug-only stress-testing knob: eagerly compute live ranges for every
// physical register unit (used by runOnMachineFunction below).
static cl::opt<bool> EnablePrecomputePhysRegs(
  "precompute-phys-liveness", cl::Hidden,
  cl::desc("Eagerly compute live intervals for all physreg units."));
#else
// In release builds the option does not exist; a constant false lets the
// code below test it unconditionally.
static bool EnablePrecomputePhysRegs = false;
#endif // NDEBUG
/// getAnalysisUsage - Declare the analyses this pass requires and preserves.
/// The CFG is never modified, so all CFG-level analyses survive.
void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<AliasAnalysis>();
  AU.addPreserved<AliasAnalysis>();
  // LiveVariables isn't really required by this analysis, it is only required
  // here to make sure it is live during TwoAddressInstructionPass and
  // PHIElimination. This is temporary.
  AU.addRequired<LiveVariables>();
  AU.addPreserved<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  // Transitive requirements stay alive as long as this analysis does, since
  // queries made later (e.g. by LiveRangeCalc) depend on them.
  AU.addRequiredTransitiveID(MachineDominatorsID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreserved<SlotIndexes>();
  AU.addRequiredTransitive<SlotIndexes>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
// Constructor: registers the pass. DomTree and LRCalc are lazily filled in
// by runOnMachineFunction; they start out null.
LiveIntervals::LiveIntervals() : MachineFunctionPass(ID),
  DomTree(0), LRCalc(0) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
}
LiveIntervals::~LiveIntervals() {
  // LRCalc is allocated lazily in runOnMachineFunction and may still be null;
  // deleting a null pointer is a no-op.
  delete LRCalc;
}
  83. void LiveIntervals::releaseMemory() {
  84. // Free the live intervals themselves.
  85. for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
  86. delete VirtRegIntervals[TargetRegisterInfo::index2VirtReg(i)];
  87. VirtRegIntervals.clear();
  88. RegMaskSlots.clear();
  89. RegMaskBits.clear();
  90. RegMaskBlocks.clear();
  91. for (unsigned i = 0, e = RegUnitRanges.size(); i != e; ++i)
  92. delete RegUnitRanges[i];
  93. RegUnitRanges.clear();
  94. // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
  95. VNInfoAllocator.Reset();
  96. }
/// runOnMachineFunction - calculates LiveIntervals
///
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  // Cache the function, register info, and the analyses the computation
  // below relies on.
  MF = &fn;
  MRI = &MF->getRegInfo();
  TM = &fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  AA = &getAnalysis<AliasAnalysis>();
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  // LRCalc is allocated once and reused across runs; it is reset with the
  // current function's state before each use.
  if (!LRCalc)
    LRCalc = new LiveRangeCalc();
  // Allocate space for all virtual registers.
  VirtRegIntervals.resize(MRI->getNumVirtRegs());
  computeVirtRegs();
  computeRegMasks();
  computeLiveInRegUnits();
  if (EnablePrecomputePhysRegs) {
    // For stress testing, precompute live ranges of all physical register
    // units, including reserved registers.
    for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
      getRegUnit(i);
  }
  DEBUG(dump());
  // NOTE(review): always reports the function as modified — confirm whether
  // a pure analysis run should return false here instead.
  return true;
}
  124. /// print - Implement the dump method.
  125. void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
  126. OS << "********** INTERVALS **********\n";
  127. // Dump the regunits.
  128. for (unsigned i = 0, e = RegUnitRanges.size(); i != e; ++i)
  129. if (LiveRange *LR = RegUnitRanges[i])
  130. OS << PrintRegUnit(i, TRI) << ' ' << *LR << '\n';
  131. // Dump the virtregs.
  132. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  133. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  134. if (hasInterval(Reg))
  135. OS << getInterval(Reg) << '\n';
  136. }
  137. OS << "RegMasks:";
  138. for (unsigned i = 0, e = RegMaskSlots.size(); i != e; ++i)
  139. OS << ' ' << RegMaskSlots[i];
  140. OS << '\n';
  141. printInstrs(OS);
  142. }
/// printInstrs - Print the function's machine instructions to OS, annotated
/// with their slot indexes (MF->print takes the SlotIndexes for annotation).
void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  MF->print(OS, Indexes);
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// dumpInstrs - Debug helper: printInstrs to the debug output stream.
/// Only available in builds with asserts or LLVM_ENABLE_DUMP.
void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
#endif
  152. LiveInterval* LiveIntervals::createInterval(unsigned reg) {
  153. float Weight = TargetRegisterInfo::isPhysicalRegister(reg) ?
  154. llvm::huge_valf : 0.0F;
  155. return new LiveInterval(reg, Weight);
  156. }
/// computeVirtRegInterval - Compute the live interval of a virtual register,
/// based on defs and uses.
void LiveIntervals::computeVirtRegInterval(LiveInterval &LI) {
  assert(LRCalc && "LRCalc not initialized.");
  assert(LI.empty() && "Should only compute empty intervals.");
  // Re-seed the shared calculator with this function's indexes/dominators.
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // Two phases: first create a dead def at each def site, then extend those
  // values until they reach every use.
  LRCalc->createDeadDefs(LI);
  LRCalc->extendToUses(LI);
}
  166. void LiveIntervals::computeVirtRegs() {
  167. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  168. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  169. if (MRI->reg_nodbg_empty(Reg))
  170. continue;
  171. createAndComputeVirtRegInterval(Reg);
  172. }
  173. }
/// computeRegMasks - Record the slot index and clobber mask of every regmask
/// operand in the function, plus a per-block (first index, count) pair into
/// that table so per-block queries avoid scanning the whole list.
void LiveIntervals::computeRegMasks() {
  RegMaskBlocks.resize(MF->getNumBlockIDs());
  // Find all instructions with regmask operands.
  for (MachineFunction::iterator MBBI = MF->begin(), E = MF->end();
       MBBI != E; ++MBBI) {
    MachineBasicBlock *MBB = MBBI;
    // RMB holds [first table index, number of masks] for this block.
    std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB->getNumber()];
    RMB.first = RegMaskSlots.size();
    for (MachineBasicBlock::iterator MI = MBB->begin(), ME = MBB->end();
         MI != ME; ++MI)
      for (MIOperands MO(MI); MO.isValid(); ++MO) {
        if (!MO->isRegMask())
          continue;
        // RegMaskSlots and RegMaskBits are parallel arrays: entry i of one
        // corresponds to entry i of the other.
        RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
        RegMaskBits.push_back(MO->getRegMask());
      }
    // Compute the number of register mask instructions in this block.
    RMB.second = RegMaskSlots.size() - RMB.first;
  }
}
  194. //===----------------------------------------------------------------------===//
  195. // Register Unit Liveness
  196. //===----------------------------------------------------------------------===//
  197. //
  198. // Fixed interference typically comes from ABI boundaries: Function arguments
  199. // and return values are passed in fixed registers, and so are exception
  200. // pointers entering landing pads. Certain instructions require values to be
  201. // present in specific registers. That is also represented through fixed
  202. // interference.
  203. //
/// computeRegUnitRange - Compute the live range of a register unit, based
/// on the uses and defs of aliasing registers. The range should be empty,
/// or contain only dead phi-defs from ABI blocks.
void LiveIntervals::computeRegUnitRange(LiveRange &LR, unsigned Unit) {
  assert(LRCalc && "LRCalc not initialized.");
  LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // The physregs aliasing Unit are the roots and their super-registers.
  // Create all values as dead defs before extending to uses. Note that roots
  // may share super-registers. That's OK because createDeadDefs() is
  // idempotent. It is very rare for a register unit to have multiple roots, so
  // uniquing super-registers is probably not worthwhile.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      if (!MRI->reg_empty(*Supers))
        LRCalc->createDeadDefs(LR, *Supers);
    }
  }
  // Now extend LR to reach all uses.
  // Ignore uses of reserved registers. We only track defs of those.
  for (MCRegUnitRootIterator Roots(Unit, TRI); Roots.isValid(); ++Roots) {
    for (MCSuperRegIterator Supers(*Roots, TRI, /*IncludeSelf=*/true);
         Supers.isValid(); ++Supers) {
      unsigned Reg = *Supers;
      if (!MRI->isReserved(Reg) && !MRI->reg_empty(Reg))
        LRCalc->extendToUses(LR, Reg);
    }
  }
}
/// computeLiveInRegUnits - Precompute the live ranges of any register units
/// that are live-in to an ABI block somewhere. Register values can appear
/// without a corresponding def when entering the entry block or a landing pad.
///
void LiveIntervals::computeLiveInRegUnits() {
  RegUnitRanges.resize(TRI->getNumRegUnits());
  DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");
  // Keep track of the live range sets allocated.
  SmallVector<unsigned, 8> NewRanges;
  // Check all basic blocks for live-ins.
  for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
       MFI != MFE; ++MFI) {
    const MachineBasicBlock *MBB = MFI;
    // We only care about ABI blocks: Entry + landing pads.
    if ((MFI != MF->begin() && !MBB->isLandingPad()) || MBB->livein_empty())
      continue;
    // Create phi-defs at Begin for all live-in registers.
    SlotIndex Begin = Indexes->getMBBStartIdx(MBB);
    DEBUG(dbgs() << Begin << "\tBB#" << MBB->getNumber());
    for (MachineBasicBlock::livein_iterator LII = MBB->livein_begin(),
         LIE = MBB->livein_end(); LII != LIE; ++LII) {
      // Each live-in register covers one or more register units; give every
      // unit a value defined at the block entry.
      for (MCRegUnitIterator Units(*LII, TRI); Units.isValid(); ++Units) {
        unsigned Unit = *Units;
        LiveRange *LR = RegUnitRanges[Unit];
        if (!LR) {
          // First time this unit is seen: allocate its range lazily.
          LR = RegUnitRanges[Unit] = new LiveRange();
          NewRanges.push_back(Unit);
        }
        VNInfo *VNI = LR->createDeadDef(Begin, getVNInfoAllocator());
        (void)VNI;
        DEBUG(dbgs() << ' ' << PrintRegUnit(Unit, TRI) << '#' << VNI->id);
      }
    }
    DEBUG(dbgs() << '\n');
  }
  DEBUG(dbgs() << "Created " << NewRanges.size() << " new intervals.\n");
  // Compute the 'normal' part of the ranges.
  for (unsigned i = 0, e = NewRanges.size(); i != e; ++i) {
    unsigned Unit = NewRanges[i];
    computeRegUnitRange(*RegUnitRanges[Unit], Unit);
  }
}
/// shrinkToUses - After removing some uses of a register, shrink its live
/// range to just the remaining uses. This method does not compute reaching
/// defs for new uses, and it doesn't remove dead defs.
/// Returns true if a dead PHI was removed, meaning the interval may have been
/// separated into multiple connected components (the caller should check).
bool LiveIntervals::shrinkToUses(LiveInterval *li,
                                 SmallVectorImpl<MachineInstr*> *dead) {
  DEBUG(dbgs() << "Shrink: " << *li << '\n');
  assert(TargetRegisterInfo::isVirtualRegister(li->reg)
         && "Can only shrink virtual registers");
  // Find all the values used, including PHI kills.
  SmallVector<std::pair<SlotIndex, VNInfo*>, 16> WorkList;
  // Blocks that have already been added to WorkList as live-out.
  SmallPtrSet<MachineBasicBlock*, 16> LiveOut;
  // Visit all instructions reading li->reg.
  for (MachineRegisterInfo::reg_iterator I = MRI->reg_begin(li->reg);
       MachineInstr *UseMI = I.skipInstruction();) {
    if (UseMI->isDebugValue() || !UseMI->readsVirtualRegister(li->reg))
      continue;
    SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    LiveQueryResult LRQ = li->Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    if (!VNI) {
      // This shouldn't happen: readsVirtualRegister returns true, but there is
      // no live value. It is likely caused by a target getting <undef> flags
      // wrong.
      DEBUG(dbgs() << Idx << '\t' << *UseMI
            << "Warning: Instr claims to read non-existent value in "
            << *li << '\n');
      continue;
    }
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;
    WorkList.push_back(std::make_pair(Idx, VNI));
  }
  // Create new live ranges with only minimal live segments per def. Each
  // value initially covers only its own def slot; the worklist loop below
  // grows these segments until they cover every remaining use.
  LiveRange NewLR;
  for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
       I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    NewLR.addSegment(LiveRange::Segment(VNI->def, VNI->def.getDeadSlot(), VNI));
  }
  // Keep track of the PHIs that are in use.
  SmallPtrSet<VNInfo*, 8> UsedPHIs;
  // Extend intervals to reach all uses in WorkList.
  while (!WorkList.empty()) {
    SlotIndex Idx = WorkList.back().first;
    VNInfo *VNI = WorkList.back().second;
    WorkList.pop_back();
    // Idx is a use; the defining/feeding block is the one containing the
    // previous slot.
    const MachineBasicBlock *MBB = getMBBFromIndex(Idx.getPrevSlot());
    SlotIndex BlockStart = getMBBStartIdx(MBB);
    // Extend the live range for VNI to be live at Idx.
    if (VNInfo *ExtVNI = NewLR.extendInBlock(BlockStart, Idx)) {
      (void)ExtVNI;
      assert(ExtVNI == VNI && "Unexpected existing value number");
      // Is this a PHIDef we haven't seen before?
      if (!VNI->isPHIDef() || VNI->def != BlockStart || !UsedPHIs.insert(VNI))
        continue;
      // The PHI is live, make sure the predecessors are live-out.
      for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
           PE = MBB->pred_end(); PI != PE; ++PI) {
        // LiveOut deduplicates: each predecessor is queued at most once.
        if (!LiveOut.insert(*PI))
          continue;
        SlotIndex Stop = getMBBEndIdx(*PI);
        // A predecessor is not required to have a live-out value for a PHI.
        if (VNInfo *PVNI = li->getVNInfoBefore(Stop))
          WorkList.push_back(std::make_pair(Stop, PVNI));
      }
      continue;
    }
    // VNI is live-in to MBB.
    DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    NewLR.addSegment(LiveRange::Segment(BlockStart, Idx, VNI));
    // Make sure VNI is live-out from the predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
         PE = MBB->pred_end(); PI != PE; ++PI) {
      if (!LiveOut.insert(*PI))
        continue;
      SlotIndex Stop = getMBBEndIdx(*PI);
      assert(li->getVNInfoBefore(Stop) == VNI &&
             "Wrong value out of predecessor");
      WorkList.push_back(std::make_pair(Stop, VNI));
    }
  }
  // Handle dead values: any value whose segment never grew past its own dead
  // slot has no remaining uses.
  bool CanSeparate = false;
  for (LiveInterval::vni_iterator I = li->vni_begin(), E = li->vni_end();
       I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    LiveRange::iterator LRI = NewLR.FindSegmentContaining(VNI->def);
    assert(LRI != NewLR.end() && "Missing segment for PHI");
    if (LRI->end != VNI->def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      VNI->markUnused();
      NewLR.removeSegment(LRI->start, LRI->end);
      DEBUG(dbgs() << "Dead PHI at " << VNI->def << " may separate interval\n");
      CanSeparate = true;
    } else {
      // This is a dead def. Make sure the instruction knows.
      MachineInstr *MI = getInstructionFromIndex(VNI->def);
      assert(MI && "No instruction defining live value");
      MI->addRegisterDead(li->reg, TRI);
      if (dead && MI->allDefsAreDead()) {
        DEBUG(dbgs() << "All defs dead: " << VNI->def << '\t' << *MI);
        dead->push_back(MI);
      }
    }
  }
  // Move the trimmed segments back.
  li->segments.swap(NewLR.segments);
  DEBUG(dbgs() << "Shrunk: " << *li << '\n');
  return CanSeparate;
}
  394. void LiveIntervals::extendToIndices(LiveRange &LR,
  395. ArrayRef<SlotIndex> Indices) {
  396. assert(LRCalc && "LRCalc not initialized.");
  397. LRCalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  398. for (unsigned i = 0, e = Indices.size(); i != e; ++i)
  399. LRCalc->extend(LR, Indices[i]);
  400. }
/// pruneValue - Remove the portion of the live value at Kill that extends
/// from Kill onward, following the value through successor blocks. The end
/// point of every removed piece is appended to EndPoints (if non-null) so the
/// caller can re-extend the range later.
void LiveIntervals::pruneValue(LiveInterval *LI, SlotIndex Kill,
                               SmallVectorImpl<SlotIndex> *EndPoints) {
  LiveQueryResult LRQ = LI->Query(Kill);
  VNInfo *VNI = LRQ.valueOut();
  // Nothing live at Kill means nothing to prune.
  if (!VNI)
    return;
  MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
  SlotIndex MBBStart, MBBEnd;
  tie(MBBStart, MBBEnd) = Indexes->getMBBRange(KillMBB);
  // If VNI isn't live out from KillMBB, the value is trivially pruned.
  if (LRQ.endPoint() < MBBEnd) {
    LI->removeSegment(Kill, LRQ.endPoint());
    if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    return;
  }
  // VNI is live out of KillMBB.
  LI->removeSegment(Kill, MBBEnd);
  if (EndPoints) EndPoints->push_back(MBBEnd);
  // Find all blocks that are reachable from KillMBB without leaving VNI's live
  // range. It is possible that KillMBB itself is reachable, so start a DFS
  // from each successor.
  typedef SmallPtrSet<MachineBasicBlock*, 9> VisitedTy;
  VisitedTy Visited;
  for (MachineBasicBlock::succ_iterator
       SuccI = KillMBB->succ_begin(), SuccE = KillMBB->succ_end();
       SuccI != SuccE; ++SuccI) {
    // The Visited set is shared across the DFS from each successor, so each
    // block is processed at most once overall.
    for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
         I = df_ext_begin(*SuccI, Visited), E = df_ext_end(*SuccI, Visited);
         I != E;) {
      MachineBasicBlock *MBB = *I;
      // Check if VNI is live in to MBB.
      tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
      LiveQueryResult LRQ = LI->Query(MBBStart);
      if (LRQ.valueIn() != VNI) {
        // This block isn't part of the VNI segment. Prune the search.
        I.skipChildren();
        continue;
      }
      // Prune the search if VNI is killed in MBB.
      if (LRQ.endPoint() < MBBEnd) {
        LI->removeSegment(MBBStart, LRQ.endPoint());
        if (EndPoints) EndPoints->push_back(LRQ.endPoint());
        I.skipChildren();
        continue;
      }
      // VNI is live through MBB.
      LI->removeSegment(MBBStart, MBBEnd);
      if (EndPoints) EndPoints->push_back(MBBEnd);
      ++I;
    }
  }
}
  453. //===----------------------------------------------------------------------===//
  454. // Register allocator hooks.
  455. //
/// addKillFlags - Add kill flags to instructions that end a virtual
/// register's live segment, unless the assigned physical register (per VRM)
/// is still live past that point through one of its register units.
void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
  // Keep track of regunit ranges.
  SmallVector<std::pair<LiveRange*, LiveRange::iterator>, 8> RU;
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (MRI->reg_nodbg_empty(Reg))
      continue;
    LiveInterval *LI = &getInterval(Reg);
    if (LI->empty())
      continue;
    // Find the regunit intervals for the assigned register. They may overlap
    // the virtual register live range, cancelling any kills.
    RU.clear();
    for (MCRegUnitIterator Units(VRM->getPhys(Reg), TRI); Units.isValid();
         ++Units) {
      LiveRange &RURanges = getRegUnit(*Units);
      if (RURanges.empty())
        continue;
      // Pair each regunit range with a cursor into it; the cursor is advanced
      // monotonically as we walk LI's segments below.
      RU.push_back(std::make_pair(&RURanges, RURanges.find(LI->begin()->end)));
    }
    // Every instruction that kills Reg corresponds to a segment range end
    // point.
    for (LiveInterval::iterator RI = LI->begin(), RE = LI->end(); RI != RE;
         ++RI) {
      // A block index indicates an MBB edge.
      if (RI->end.isBlock())
        continue;
      MachineInstr *MI = getInstructionFromIndex(RI->end);
      if (!MI)
        continue;
      // Check if any of the regunits are live beyond the end of RI. That could
      // happen when a physreg is defined as a copy of a virtreg:
      //
      // %EAX = COPY %vreg5
      // FOO %vreg5 <--- MI, cancel kill because %EAX is live.
      // BAR %EAX<kill>
      //
      // There should be no kill flag on FOO when %vreg5 is rewritten as %EAX.
      bool CancelKill = false;
      for (unsigned u = 0, e = RU.size(); u != e; ++u) {
        LiveRange &RRanges = *RU[u].first;
        LiveRange::iterator &I = RU[u].second;
        if (I == RRanges.end())
          continue;
        I = RRanges.advanceTo(I, RI->end);
        if (I == RRanges.end() || I->start >= RI->end)
          continue;
        // I is overlapping RI.
        CancelKill = true;
        break;
      }
      if (CancelKill)
        MI->clearRegisterKills(Reg, NULL);
      else
        MI->addRegisterKilled(Reg, NULL);
    }
  }
}
  514. MachineBasicBlock*
  515. LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
  516. // A local live range must be fully contained inside the block, meaning it is
  517. // defined and killed at instructions, not at block boundaries. It is not
  518. // live in or or out of any block.
  519. //
  520. // It is technically possible to have a PHI-defined live range identical to a
  521. // single block, but we are going to return false in that case.
  522. SlotIndex Start = LI.beginIndex();
  523. if (Start.isBlock())
  524. return NULL;
  525. SlotIndex Stop = LI.endIndex();
  526. if (Stop.isBlock())
  527. return NULL;
  528. // getMBBFromIndex doesn't need to search the MBB table when both indexes
  529. // belong to proper instructions.
  530. MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
  531. MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
  532. return MBB1 == MBB2 ? MBB1 : NULL;
  533. }
/// hasPHIKill - Return true if VNI is live out of a predecessor edge feeding
/// some PHI-defined value of LI. Conservatively answers true when a PHI block
/// has a huge predecessor list rather than scanning it.
bool
LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
  for (LiveInterval::const_vni_iterator I = LI.vni_begin(), E = LI.vni_end();
       I != E; ++I) {
    const VNInfo *PHI = *I;
    // Only live (not unused) PHI-defined values can be fed by VNI.
    if (PHI->isUnused() || !PHI->isPHIDef())
      continue;
    const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
    // Conservatively return true instead of scanning huge predecessor lists.
    if (PHIMBB->pred_size() > 100)
      return true;
    // VNI feeds this PHI if it is the live-out value on any incoming edge.
    for (MachineBasicBlock::const_pred_iterator
         PI = PHIMBB->pred_begin(), PE = PHIMBB->pred_end(); PI != PE; ++PI)
      if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(*PI)))
        return true;
  }
  return false;
}
  552. float
  553. LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  554. const MachineBlockFrequencyInfo *MBFI,
  555. const MachineInstr *MI) {
  556. BlockFrequency Freq = MBFI->getBlockFreq(MI->getParent());
  557. const float Scale = 1.0f / MBFI->getEntryFreq();
  558. return (isDef + isUse) * (Freq.getFrequency() * Scale);
  559. }
  560. LiveRange::Segment
  561. LiveIntervals::addSegmentToEndOfBlock(unsigned reg, MachineInstr* startInst) {
  562. LiveInterval& Interval = createEmptyInterval(reg);
  563. VNInfo* VN = Interval.getNextValue(
  564. SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  565. getVNInfoAllocator());
  566. LiveRange::Segment S(
  567. SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  568. getMBBEndIdx(startInst->getParent()), VN);
  569. Interval.addSegment(S);
  570. return S;
  571. }
  572. //===----------------------------------------------------------------------===//
  573. // Register mask functions
  574. //===----------------------------------------------------------------------===//
/// checkRegMaskInterference - Test whether LI overlaps any regmask slots.
/// If it does, set UsableRegs to the registers not clobbered by any
/// overlapping mask and return true; otherwise return false and leave
/// UsableRegs untouched.
bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();
  // Use a smaller arrays for local live ranges.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }
  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI =
    std::lower_bound(Slots.begin(), Slots.end(), LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();
  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;
  bool Found = false;
  // Walk the (sorted) segments and mask slots in lockstep.
  for (;;) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      if (!Found) {
        // This is the first overlap. Initialize UsableRegs to all ones.
        UsableRegs.clear();
        UsableRegs.resize(TRI->getNumRegs(), true);
        Found = true;
      }
      // Remove usable registers clobbered by this mask.
      UsableRegs.clearBitsNotInMask(Bits[SlotI-Slots.begin()]);
      if (++SlotI == SlotE)
        return Found;
    }
    // *SlotI is beyond the current LI segment.
    LiveI = LI.advanceTo(LiveI, *SlotI);
    if (LiveI == LiveE)
      return Found;
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}
  625. //===----------------------------------------------------------------------===//
  626. // IntervalUpdate class.
  627. //===----------------------------------------------------------------------===//
// HMEditor is a toolkit used by handleMove to trim or extend live intervals.
class LiveIntervals::HMEditor {
private:
  LiveIntervals& LIS;
  const MachineRegisterInfo& MRI;
  const TargetRegisterInfo& TRI;
  SlotIndex OldIdx;   // Index the instruction is moving away from.
  SlotIndex NewIdx;   // Index the instruction is moving to.
  // Ranges already processed by updateRange(). An instruction can mention the
  // same register (or regunit) through several operands; this set makes the
  // update idempotent per range.
  SmallPtrSet<LiveRange*, 8> Updated;
  bool UpdateFlags;

public:
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI,
           SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
    : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
      UpdateFlags(UpdateFlags) {}

  // FIXME: UpdateFlags is a workaround that creates live intervals for all
  // physregs, even those that aren't needed for regalloc, in order to update
  // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
  // flags, and postRA passes will use a live register utility instead.
  //
  // Returns the live range for regunit Unit, creating it on demand when
  // UpdateFlags is set, otherwise only if it is already cached (may be null).
  LiveRange *getRegUnitLI(unsigned Unit) {
    if (UpdateFlags)
      return &LIS.getRegUnit(Unit);
    return LIS.getCachedRegUnit(Unit);
  }

  /// Update all live ranges touched by MI, assuming a move from OldIdx to
  /// NewIdx.
  void updateAllRanges(MachineInstr *MI) {
    DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": " << *MI);
    bool hasRegMask = false;
    for (MIOperands MO(MI); MO.isValid(); ++MO) {
      if (MO->isRegMask())
        hasRegMask = true;
      if (!MO->isReg())
        continue;
      // Aggressively clear all kill flags.
      // They are reinserted by VirtRegRewriter.
      if (MO->isUse())
        MO->setIsKill(false);

      unsigned Reg = MO->getReg();
      if (!Reg)
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        LiveInterval &LI = LIS.getInterval(Reg);
        updateRange(LI, Reg);
        continue;
      }

      // For physregs, only update the regunits that actually have a
      // precomputed live range.
      for (MCRegUnitIterator Units(Reg, &TRI); Units.isValid(); ++Units)
        if (LiveRange *LR = getRegUnitLI(*Units))
          updateRange(*LR, *Units);
    }
    if (hasRegMask)
      updateRegMaskSlots();
  }

private:
  /// Update a single live range, assuming an instruction has been moved from
  /// OldIdx to NewIdx. Reg identifies the range for debug output only; it is
  /// either a virtual register or a regunit number.
  void updateRange(LiveRange &LR, unsigned Reg) {
    // insert() returns false when LR was already in the set, i.e. this range
    // has already been updated for this move.
    if (!Updated.insert(&LR))
      return;
    DEBUG({
      dbgs() << "     ";
      if (TargetRegisterInfo::isVirtualRegister(Reg))
        dbgs() << PrintReg(Reg);
      else
        dbgs() << PrintRegUnit(Reg, &TRI);
      dbgs() << ":\t" << LR << '\n';
    });
    // Dispatch on the direction of the move within the basic block.
    if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
      handleMoveDown(LR);
    else
      handleMoveUp(LR, Reg);
    DEBUG(dbgs() << "        -->\t" << LR << '\n');
    LR.verify();
  }

  /// Update LR to reflect an instruction has been moved downwards from OldIdx
  /// to NewIdx.
  ///
  /// 1. Live def at OldIdx:
  ///    Move def to NewIdx, assert endpoint after NewIdx.
  ///
  /// 2. Live def at OldIdx, killed at NewIdx:
  ///    Change to dead def at NewIdx.
  ///    (Happens when bundling def+kill together).
  ///
  /// 3. Dead def at OldIdx:
  ///    Move def to NewIdx, possibly across another live value.
  ///
  /// 4. Def at OldIdx AND at NewIdx:
  ///    Remove segment [OldIdx;NewIdx) and value defined at OldIdx.
  ///    (Happens when bundling multiple defs together).
  ///
  /// 5. Value read at OldIdx, killed before NewIdx:
  ///    Extend kill to NewIdx.
  ///
  void handleMoveDown(LiveRange &LR) {
    // First look for a kill at OldIdx.
    LiveRange::iterator I = LR.find(OldIdx.getBaseIndex());
    LiveRange::iterator E = LR.end();
    // Is LR even live at OldIdx?
    if (I == E || SlotIndex::isEarlierInstr(OldIdx, I->start))
      return;

    // Handle a live-in value.
    if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
      bool isKill = SlotIndex::isSameInstr(OldIdx, I->end);
      // If the live-in value already extends to NewIdx, there is nothing to do.
      if (!SlotIndex::isEarlierInstr(I->end, NewIdx))
        return;
      // Aggressively remove all kill flags from the old kill point.
      // Kill flags shouldn't be used while live intervals exist, they will be
      // reinserted by VirtRegRewriter.
      if (MachineInstr *KillMI = LIS.getInstructionFromIndex(I->end))
        for (MIBundleOperands MO(KillMI); MO.isValid(); ++MO)
          if (MO->isReg() && MO->isUse())
            MO->setIsKill(false);
      // Adjust I->end to reach NewIdx. This may temporarily make LR invalid by
      // overlapping ranges. Case 5 above.
      I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
      // If this was a kill, there may also be a def. Otherwise we're done.
      if (!isKill)
        return;
      ++I;
    }

    // Check for a def at OldIdx.
    if (I == E || !SlotIndex::isSameInstr(OldIdx, I->start))
      return;
    // We have a def at OldIdx.
    VNInfo *DefVNI = I->valno;
    assert(DefVNI->def == I->start && "Inconsistent def");
    DefVNI->def = NewIdx.getRegSlot(I->start.isEarlyClobber());
    // If the defined value extends beyond NewIdx, just move the def down.
    // This is case 1 above.
    if (SlotIndex::isEarlierInstr(NewIdx, I->end)) {
      I->start = DefVNI->def;
      return;
    }

    // The remaining possibilities are now:
    // 2. Live def at OldIdx, killed at NewIdx: isSameInstr(I->end, NewIdx).
    // 3. Dead def at OldIdx: I->end = OldIdx.getDeadSlot().
    // In either case, it is possible that there is an existing def at NewIdx.
    assert((I->end == OldIdx.getDeadSlot() ||
            SlotIndex::isSameInstr(I->end, NewIdx)) &&
            "Cannot move def below kill");
    LiveRange::iterator NewI = LR.advanceTo(I, NewIdx.getRegSlot());
    if (NewI != E && SlotIndex::isSameInstr(NewI->start, NewIdx)) {
      // There is an existing def at NewIdx, case 4 above. The def at OldIdx is
      // coalesced into that value.
      assert(NewI->valno != DefVNI && "Multiple defs of value?");
      LR.removeValNo(DefVNI);
      return;
    }

    // There was no existing def at NewIdx. Turn *I into a dead def at NewIdx.
    // If the def at OldIdx was dead, we allow it to be moved across other LR
    // values. The new range should be placed immediately before NewI, move any
    // intermediate ranges up.
    assert(NewI != I && "Inconsistent iterators");
    std::copy(std::next(I), NewI, I);
    *std::prev(NewI)
      = LiveRange::Segment(DefVNI->def, NewIdx.getDeadSlot(), DefVNI);
  }

  /// Update LR to reflect an instruction has been moved upwards from OldIdx
  /// to NewIdx.
  ///
  /// 1. Live def at OldIdx:
  ///    Hoist def to NewIdx.
  ///
  /// 2. Dead def at OldIdx:
  ///    Hoist def+end to NewIdx, possibly move across other values.
  ///
  /// 3. Dead def at OldIdx AND existing def at NewIdx:
  ///    Remove value defined at OldIdx, coalescing it with existing value.
  ///
  /// 4. Live def at OldIdx AND existing def at NewIdx:
  ///    Remove value defined at NewIdx, hoist OldIdx def to NewIdx.
  ///    (Happens when bundling multiple defs together).
  ///
  /// 5. Value killed at OldIdx:
  ///    Hoist kill to NewIdx, then scan for last kill between NewIdx and
  ///    OldIdx.
  ///
  void handleMoveUp(LiveRange &LR, unsigned Reg) {
    // First look for a kill at OldIdx.
    LiveRange::iterator I = LR.find(OldIdx.getBaseIndex());
    LiveRange::iterator E = LR.end();
    // Is LR even live at OldIdx?
    if (I == E || SlotIndex::isEarlierInstr(OldIdx, I->start))
      return;

    // Handle a live-in value.
    if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
      // If the live-in value isn't killed here, there is nothing to do.
      if (!SlotIndex::isSameInstr(OldIdx, I->end))
        return;
      // Adjust I->end to end at NewIdx. If we are hoisting a kill above
      // another use, we need to search for that use. Case 5 above.
      I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
      ++I;
      // If OldIdx also defines a value, there couldn't have been another use.
      if (I == E || !SlotIndex::isSameInstr(I->start, OldIdx)) {
        // No def, search for the new kill.
        // This can never be an early clobber kill since there is no def.
        std::prev(I)->end = findLastUseBefore(Reg).getRegSlot();
        return;
      }
    }

    // Now deal with the def at OldIdx.
    assert(I != E && SlotIndex::isSameInstr(I->start, OldIdx) && "No def?");
    VNInfo *DefVNI = I->valno;
    assert(DefVNI->def == I->start && "Inconsistent def");
    DefVNI->def = NewIdx.getRegSlot(I->start.isEarlyClobber());

    // Check for an existing def at NewIdx. NewI cannot be end() here: LR is
    // live at OldIdx and this is an upwards move (NewIdx earlier than OldIdx),
    // so find() returns at latest the segment containing OldIdx.
    LiveRange::iterator NewI = LR.find(NewIdx.getRegSlot());
    if (SlotIndex::isSameInstr(NewI->start, NewIdx)) {
      assert(NewI->valno != DefVNI && "Same value defined more than once?");
      // There is an existing def at NewIdx.
      if (I->end.isDead()) {
        // Case 3: Remove the dead def at OldIdx.
        LR.removeValNo(DefVNI);
        return;
      }
      // Case 4: Replace def at NewIdx with live def at OldIdx.
      I->start = DefVNI->def;
      LR.removeValNo(NewI->valno);
      return;
    }

    // There is no existing def at NewIdx. Hoist DefVNI.
    if (!I->end.isDead()) {
      // Leave the end point of a live def.
      I->start = DefVNI->def;
      return;
    }

    // DefVNI is a dead def. It may have been moved across other values in LR,
    // so move I up to NewI. Slide [NewI;I) down one position.
    std::copy_backward(NewI, I, std::next(I));
    *NewI = LiveRange::Segment(DefVNI->def, NewIdx.getDeadSlot(), DefVNI);
  }

  /// Rewrite the RegMaskSlots entry for the moved instruction from OldIdx to
  /// NewIdx. Asserts that the move does not cross another regmask (call) slot,
  /// which would invalidate the sorted-slot invariant.
  void updateRegMaskSlots() {
    SmallVectorImpl<SlotIndex>::iterator RI =
      std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
                       OldIdx);
    assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
           "No RegMask at OldIdx.");
    *RI = NewIdx.getRegSlot();
    assert((RI == LIS.RegMaskSlots.begin() ||
            SlotIndex::isEarlierInstr(*std::prev(RI), *RI)) &&
           "Cannot move regmask instruction above another call");
    assert((std::next(RI) == LIS.RegMaskSlots.end() ||
            SlotIndex::isEarlierInstr(*RI, *std::next(RI))) &&
           "Cannot move regmask instruction below another call");
  }

  // Return the last use of reg between NewIdx and OldIdx.
  SlotIndex findLastUseBefore(unsigned Reg) {

    if (TargetRegisterInfo::isVirtualRegister(Reg)) {
      // Virtual register: the use list is cheap to walk, so scan it and keep
      // the latest use index strictly before OldIdx.
      SlotIndex LastUse = NewIdx;
      for (MachineRegisterInfo::use_nodbg_iterator
             UI = MRI.use_nodbg_begin(Reg),
             UE = MRI.use_nodbg_end();
           UI != UE; UI.skipInstruction()) {
        const MachineInstr* MI = &*UI;
        SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
        if (InstSlot > LastUse && InstSlot < OldIdx)
          LastUse = InstSlot;
      }
      return LastUse;
    }

    // This is a regunit interval, so scanning the use list could be very
    // expensive. Scan upwards from OldIdx instead.
    assert(NewIdx < OldIdx && "Expected upwards move");
    SlotIndexes *Indexes = LIS.getSlotIndexes();
    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(NewIdx);

    // OldIdx may not correspond to an instruction any longer, so set MII to
    // point to the next instruction after OldIdx, or MBB->end().
    MachineBasicBlock::iterator MII = MBB->end();
    if (MachineInstr *MI = Indexes->getInstructionFromIndex(
                           Indexes->getNextNonNullIndex(OldIdx)))
      if (MI->getParent() == MBB)
        MII = MI;

    MachineBasicBlock::iterator Begin = MBB->begin();
    while (MII != Begin) {
      if ((--MII)->isDebugValue())
        continue;
      SlotIndex Idx = Indexes->getInstructionIndex(MII);

      // Stop searching when NewIdx is reached.
      if (!SlotIndex::isEarlierInstr(NewIdx, Idx))
        return NewIdx;

      // Check if MII uses Reg.
      for (MIBundleOperands MO(MII); MO.isValid(); ++MO)
        if (MO->isReg() &&
            TargetRegisterInfo::isPhysicalRegister(MO->getReg()) &&
            TRI.hasRegUnit(MO->getReg(), Reg))
          return Idx;
    }
    // Didn't reach NewIdx. It must be the first instruction in the block.
    return NewIdx;
  }
};
  925. void LiveIntervals::handleMove(MachineInstr* MI, bool UpdateFlags) {
  926. assert(!MI->isBundled() && "Can't handle bundled instructions yet.");
  927. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  928. Indexes->removeMachineInstrFromMaps(MI);
  929. SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
  930. assert(getMBBStartIdx(MI->getParent()) <= OldIndex &&
  931. OldIndex < getMBBEndIdx(MI->getParent()) &&
  932. "Cannot handle moves across basic block boundaries.");
  933. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  934. HME.updateAllRanges(MI);
  935. }
  936. void LiveIntervals::handleMoveIntoBundle(MachineInstr* MI,
  937. MachineInstr* BundleStart,
  938. bool UpdateFlags) {
  939. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  940. SlotIndex NewIndex = Indexes->getInstructionIndex(BundleStart);
  941. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  942. HME.updateAllRanges(MI);
  943. }
/// Repair the live intervals of the registers in OrigRegs after instructions
/// in the range [Begin, End) of MBB have been added, removed, or moved by a
/// pass that did not keep LiveIntervals up to date. Missing virtual register
/// intervals inside the range are recomputed from scratch; the OrigRegs
/// intervals are patched by walking the range backwards.
void
LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator Begin,
                                      MachineBasicBlock::iterator End,
                                      ArrayRef<unsigned> OrigRegs) {
  // Find anchor points, which are at the beginning/end of blocks or at
  // instructions that already have indexes.
  while (Begin != MBB->begin() && !Indexes->hasIndex(Begin))
    --Begin;
  while (End != MBB->end() && !Indexes->hasIndex(End))
    ++End;

  // endIdx is the index just past the repaired region.
  SlotIndex endIdx;
  if (End == MBB->end())
    endIdx = getMBBEndIdx(MBB).getPrevSlot();
  else
    endIdx = getInstructionIndex(End);

  // Re-number the instructions between the anchors so every instruction in
  // [Begin, End) has a valid slot index again.
  Indexes->repairIndexesInRange(MBB, Begin, End);

  // Compute intervals for any virtual registers used in the range that do not
  // have one yet (e.g. registers created by the pass being repaired for).
  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr *MI = I;
    if (MI->isDebugValue())
      continue;
    for (MachineInstr::const_mop_iterator MOI = MI->operands_begin(),
         MOE = MI->operands_end(); MOI != MOE; ++MOI) {
      if (MOI->isReg() &&
          TargetRegisterInfo::isVirtualRegister(MOI->getReg()) &&
          !hasInterval(MOI->getReg())) {
        createAndComputeVirtRegInterval(MOI->getReg());
      }
    }
  }

  // Patch each pre-existing interval by scanning the region bottom-up and
  // re-anchoring segment endpoints at the (re-numbered) defs and uses.
  for (unsigned i = 0, e = OrigRegs.size(); i != e; ++i) {
    unsigned Reg = OrigRegs[i];
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;

    LiveInterval &LI = getInterval(Reg);
    // FIXME: Should we support undefs that gain defs?
    if (!LI.hasAtLeastOneValue())
      continue;

    // LII tracks the segment currently being repaired; lastUseIdx is the
    // earliest use seen so far that the next def upward must reach.
    LiveInterval::iterator LII = LI.find(endIdx);
    SlotIndex lastUseIdx;
    if (LII != LI.end() && LII->start < endIdx)
      lastUseIdx = LII->end;
    else
      --LII;

    for (MachineBasicBlock::iterator I = End; I != Begin;) {
      --I;
      MachineInstr *MI = I;
      if (MI->isDebugValue())
        continue;

      SlotIndex instrIdx = getInstructionIndex(MI);
      // Endpoints that no longer map to an instruction belong to removed or
      // moved code and must be re-anchored.
      bool isStartValid = getInstructionFromIndex(LII->start);
      bool isEndValid = getInstructionFromIndex(LII->end);

      // FIXME: This doesn't currently handle early-clobber or multiple removed
      // defs inside of the region to repair.
      for (MachineInstr::mop_iterator OI = MI->operands_begin(),
           OE = MI->operands_end(); OI != OE; ++OI) {
        const MachineOperand &MO = *OI;
        if (!MO.isReg() || MO.getReg() != Reg)
          continue;

        if (MO.isDef()) {
          if (!isStartValid) {
            if (LII->end.isDead()) {
              // The stale segment is a dead def: drop it and resume repair at
              // the previous segment (or the first one).
              SlotIndex prevStart;
              if (LII != LI.begin())
                prevStart = std::prev(LII)->start;

              // FIXME: This could be more efficient if there was a
              // removeSegment method that returned an iterator.
              LI.removeSegment(*LII, true);
              if (prevStart.isValid())
                LII = LI.find(prevStart);
              else
                LII = LI.begin();
            } else {
              // Re-anchor the segment's def at this instruction.
              LII->start = instrIdx.getRegSlot();
              LII->valno->def = instrIdx.getRegSlot();
              // A subregister def that isn't undef reads the register too,
              // so it also counts as a use.
              if (MO.getSubReg() && !MO.isUndef())
                lastUseIdx = instrIdx.getRegSlot();
              else
                lastUseIdx = SlotIndex();
              continue;
            }
          }

          if (!lastUseIdx.isValid()) {
            // No use below this def: create a dead-def segment.
            VNInfo *VNI = LI.getNextValue(instrIdx.getRegSlot(),
                                          VNInfoAllocator);
            LiveRange::Segment S(instrIdx.getRegSlot(),
                                 instrIdx.getDeadSlot(), VNI);
            LII = LI.addSegment(S);
          } else if (LII->start != instrIdx.getRegSlot()) {
            // New value live from this def down to the last recorded use.
            VNInfo *VNI = LI.getNextValue(instrIdx.getRegSlot(),
                                          VNInfoAllocator);
            LiveRange::Segment S(instrIdx.getRegSlot(), lastUseIdx, VNI);
            LII = LI.addSegment(S);
          }

          if (MO.getSubReg() && !MO.isUndef())
            lastUseIdx = instrIdx.getRegSlot();
          else
            lastUseIdx = SlotIndex();
        } else if (MO.isUse()) {
          // FIXME: This should probably be handled outside of this branch,
          // either as part of the def case (for defs inside of the region) or
          // after the loop over the region.
          if (!isEndValid && !LII->end.isBlock())
            LII->end = instrIdx.getRegSlot();
          if (!lastUseIdx.isValid())
            lastUseIdx = instrIdx.getRegSlot();
        }
      }
    }
  }
}