// SplitKit.cpp (page-scrape artifacts removed: file-size banner and line-number gutter)
  1. //===- SplitKit.cpp - Toolkit for splitting live ranges -------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file contains the SplitAnalysis class as well as mutator functions for
  11. // live range splitting.
  12. //
  13. //===----------------------------------------------------------------------===//
  14. #include "SplitKit.h"
  15. #include "LiveRangeCalc.h"
  16. #include "llvm/ADT/ArrayRef.h"
  17. #include "llvm/ADT/DenseSet.h"
  18. #include "llvm/ADT/None.h"
  19. #include "llvm/ADT/STLExtras.h"
  20. #include "llvm/ADT/SmallPtrSet.h"
  21. #include "llvm/ADT/SmallVector.h"
  22. #include "llvm/ADT/Statistic.h"
  23. #include "llvm/CodeGen/LiveInterval.h"
  24. #include "llvm/CodeGen/LiveIntervals.h"
  25. #include "llvm/CodeGen/LiveRangeEdit.h"
  26. #include "llvm/CodeGen/MachineBasicBlock.h"
  27. #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
  28. #include "llvm/CodeGen/MachineDominators.h"
  29. #include "llvm/CodeGen/MachineFunction.h"
  30. #include "llvm/CodeGen/MachineInstr.h"
  31. #include "llvm/CodeGen/MachineInstrBuilder.h"
  32. #include "llvm/CodeGen/MachineLoopInfo.h"
  33. #include "llvm/CodeGen/MachineOperand.h"
  34. #include "llvm/CodeGen/MachineRegisterInfo.h"
  35. #include "llvm/CodeGen/SlotIndexes.h"
  36. #include "llvm/CodeGen/TargetInstrInfo.h"
  37. #include "llvm/CodeGen/TargetOpcodes.h"
  38. #include "llvm/CodeGen/TargetRegisterInfo.h"
  39. #include "llvm/CodeGen/TargetSubtargetInfo.h"
  40. #include "llvm/CodeGen/VirtRegMap.h"
  41. #include "llvm/Config/llvm-config.h"
  42. #include "llvm/IR/DebugLoc.h"
  43. #include "llvm/MC/LaneBitmask.h"
  44. #include "llvm/Support/Allocator.h"
  45. #include "llvm/Support/BlockFrequency.h"
  46. #include "llvm/Support/Compiler.h"
  47. #include "llvm/Support/Debug.h"
  48. #include "llvm/Support/ErrorHandling.h"
  49. #include "llvm/Support/raw_ostream.h"
  50. #include <algorithm>
  51. #include <cassert>
  52. #include <iterator>
  53. #include <limits>
  54. #include <tuple>
  55. #include <utility>
  56. using namespace llvm;
  57. #define DEBUG_TYPE "regalloc"
  58. STATISTIC(NumFinished, "Number of splits finished");
  59. STATISTIC(NumSimple, "Number of splits that were simple");
  60. STATISTIC(NumCopies, "Number of copies inserted for splitting");
  61. STATISTIC(NumRemats, "Number of rematerialized defs for splitting");
  62. STATISTIC(NumRepairs, "Number of invalid live ranges repaired");
  63. //===----------------------------------------------------------------------===//
  64. // Last Insert Point Analysis
  65. //===----------------------------------------------------------------------===//
/// Construct the analysis. Remembers the slot-index information and sizes the
/// per-block cache of last-insert-point pairs (one entry per basic block,
/// indexed by block number).
InsertPointAnalysis::InsertPointAnalysis(const LiveIntervals &lis,
                                         unsigned BBNum)
    : LIS(lis), LastInsertPoint(BBNum) {}
/// Compute the last legal insert point for CurLI in MBB and cache the
/// per-block part of the answer.
///
/// The cached pair is independent of the live interval:
///   first  = last point where any instruction may be inserted (the first
///            terminator, or the block end if there is none);
///   second = the last call in the block, relevant when an EH pad successor
///            consumes the live value (inserts must go before the call that
///            may throw).
SlotIndex
InsertPointAnalysis::computeLastInsertPoint(const LiveInterval &CurLI,
                                            const MachineBasicBlock &MBB) {
  unsigned Num = MBB.getNumber();
  std::pair<SlotIndex, SlotIndex> &LIP = LastInsertPoint[Num];
  SlotIndex MBBEnd = LIS.getMBBEndIdx(&MBB);

  // Collect the EH pad successors; they decide whether the call position
  // matters for this query.
  SmallVector<const MachineBasicBlock *, 1> EHPadSuccessors;
  for (const MachineBasicBlock *SMBB : MBB.successors())
    if (SMBB->isEHPad())
      EHPadSuccessors.push_back(SMBB);

  // Compute insert points on the first call. The pair is independent of the
  // current live interval.
  if (!LIP.first.isValid()) {
    MachineBasicBlock::const_iterator FirstTerm = MBB.getFirstTerminator();
    if (FirstTerm == MBB.end())
      LIP.first = MBBEnd;
    else
      LIP.first = LIS.getInstructionIndex(*FirstTerm);

    // If there is a landing pad successor, also find the call instruction.
    if (EHPadSuccessors.empty())
      return LIP.first;
    // There may not be a call instruction (?) in which case we ignore LPad.
    LIP.second = LIP.first;
    // Scan backwards from the block end for the last call.
    for (MachineBasicBlock::const_iterator I = MBB.end(), E = MBB.begin();
         I != E;) {
      --I;
      if (I->isCall()) {
        LIP.second = LIS.getInstructionIndex(*I);
        break;
      }
    }
  }

  // If CurLI is live into a landing pad successor, move the last insert point
  // back to the call that may throw.
  if (!LIP.second)
    return LIP.first;
  if (none_of(EHPadSuccessors, [&](const MachineBasicBlock *EHPad) {
        return LIS.isLiveInToMBB(CurLI, EHPad);
      }))
    return LIP.first;

  // Find the value leaving MBB.
  const VNInfo *VNI = CurLI.getVNInfoBefore(MBBEnd);
  if (!VNI)
    return LIP.first;

  // If the value leaving MBB was defined after the call in MBB, it can't
  // really be live-in to the landing pad. This can happen if the landing pad
  // has a PHI, and this register is undef on the exceptional edge.
  // <rdar://problem/10664933>
  if (!SlotIndex::isEarlierInstr(VNI->def, LIP.second) && VNI->def < MBBEnd)
    return LIP.first;

  // Value is properly live-in to the landing pad.
  // Only allow inserts before the call.
  return LIP.second;
}
  123. MachineBasicBlock::iterator
  124. InsertPointAnalysis::getLastInsertPointIter(const LiveInterval &CurLI,
  125. MachineBasicBlock &MBB) {
  126. SlotIndex LIP = getLastInsertPoint(CurLI, MBB);
  127. if (LIP == LIS.getMBBEndIdx(&MBB))
  128. return MBB.end();
  129. return LIS.getInstructionFromIndex(LIP);
  130. }
  131. //===----------------------------------------------------------------------===//
  132. // Split Analysis
  133. //===----------------------------------------------------------------------===//
/// Construct the analysis over a function. Caches the instruction info from
/// the subtarget and sizes the insert-point analysis by the number of blocks.
SplitAnalysis::SplitAnalysis(const VirtRegMap &vrm, const LiveIntervals &lis,
                             const MachineLoopInfo &mli)
    : MF(vrm.getMachineFunction()), VRM(vrm), LIS(lis), Loops(mli),
      TII(*MF.getSubtarget().getInstrInfo()), IPA(lis, MF.getNumBlockIDs()) {}
  138. void SplitAnalysis::clear() {
  139. UseSlots.clear();
  140. UseBlocks.clear();
  141. ThroughBlocks.clear();
  142. CurLI = nullptr;
  143. DidRepairRange = false;
  144. }
/// analyzeUses - Count instructions, basic blocks, and loops using CurLI.
/// Populates UseSlots (sorted, deduplicated) and the per-block UseBlocks /
/// ThroughBlocks info; repairs the live range if inconsistencies are found.
void SplitAnalysis::analyzeUses() {
  assert(UseSlots.empty() && "Call clear first");

  // First get all the defs from the interval values. This provides the correct
  // slots for early clobbers.
  for (const VNInfo *VNI : CurLI->valnos)
    if (!VNI->isPHIDef() && !VNI->isUnused())
      UseSlots.push_back(VNI->def);

  // Get use slots from the use-def chain.
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  for (MachineOperand &MO : MRI.use_nodbg_operands(CurLI->reg))
    if (!MO.isUndef())
      UseSlots.push_back(LIS.getInstructionIndex(*MO.getParent()).getRegSlot());

  array_pod_sort(UseSlots.begin(), UseSlots.end());

  // Remove duplicates, keeping the smaller slot for each instruction.
  // That is what we want for early clobbers.
  UseSlots.erase(std::unique(UseSlots.begin(), UseSlots.end(),
                             SlotIndex::isSameInstr),
                 UseSlots.end());

  // Compute per-live block info.
  if (!calcLiveBlockInfo()) {
    // FIXME: calcLiveBlockInfo found inconsistencies in the live range.
    // I am looking at you, RegisterCoalescer!
    DidRepairRange = true;
    ++NumRepairs;
    DEBUG(dbgs() << "*** Fixing inconsistent live interval! ***\n");
    // Shrink the range to its real uses and recompute; LIS and CurLI are
    // logically const here, so constness is cast away only for the repair.
    const_cast<LiveIntervals&>(LIS)
      .shrinkToUses(const_cast<LiveInterval*>(CurLI));
    UseBlocks.clear();
    ThroughBlocks.clear();
    bool fixed = calcLiveBlockInfo();
    (void)fixed;
    assert(fixed && "Couldn't fix broken live interval");
  }

  DEBUG(dbgs() << "Analyze counted "
               << UseSlots.size() << " instrs in "
               << UseBlocks.size() << " blocks, through "
               << NumThroughBlocks << " blocks.\n");
}
/// calcLiveBlockInfo - Fill the LiveBlocks array with information about blocks
/// where CurLI is live.
///
/// Returns false when the live range is found to be inconsistent (a segment
/// ending mid-block with no uses); the caller then repairs and retries.
bool SplitAnalysis::calcLiveBlockInfo() {
  ThroughBlocks.resize(MF.getNumBlockIDs());
  NumThroughBlocks = NumGapBlocks = 0;
  if (CurLI->empty())
    return true;

  // Walk the segments of CurLI and the sorted use slots in lockstep,
  // one basic block per outer-loop iteration.
  LiveInterval::const_iterator LVI = CurLI->begin();
  LiveInterval::const_iterator LVE = CurLI->end();

  SmallVectorImpl<SlotIndex>::const_iterator UseI, UseE;
  UseI = UseSlots.begin();
  UseE = UseSlots.end();

  // Loop over basic blocks where CurLI is live.
  MachineFunction::iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  while (true) {
    BlockInfo BI;
    BI.MBB = &*MFI;
    SlotIndex Start, Stop;
    std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

    // If the block contains no uses, the range must be live through. At one
    // point, RegisterCoalescer could create dangling ranges that ended
    // mid-block.
    if (UseI == UseE || *UseI >= Stop) {
      ++NumThroughBlocks;
      ThroughBlocks.set(BI.MBB->getNumber());
      // The range shouldn't end mid-block if there are no uses. This shouldn't
      // happen.
      if (LVI->end < Stop)
        return false;
    } else {
      // This block has uses. Find the first and last uses in the block.
      BI.FirstInstr = *UseI;
      assert(BI.FirstInstr >= Start);
      do ++UseI;
      while (UseI != UseE && *UseI < Stop);
      BI.LastInstr = UseI[-1];
      assert(BI.LastInstr < Stop);

      // LVI is the first live segment overlapping MBB.
      BI.LiveIn = LVI->start <= Start;

      // When not live in, the first use should be a def.
      if (!BI.LiveIn) {
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        assert(LVI->start == BI.FirstInstr && "First instr should be a def");
        BI.FirstDef = BI.FirstInstr;
      }

      // Look for gaps in the live range.
      BI.LiveOut = true;
      while (LVI->end < Stop) {
        SlotIndex LastStop = LVI->end;
        if (++LVI == LVE || LVI->start >= Stop) {
          // No more segments in this block: the range dies here.
          BI.LiveOut = false;
          BI.LastInstr = LastStop;
          break;
        }

        if (LastStop < LVI->start) {
          // There is a gap in the live range. Create duplicate entries for the
          // live-in snippet and the live-out snippet.
          ++NumGapBlocks;

          // Push the Live-in part.
          BI.LiveOut = false;
          UseBlocks.push_back(BI);
          UseBlocks.back().LastInstr = LastStop;

          // Set up BI for the live-out part.
          BI.LiveIn = false;
          BI.LiveOut = true;
          BI.FirstInstr = BI.FirstDef = LVI->start;
        }

        // A Segment that starts in the middle of the block must be a def.
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        if (!BI.FirstDef)
          BI.FirstDef = LVI->start;
      }

      UseBlocks.push_back(BI);

      // LVI is now at LVE or LVI->end >= Stop.
      if (LVI == LVE)
        break;
    }

    // Live segment ends exactly at Stop. Move to the next segment.
    if (LVI->end == Stop && ++LVI == LVE)
      break;

    // Pick the next basic block.
    if (LVI->start < Stop)
      ++MFI;
    else
      MFI = LIS.getMBBFromIndex(LVI->start)->getIterator();
  }

  assert(getNumLiveBlocks() == countLiveBlocks(CurLI) && "Bad block count");
  return true;
}
/// Count the number of basic blocks in which cli is live. Used as a
/// cross-check against calcLiveBlockInfo's bookkeeping.
unsigned SplitAnalysis::countLiveBlocks(const LiveInterval *cli) const {
  if (cli->empty())
    return 0;
  // advanceTo is a non-const member; the interval itself is not modified.
  LiveInterval *li = const_cast<LiveInterval*>(cli);
  LiveInterval::iterator LVI = li->begin();
  LiveInterval::iterator LVE = li->end();
  unsigned Count = 0;

  // Loop over basic blocks where li is live.
  MachineFunction::const_iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  SlotIndex Stop = LIS.getMBBEndIdx(&*MFI);
  while (true) {
    ++Count;
    // Skip segments that end before the current block does.
    LVI = li->advanceTo(LVI, Stop);
    if (LVI == LVE)
      return Count;
    // Advance to the first block that overlaps the next live segment.
    do {
      ++MFI;
      Stop = LIS.getMBBEndIdx(&*MFI);
    } while (Stop <= LVI->start);
  }
}
  296. bool SplitAnalysis::isOriginalEndpoint(SlotIndex Idx) const {
  297. unsigned OrigReg = VRM.getOriginal(CurLI->reg);
  298. const LiveInterval &Orig = LIS.getInterval(OrigReg);
  299. assert(!Orig.empty() && "Splitting empty interval?");
  300. LiveInterval::const_iterator I = Orig.find(Idx);
  301. // Range containing Idx should begin at Idx.
  302. if (I != Orig.end() && I->start <= Idx)
  303. return I->start == Idx;
  304. // Range does not contain Idx, previous must end at Idx.
  305. return I != Orig.begin() && (--I)->end == Idx;
  306. }
/// Analyze li: reset any previous state, remember the interval, then compute
/// the use slots and per-block liveness info.
void SplitAnalysis::analyze(const LiveInterval *li) {
  clear();
  CurLI = li;
  analyzeUses();
}
  312. //===----------------------------------------------------------------------===//
  313. // Split Editor
  314. //===----------------------------------------------------------------------===//
/// Create a new SplitEditor for editing the LiveInterval analyzed by SA.
/// Caches register/instruction info from the subtarget; RegAssign is backed
/// by the editor's own allocator.
SplitEditor::SplitEditor(SplitAnalysis &sa, AliasAnalysis &aa,
                         LiveIntervals &lis, VirtRegMap &vrm,
                         MachineDominatorTree &mdt,
                         MachineBlockFrequencyInfo &mbfi)
    : SA(sa), AA(aa), LIS(lis), VRM(vrm),
      MRI(vrm.getMachineFunction().getRegInfo()), MDT(mdt),
      TII(*vrm.getMachineFunction().getSubtarget().getInstrInfo()),
      TRI(*vrm.getMachineFunction().getSubtarget().getRegisterInfo()),
      MBFI(mbfi), RegAssign(Allocator) {}
/// Prepare the editor for a new round of splitting driven by LRE, using
/// spill mode SM. Clears all per-split state.
void SplitEditor::reset(LiveRangeEdit &LRE, ComplementSpillMode SM) {
  Edit = &LRE;
  SpillMode = SM;
  OpenIdx = 0;
  RegAssign.clear();
  Values.clear();

  // Reset the LiveRangeCalc instances needed for this spill mode.
  // A nonzero spill mode also needs LRCalc[1] for the complement interval.
  LRCalc[0].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                  &LIS.getVNInfoAllocator());
  if (SpillMode)
    LRCalc[1].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                    &LIS.getVNInfoAllocator());

  // We don't need an AliasAnalysis since we will only be performing
  // cheap-as-a-copy remats anyway.
  Edit->anyRematerializable(nullptr);
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// Dump the register-assignment map: one [start;stop):value entry per
/// assigned interval, or " empty" when nothing has been assigned yet.
LLVM_DUMP_METHOD void SplitEditor::dump() const {
  if (RegAssign.empty()) {
    dbgs() << " empty\n";
    return;
  }

  for (RegAssignMap::const_iterator I = RegAssign.begin(); I.valid(); ++I)
    dbgs() << " [" << I.start() << ';' << I.stop() << "):" << I.value();
  dbgs() << '\n';
}
#endif
  352. LiveInterval::SubRange &SplitEditor::getSubRangeForMask(LaneBitmask LM,
  353. LiveInterval &LI) {
  354. for (LiveInterval::SubRange &S : LI.subranges())
  355. if (S.LaneMask == LM)
  356. return S;
  357. llvm_unreachable("SubRange for this mask not found");
  358. }
/// Add a dead def for VNI to LI, keeping any subranges consistent.
/// Original distinguishes defs transferred from the parent interval from new
/// defs created by rematerialization or inserted copies.
void SplitEditor::addDeadDef(LiveInterval &LI, VNInfo *VNI, bool Original) {
  // Without subranges the main range is the only thing to update.
  if (!LI.hasSubRanges()) {
    LI.createDeadDef(VNI);
    return;
  }

  SlotIndex Def = VNI->def;
  if (Original) {
    // If we are transferring a def from the original interval, make sure
    // to only update the subranges for which the original subranges had
    // a def at this location.
    for (LiveInterval::SubRange &S : LI.subranges()) {
      auto &PS = getSubRangeForMask(S.LaneMask, Edit->getParent());
      VNInfo *PV = PS.getVNInfoAt(Def);
      if (PV != nullptr && PV->def == Def)
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
    }
  } else {
    // This is a new def: either from rematerialization, or from an inserted
    // copy. Since rematerialization can regenerate a definition of a sub-
    // register, we need to check which subranges need to be updated.
    const MachineInstr *DefMI = LIS.getInstructionFromIndex(Def);
    assert(DefMI != nullptr);
    // Compute the lane mask actually written by DefMI's defs of LI.reg.
    LaneBitmask LM;
    for (const MachineOperand &DefOp : DefMI->defs()) {
      unsigned R = DefOp.getReg();
      if (R != LI.reg)
        continue;
      if (unsigned SR = DefOp.getSubReg())
        LM |= TRI.getSubRegIndexLaneMask(SR);
      else {
        // A full-register def covers every lane; no need to keep scanning.
        LM = MRI.getMaxLaneMaskForVReg(R);
        break;
      }
    }
    for (LiveInterval::SubRange &S : LI.subranges())
      if ((S.LaneMask & LM).any())
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
  }
}
/// Define a value in the live interval for RegIdx at Idx, mapped from
/// ParentVNI. Maintains the simple/complex mapping protocol in Values:
/// the first (RegIdx, ParentVNI) def stays a "simple" mapping with no
/// liveness; a second def (or a forced interval with subranges) switches the
/// entry to "complex" and materializes dead defs for both values.
VNInfo *SplitEditor::defValue(unsigned RegIdx,
                              const VNInfo *ParentVNI,
                              SlotIndex Idx,
                              bool Original) {
  assert(ParentVNI && "Mapping NULL value");
  assert(Idx.isValid() && "Invalid SlotIndex");
  assert(Edit->getParent().getVNInfoAt(Idx) == ParentVNI && "Bad Parent VNI");
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // Create a new value.
  VNInfo *VNI = LI->getNextValue(Idx, LIS.getVNInfoAllocator());

  // Intervals with subranges can never use the simple-mapping shortcut.
  bool Force = LI->hasSubRanges();
  ValueForcePair FP(Force ? nullptr : VNI, Force);
  // Use insert for lookup, so we can add missing values with a second lookup.
  std::pair<ValueMap::iterator, bool> InsP =
    Values.insert(std::make_pair(std::make_pair(RegIdx, ParentVNI->id), FP));

  // This was the first time (RegIdx, ParentVNI) was mapped, and it is not
  // forced. Keep it as a simple def without any liveness.
  if (!Force && InsP.second)
    return VNI;

  // If the previous value was a simple mapping, add liveness for it now.
  if (VNInfo *OldVNI = InsP.first->second.getPointer()) {
    addDeadDef(*LI, OldVNI, Original);

    // No longer a simple mapping. Switch to a complex mapping. If the
    // interval has subranges, make it a forced mapping.
    InsP.first->second = ValueForcePair(nullptr, Force);
  }

  // This is a complex mapping, add liveness for VNI
  addDeadDef(*LI, VNI, Original);
  return VNI;
}
  428. void SplitEditor::forceRecompute(unsigned RegIdx, const VNInfo &ParentVNI) {
  429. ValueForcePair &VFP = Values[std::make_pair(RegIdx, ParentVNI.id)];
  430. VNInfo *VNI = VFP.getPointer();
  431. // ParentVNI was either unmapped or already complex mapped. Either way, just
  432. // set the force bit.
  433. if (!VNI) {
  434. VFP.setInt(true);
  435. return;
  436. }
  437. // This was previously a single mapping. Make sure the old def is represented
  438. // by a trivial live range.
  439. addDeadDef(LIS.getInterval(Edit->get(RegIdx)), VNI, false);
  440. // Mark as complex mapped, forced.
  441. VFP = ValueForcePair(nullptr, true);
  442. }
/// Insert one COPY of sub-register SubIdx from FromReg into ToReg before
/// InsertBefore, and create matching dead defs in DestLI's subranges.
///
/// The first copy of a sequence (Def invalid) defines the register with
/// undef state and is given its own slot index; follow-up copies are
/// internal reads, are bundled with their predecessor, and reuse Def.
/// Returns the slot index of the (first) copy.
SlotIndex SplitEditor::buildSingleSubRegCopy(unsigned FromReg, unsigned ToReg,
    MachineBasicBlock &MBB, MachineBasicBlock::iterator InsertBefore,
    unsigned SubIdx, LiveInterval &DestLI, bool Late, SlotIndex Def) {
  const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  bool FirstCopy = !Def.isValid();
  MachineInstr *CopyMI = BuildMI(MBB, InsertBefore, DebugLoc(), Desc)
      .addReg(ToReg, RegState::Define | getUndefRegState(FirstCopy)
              | getInternalReadRegState(!FirstCopy), SubIdx)
      .addReg(FromReg, 0, SubIdx);

  BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
  if (FirstCopy) {
    SlotIndexes &Indexes = *LIS.getSlotIndexes();
    Def = Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  } else {
    // Later partial copies share the first copy's slot index via bundling.
    CopyMI->bundleWithPred();
  }

  // Ensure DestLI has subranges for exactly the copied lanes and give each a
  // dead def at the copy's index.
  LaneBitmask LaneMask = TRI.getSubRegIndexLaneMask(SubIdx);
  DestLI.refineSubRanges(Allocator, LaneMask,
                         [Def, &Allocator](LiveInterval::SubRange& SR) {
    SR.createDeadDef(Def, Allocator);
  });

  return Def;
}
/// Insert COPY instructions copying the lanes in LaneMask from FromReg to
/// ToReg before InsertBefore. A full-register copy needs a single COPY;
/// otherwise a greedy sequence of sub-register COPYs is built, one per
/// chosen subregister index. Returns the slot index of the (first) copy.
SlotIndex SplitEditor::buildCopy(unsigned FromReg, unsigned ToReg,
    LaneBitmask LaneMask, MachineBasicBlock &MBB,
    MachineBasicBlock::iterator InsertBefore, bool Late, unsigned RegIdx) {
  const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  if (LaneMask.all() || LaneMask == MRI.getMaxLaneMaskForVReg(FromReg)) {
    // The full vreg is copied.
    MachineInstr *CopyMI =
        BuildMI(MBB, InsertBefore, DebugLoc(), Desc, ToReg).addReg(FromReg);
    SlotIndexes &Indexes = *LIS.getSlotIndexes();
    return Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  }

  // Only a subset of lanes needs to be copied. The following is a simple
  // heuristic to construct a sequence of COPYs. We could add a target
  // specific callback if this turns out to be suboptimal.
  LiveInterval &DestLI = LIS.getInterval(Edit->get(RegIdx));

  // First pass: Try to find a perfectly matching subregister index. If none
  // exists find the one covering the most lanemask bits.
  SmallVector<unsigned, 8> PossibleIndexes;
  unsigned BestIdx = 0;
  unsigned BestCover = 0;
  const TargetRegisterClass *RC = MRI.getRegClass(FromReg);
  assert(RC == MRI.getRegClass(ToReg) && "Should have same reg class");
  for (unsigned Idx = 1, E = TRI.getNumSubRegIndices(); Idx < E; ++Idx) {
    // Is this index even compatible with the given class?
    if (TRI.getSubClassWithSubReg(RC, Idx) != RC)
      continue;
    LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
    // Early exit if we found a perfect match.
    if (SubRegMask == LaneMask) {
      BestIdx = Idx;
      break;
    }

    // The index must not cover any lanes outside \p LaneMask.
    if ((SubRegMask & ~LaneMask).any())
      continue;

    unsigned PopCount = SubRegMask.getNumLanes();
    PossibleIndexes.push_back(Idx);
    if (PopCount > BestCover) {
      BestCover = PopCount;
      BestIdx = Idx;
    }
  }

  // Abort if we cannot possibly implement the COPY with the given indexes.
  if (BestIdx == 0)
    report_fatal_error("Impossible to implement partial COPY");

  SlotIndex Def = buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore,
                                        BestIdx, DestLI, Late, SlotIndex());

  // Greedy heuristic: Keep iterating keeping the best covering subreg index
  // each time.
  LaneBitmask LanesLeft = LaneMask & ~(TRI.getSubRegIndexLaneMask(BestIdx));
  while (LanesLeft.any()) {
    unsigned BestIdx = 0;
    int BestCover = std::numeric_limits<int>::min();
    for (unsigned Idx : PossibleIndexes) {
      LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
      // Early exit if we found a perfect match.
      if (SubRegMask == LanesLeft) {
        BestIdx = Idx;
        break;
      }

      // Try to cover as much of the remaining lanes as possible but
      // as few of the already covered lanes as possible.
      int Cover = (SubRegMask & LanesLeft).getNumLanes()
                - (SubRegMask & ~LanesLeft).getNumLanes();
      if (Cover > BestCover) {
        BestCover = Cover;
        BestIdx = Idx;
      }
    }

    if (BestIdx == 0)
      report_fatal_error("Impossible to implement partial COPY");

    buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore, BestIdx,
                          DestLI, Late, Def);
    LanesLeft &= ~TRI.getSubRegIndexLaneMask(BestIdx);
  }

  return Def;
}
/// Define a value at UseIdx for the interval at RegIdx, mapped from
/// ParentVNI. Prefers cheap-as-a-copy rematerialization of the original
/// defining instruction; otherwise inserts a COPY (possibly a sequence of
/// sub-register copies when the interval tracks subranges). Returns the new
/// value via defValue.
VNInfo *SplitEditor::defFromParent(unsigned RegIdx,
                                   VNInfo *ParentVNI,
                                   SlotIndex UseIdx,
                                   MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator I) {
  SlotIndex Def;
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // We may be trying to avoid interference that ends at a deleted instruction,
  // so always begin RegIdx 0 early and all others late.
  bool Late = RegIdx != 0;

  // Attempt cheap-as-a-copy rematerialization.
  unsigned Original = VRM.getOriginal(Edit->get(RegIdx));
  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(UseIdx);

  unsigned Reg = LI->reg;
  bool DidRemat = false;
  if (OrigVNI) {
    LiveRangeEdit::Remat RM(ParentVNI);
    RM.OrigMI = LIS.getInstructionFromIndex(OrigVNI->def);
    if (Edit->canRematerializeAt(RM, OrigVNI, UseIdx, true)) {
      Def = Edit->rematerializeAt(MBB, I, Reg, RM, TRI, Late);
      ++NumRemats;
      DidRemat = true;
    }
  }
  if (!DidRemat) {
    // Rematerialization was not possible; insert a COPY instead. Copy only
    // the lanes covered by the interval's subranges, if it has any.
    LaneBitmask LaneMask;
    if (LI->hasSubRanges()) {
      LaneMask = LaneBitmask::getNone();
      for (LiveInterval::SubRange &S : LI->subranges())
        LaneMask |= S.LaneMask;
    } else {
      LaneMask = LaneBitmask::getAll();
    }

    ++NumCopies;
    Def = buildCopy(Edit->getReg(), Reg, LaneMask, MBB, I, Late, RegIdx);
  }

  // Define the value in Reg.
  return defValue(RegIdx, ParentVNI, Def, false);
}
/// Create a new virtual register and live interval.
/// Index 0 is reserved for the complement interval and is created lazily on
/// the first open; the newly opened interval gets the next index, which is
/// returned and becomes the current OpenIdx.
unsigned SplitEditor::openIntv() {
  // Create the complement as index 0.
  if (Edit->empty())
    Edit->createEmptyInterval();

  // Create the open interval.
  OpenIdx = Edit->size();
  Edit->createEmptyInterval();
  return OpenIdx;
}
/// Switch the currently open interval to a previously opened one.
/// Idx must refer to an interval created by an earlier openIntv call; the
/// complement (index 0) can never be selected.
void SplitEditor::selectIntv(unsigned Idx) {
  assert(Idx != 0 && "Cannot select the complement interval");
  assert(Idx < Edit->size() && "Can only select previously opened interval");
  DEBUG(dbgs() << " selectIntv " << OpenIdx << " -> " << Idx << '\n');
  OpenIdx = Idx;
}
/// Enter the open interval before the instruction at Idx by defining a new
/// value there (copied or rematerialized from the parent). If the parent is
/// not live at Idx there is nothing to do and Idx is returned unchanged;
/// otherwise returns the def slot of the new value.
SlotIndex SplitEditor::enterIntvBefore(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before enterIntvBefore");
  DEBUG(dbgs() << " enterIntvBefore " << Idx);
  Idx = Idx.getBaseIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    DEBUG(dbgs() << ": not live\n");
    return Idx;
  }
  DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "enterIntvBefore called with invalid index");

  // Place the new def immediately before MI.
  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(), MI);
  return VNI->def;
}
/// Enter the open interval with a value defined just after the instruction
/// at Idx. Returns the SlotIndex of the new def, or Idx unchanged when the
/// parent interval is not live there.
SlotIndex SplitEditor::enterIntvAfter(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before enterIntvAfter");
  DEBUG(dbgs() << " enterIntvAfter " << Idx);
  // Normalize to the boundary index following the instruction.
  Idx = Idx.getBoundaryIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    // Parent value not live here; nothing to copy into the open interval.
    DEBUG(dbgs() << ": not live\n");
    return Idx;
  }
  DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "enterIntvAfter called with invalid index");
  // Insert the copy (or rematerialization) immediately after MI.
  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(),
                              std::next(MachineBasicBlock::iterator(MI)));
  return VNI->def;
}
/// Enter the open interval at the end of MBB, making the value available on
/// all outgoing edges. Returns the SlotIndex of the def, or the block's end
/// index when the parent interval is not live at the end of MBB.
SlotIndex SplitEditor::enterIntvAtEnd(MachineBasicBlock &MBB) {
  assert(OpenIdx && "openIntv not called before enterIntvAtEnd");
  SlotIndex End = LIS.getMBBEndIdx(&MBB);
  // Last position inside MBB where the parent value may still be queried.
  SlotIndex Last = End.getPrevSlot();
  DEBUG(dbgs() << " enterIntvAtEnd " << printMBBReference(MBB) << ", "
               << Last);
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Last);
  if (!ParentVNI) {
    DEBUG(dbgs() << ": not live\n");
    return End;
  }
  DEBUG(dbgs() << ": valno " << ParentVNI->id);
  // Insert the def before the last split point (e.g. before terminators).
  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Last, MBB,
                              SA.getLastSplitPointIter(&MBB));
  // The open interval owns [def;End) in this block.
  RegAssign.insert(VNI->def, End, OpenIdx);
  DEBUG(dump());
  return VNI->def;
}
/// useIntv - indicate that all instructions in MBB should use OpenLI.
void SplitEditor::useIntv(const MachineBasicBlock &MBB) {
  // Delegate to the index-range overload covering the whole block.
  useIntv(LIS.getMBBStartIdx(&MBB), LIS.getMBBEndIdx(&MBB));
}
/// useIntv - indicate that all instructions in [Start;End) should use the
/// currently open interval.
void SplitEditor::useIntv(SlotIndex Start, SlotIndex End) {
  assert(OpenIdx && "openIntv not called before useIntv");
  DEBUG(dbgs() << " useIntv [" << Start << ';' << End << "):");
  // Only the assignment map is updated here; live ranges are materialized
  // later by transferValues()/finish().
  RegAssign.insert(Start, End, OpenIdx);
  DEBUG(dump());
}
/// Leave the open interval after the instruction at Idx by copying the value
/// back into the complement (RegIdx 0). Returns the index from which the
/// complement owns the value again.
SlotIndex SplitEditor::leaveIntvAfter(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before leaveIntvAfter");
  DEBUG(dbgs() << " leaveIntvAfter " << Idx);

  // The interval must be live beyond the instruction at Idx.
  SlotIndex Boundary = Idx.getBoundaryIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Boundary);
  if (!ParentVNI) {
    DEBUG(dbgs() << ": not live\n");
    return Boundary.getNextSlot();
  }
  DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Boundary);
  assert(MI && "No instruction at index");

  // In spill mode, make live ranges as short as possible by inserting the copy
  // before MI. This is only possible if that instruction doesn't redefine the
  // value. The inserted COPY is not a kill, and we don't need to recompute
  // the source live range. The spiller also won't try to hoist this copy.
  if (SpillMode && !SlotIndex::isSameInstr(ParentVNI->def, Idx) &&
      MI->readsVirtualRegister(Edit->getReg())) {
    forceRecompute(0, *ParentVNI);
    defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
    return Idx;
  }

  // Otherwise insert the back-copy after MI.
  VNInfo *VNI = defFromParent(0, ParentVNI, Boundary, *MI->getParent(),
                              std::next(MachineBasicBlock::iterator(MI)));
  return VNI->def;
}
/// Leave the open interval before the instruction at Idx by defining the
/// complement value right before that instruction. Returns the def index, or
/// the slot after Idx when the parent is not live into the instruction.
SlotIndex SplitEditor::leaveIntvBefore(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before leaveIntvBefore");
  DEBUG(dbgs() << " leaveIntvBefore " << Idx);

  // The interval must be live into the instruction at Idx.
  Idx = Idx.getBaseIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    DEBUG(dbgs() << ": not live\n");
    return Idx.getNextSlot();
  }
  DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');

  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "No instruction at index");
  // Insert the back-copy immediately before MI.
  VNInfo *VNI = defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
  return VNI->def;
}
/// Leave the open interval at the top of MBB by copying the value back into
/// the complement near the block start. Returns the def index, or the block
/// start index when the parent is not live-in.
SlotIndex SplitEditor::leaveIntvAtTop(MachineBasicBlock &MBB) {
  assert(OpenIdx && "openIntv not called before leaveIntvAtTop");
  SlotIndex Start = LIS.getMBBStartIdx(&MBB);
  DEBUG(dbgs() << " leaveIntvAtTop " << printMBBReference(MBB) << ", "
               << Start);

  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  if (!ParentVNI) {
    DEBUG(dbgs() << ": not live\n");
    return Start;
  }

  // Insert the back-copy after any PHIs, labels, and debug instructions.
  VNInfo *VNI = defFromParent(0, ParentVNI, Start, MBB,
                              MBB.SkipPHIsLabelsAndDebug(MBB.begin()));
  // The open interval still owns [Start;def) at the top of the block.
  RegAssign.insert(Start, VNI->def, OpenIdx);
  DEBUG(dump());
  return VNI->def;
}
/// Let the open interval overlap the complement on [Start;End). The range
/// must stay within one basic block and map to a single parent value; the
/// complement's live range there is recomputed later by LRCalc.
void SplitEditor::overlapIntv(SlotIndex Start, SlotIndex End) {
  assert(OpenIdx && "openIntv not called before overlapIntv");
  const VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  assert(ParentVNI == Edit->getParent().getVNInfoBefore(End) &&
         "Parent changes value in extended range");
  assert(LIS.getMBBFromIndex(Start) == LIS.getMBBFromIndex(End) &&
         "Range cannot span basic blocks");

  // The complement interval will be extended as needed by LRCalc.extend().
  if (ParentVNI)
    forceRecompute(0, *ParentVNI);
  DEBUG(dbgs() << " overlapIntv [" << Start << ';' << End << "):");
  RegAssign.insert(Start, End, OpenIdx);
  DEBUG(dump());
}
  731. //===----------------------------------------------------------------------===//
  732. // Spill modes
  733. //===----------------------------------------------------------------------===//
/// Erase the back-copy instructions listed in Copies from the complement
/// interval (RegIdx 0), and patch RegAssign when a removed instruction was
/// the kill point of another register assignment.
void SplitEditor::removeBackCopies(SmallVectorImpl<VNInfo*> &Copies) {
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  DEBUG(dbgs() << "Removing " << Copies.size() << " back-copies.\n");
  RegAssignMap::iterator AssignI;
  AssignI.setMap(RegAssign);

  for (unsigned i = 0, e = Copies.size(); i != e; ++i) {
    SlotIndex Def = Copies[i]->def;
    MachineInstr *MI = LIS.getInstructionFromIndex(Def);
    assert(MI && "No instruction for back-copy");

    MachineBasicBlock *MBB = MI->getParent();
    MachineBasicBlock::iterator MBBI(MI);
    bool AtBegin;
    // Step backwards over debug instructions to the previous real
    // instruction, or note that MI is first in its block.
    do AtBegin = MBBI == MBB->begin();
    while (!AtBegin && (--MBBI)->isDebugInstr());

    DEBUG(dbgs() << "Removing " << Def << '\t' << *MI);
    LIS.removeVRegDefAt(*LI, Def);
    LIS.RemoveMachineInstrFromMaps(*MI);
    MI->eraseFromParent();

    // Adjust RegAssign if a register assignment is killed at Def. We want to
    // avoid calculating the live range of the source register if possible.
    AssignI.find(Def.getPrevSlot());
    if (!AssignI.valid() || AssignI.start() >= Def)
      continue;
    // If MI doesn't kill the assigned register, just leave it.
    if (AssignI.stop() != Def)
      continue;
    unsigned RegIdx = AssignI.value();
    if (AtBegin || !MBBI->readsVirtualRegister(Edit->getReg())) {
      // No simple preceding use to move the kill to; recompute instead.
      DEBUG(dbgs() << " cannot find simple kill of RegIdx " << RegIdx << '\n');
      forceRecompute(RegIdx, *Edit->getParent().getVNInfoAt(Def));
    } else {
      // Shrink the assignment so it ends at the preceding use of the reg.
      SlotIndex Kill = LIS.getInstructionIndex(*MBBI).getRegSlot();
      DEBUG(dbgs() << " move kill to " << Kill << '\t' << *MBBI);
      AssignI.setStop(Kill);
    }
  }
}
/// Find a dominator of MBB, still dominated by DefMBB, with the smallest
/// loop depth seen while walking up the dominator tree. A block outside any
/// loop (depth 0) is returned immediately since no dominator can be cheaper.
MachineBasicBlock*
SplitEditor::findShallowDominator(MachineBasicBlock *MBB,
                                  MachineBasicBlock *DefMBB) {
  if (MBB == DefMBB)
    return MBB;
  assert(MDT.dominates(DefMBB, MBB) && "MBB must be dominated by the def.");

  const MachineLoopInfo &Loops = SA.Loops;
  const MachineLoop *DefLoop = Loops.getLoopFor(DefMBB);
  MachineDomTreeNode *DefDomNode = MDT[DefMBB];

  // Best candidate so far.
  MachineBasicBlock *BestMBB = MBB;
  unsigned BestDepth = std::numeric_limits<unsigned>::max();

  while (true) {
    const MachineLoop *Loop = Loops.getLoopFor(MBB);

    // MBB isn't in a loop, it doesn't get any better. All dominators have a
    // higher frequency by definition.
    if (!Loop) {
      DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB) << " dominates "
                   << printMBBReference(*MBB) << " at depth 0\n");
      return MBB;
    }

    // We'll never be able to exit the DefLoop.
    if (Loop == DefLoop) {
      DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB) << " dominates "
                   << printMBBReference(*MBB) << " in the same loop\n");
      return MBB;
    }

    // Least busy dominator seen so far.
    unsigned Depth = Loop->getLoopDepth();
    if (Depth < BestDepth) {
      BestMBB = MBB;
      BestDepth = Depth;
      DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB) << " dominates "
                   << printMBBReference(*MBB) << " at depth " << Depth << '\n');
    }

    // Leave loop by going to the immediate dominator of the loop header.
    // This is a bigger stride than simply walking up the dominator tree.
    MachineDomTreeNode *IDom = MDT[Loop->getHeader()]->getIDom();

    // Too far up the dominator tree?
    if (!IDom || !MDT.dominates(DefDomNode, IDom))
      return BestMBB;
    MBB = IDom->getBlock();
  }
}
/// Collect in BackCopies the back-copies that are dominated by another
/// back-copy of the same parent value, for parent values listed in
/// NotToHoistSet (i.e. values whose copies will not be hoisted).
void SplitEditor::computeRedundantBackCopies(
    DenseSet<unsigned> &NotToHoistSet, SmallVectorImpl<VNInfo *> &BackCopies) {
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();
  SmallVector<SmallPtrSet<VNInfo *, 8>, 8> EqualVNs(Parent->getNumValNums());
  SmallPtrSet<VNInfo *, 8> DominatedVNIs;

  // Aggregate VNIs having the same value as ParentVNI.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    EqualVNs[ParentVNI->id].insert(VNI);
  }

  // For VNI aggregation of each ParentVNI, collect dominated, i.e.,
  // redundant VNIs to BackCopies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    if (!NotToHoistSet.count(ParentVNI->id))
      continue;
    SmallPtrSetIterator<VNInfo *> It1 = EqualVNs[ParentVNI->id].begin();
    SmallPtrSetIterator<VNInfo *> It2 = It1;
    // Pairwise dominance check among all copies of this parent value.
    for (; It1 != EqualVNs[ParentVNI->id].end(); ++It1) {
      It2 = It1;
      for (++It2; It2 != EqualVNs[ParentVNI->id].end(); ++It2) {
        if (DominatedVNIs.count(*It1) || DominatedVNIs.count(*It2))
          continue;

        MachineBasicBlock *MBB1 = LIS.getMBBFromIndex((*It1)->def);
        MachineBasicBlock *MBB2 = LIS.getMBBFromIndex((*It2)->def);
        if (MBB1 == MBB2) {
          // Same block: the later def is the redundant one.
          DominatedVNIs.insert((*It1)->def < (*It2)->def ? (*It2) : (*It1));
        } else if (MDT.dominates(MBB1, MBB2)) {
          DominatedVNIs.insert(*It2);
        } else if (MDT.dominates(MBB2, MBB1)) {
          DominatedVNIs.insert(*It1);
        }
      }
    }
    if (!DominatedVNIs.empty()) {
      // The dominated copies will be deleted, so the complement must be
      // recomputed to cover their former uses.
      forceRecompute(0, *ParentVNI);
      for (auto VNI : DominatedVNIs) {
        BackCopies.push_back(VNI);
      }
      DominatedVNIs.clear();
    }
  }
}
/// For SM_Size mode, find a common dominator for all the back-copies for
/// the same ParentVNI and hoist the backcopies to the dominator BB.
/// For SM_Speed mode, if the common dominator is hot and it is not beneficial
/// to do the hoisting, simply remove the dominated backcopies for the same
/// ParentVNI.
void SplitEditor::hoistCopies() {
  // Get the complement interval, always RegIdx 0.
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();

  // Track the nearest common dominator for all back-copies for each ParentVNI,
  // indexed by ParentVNI->id.
  using DomPair = std::pair<MachineBasicBlock *, SlotIndex>;
  SmallVector<DomPair, 8> NearestDom(Parent->getNumValNums());
  // The total cost of all the back-copies for each ParentVNI.
  SmallVector<BlockFrequency, 8> Costs(Parent->getNumValNums());
  // The ParentVNI->id set for which hoisting back-copies are not beneficial
  // for Speed.
  DenseSet<unsigned> NotToHoistSet;

  // Find the nearest common dominator for parent values with multiple
  // back-copies. If a single back-copy dominates, put it in DomPair.second.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    assert(ParentVNI && "Parent not live at complement def");

    // Don't hoist remats. The complement is probably going to disappear
    // completely anyway.
    if (Edit->didRematerialize(ParentVNI))
      continue;

    MachineBasicBlock *ValMBB = LIS.getMBBFromIndex(VNI->def);

    DomPair &Dom = NearestDom[ParentVNI->id];

    // Keep directly defined parent values. This is either a PHI or an
    // instruction in the complement range. All other copies of ParentVNI
    // should be eliminated.
    if (VNI->def == ParentVNI->def) {
      DEBUG(dbgs() << "Direct complement def at " << VNI->def << '\n');
      Dom = DomPair(ValMBB, VNI->def);
      continue;
    }
    // Skip the singly mapped values. There is nothing to gain from hoisting a
    // single back-copy.
    if (Values.lookup(std::make_pair(0, ParentVNI->id)).getPointer()) {
      DEBUG(dbgs() << "Single complement def at " << VNI->def << '\n');
      continue;
    }

    if (!Dom.first) {
      // First time we see ParentVNI. VNI dominates itself.
      Dom = DomPair(ValMBB, VNI->def);
    } else if (Dom.first == ValMBB) {
      // Two defs in the same block. Pick the earlier def.
      if (!Dom.second.isValid() || VNI->def < Dom.second)
        Dom.second = VNI->def;
    } else {
      // Different basic blocks. Check if one dominates.
      MachineBasicBlock *Near =
          MDT.findNearestCommonDominator(Dom.first, ValMBB);
      if (Near == ValMBB)
        // Def ValMBB dominates.
        Dom = DomPair(ValMBB, VNI->def);
      else if (Near != Dom.first)
        // None dominate. Hoist to common dominator, need new def.
        Dom = DomPair(Near, SlotIndex());
      Costs[ParentVNI->id] += MBFI.getBlockFreq(ValMBB);
    }

    DEBUG(dbgs() << "Multi-mapped complement " << VNI->id << '@' << VNI->def
                 << " for parent " << ParentVNI->id << '@' << ParentVNI->def
                 << " hoist to " << printMBBReference(*Dom.first) << ' '
                 << Dom.second << '\n');
  }

  // Insert the hoisted copies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    DomPair &Dom = NearestDom[i];
    if (!Dom.first || Dom.second.isValid())
      continue;
    // This value needs a hoisted copy inserted at the end of Dom.first.
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    MachineBasicBlock *DefMBB = LIS.getMBBFromIndex(ParentVNI->def);
    // Get a less loopy dominator than Dom.first.
    Dom.first = findShallowDominator(Dom.first, DefMBB);
    // In speed mode, give up on hoisting when the dominator block is hotter
    // than all the back-copies combined; record the value for later cleanup.
    if (SpillMode == SM_Speed &&
        MBFI.getBlockFreq(Dom.first) > Costs[ParentVNI->id]) {
      NotToHoistSet.insert(ParentVNI->id);
      continue;
    }
    SlotIndex Last = LIS.getMBBEndIdx(Dom.first).getPrevSlot();
    Dom.second =
        defFromParent(0, ParentVNI, Last, *Dom.first,
                      SA.getLastSplitPointIter(Dom.first))->def;
  }

  // Remove redundant back-copies that are now known to be dominated by another
  // def with the same value.
  SmallVector<VNInfo*, 8> BackCopies;
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    const DomPair &Dom = NearestDom[ParentVNI->id];
    if (!Dom.first || Dom.second == VNI->def ||
        NotToHoistSet.count(ParentVNI->id))
      continue;
    BackCopies.push_back(VNI);
    forceRecompute(0, *ParentVNI);
  }

  // If it is not beneficial to hoist all the BackCopies, simply remove
  // redundant BackCopies in speed mode.
  if (SpillMode == SM_Speed && !NotToHoistSet.empty())
    computeRedundantBackCopies(NotToHoistSet, BackCopies);

  removeBackCopies(BackCopies);
}
/// transferValues - Transfer all possible values to the new live ranges.
/// Values that were rematerialized are left alone, they need LRCalc.extend().
/// Returns true if any values were skipped and must be recomputed later.
bool SplitEditor::transferValues() {
  bool Skipped = false;
  RegAssignMap::const_iterator AssignI = RegAssign.begin();
  for (const LiveRange::Segment &S : Edit->getParent()) {
    DEBUG(dbgs() << " blit " << S << ':');
    VNInfo *ParentVNI = S.valno;
    // RegAssign has holes where RegIdx 0 should be used.
    SlotIndex Start = S.start;
    AssignI.advanceTo(Start);
    do {
      // Determine which RegIdx owns [Start;End), clipping End at the next
      // assignment boundary.
      unsigned RegIdx;
      SlotIndex End = S.end;
      if (!AssignI.valid()) {
        RegIdx = 0;
      } else if (AssignI.start() <= Start) {
        RegIdx = AssignI.value();
        if (AssignI.stop() < End) {
          End = AssignI.stop();
          ++AssignI;
        }
      } else {
        RegIdx = 0;
        End = std::min(End, AssignI.start());
      }

      // The interval [Start;End) is continuously mapped to RegIdx, ParentVNI.
      DEBUG(dbgs() << " [" << Start << ';' << End << ")=" << RegIdx
                   << '(' << printReg(Edit->get(RegIdx)) << ')');
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));

      // Check for a simply defined value that can be blitted directly.
      ValueForcePair VFP = Values.lookup(std::make_pair(RegIdx, ParentVNI->id));
      if (VNInfo *VNI = VFP.getPointer()) {
        DEBUG(dbgs() << ':' << VNI->id);
        LI.addSegment(LiveInterval::Segment(Start, End, VNI));
        Start = End;
        continue;
      }

      // Skip values with forced recomputation.
      if (VFP.getInt()) {
        DEBUG(dbgs() << "(recalc)");
        Skipped = true;
        Start = End;
        continue;
      }

      LiveRangeCalc &LRC = getLRCalc(RegIdx);

      // This value has multiple defs in RegIdx, but it wasn't rematerialized,
      // so the live range is accurate. Add live-in blocks in [Start;End) to the
      // LiveInBlocks.
      MachineFunction::iterator MBB = LIS.getMBBFromIndex(Start)->getIterator();
      SlotIndex BlockStart, BlockEnd;
      std::tie(BlockStart, BlockEnd) = LIS.getSlotIndexes()->getMBBRange(&*MBB);

      // The first block may be live-in, or it may have its own def.
      if (Start != BlockStart) {
        VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
        assert(VNI && "Missing def for complex mapped value");
        DEBUG(dbgs() << ':' << VNI->id << "*" << printMBBReference(*MBB));
        // MBB has its own def. Is it also live-out?
        if (BlockEnd <= End)
          LRC.setLiveOutValue(&*MBB, VNI);
        // Skip to the next block for live-in.
        ++MBB;
        BlockStart = BlockEnd;
      }

      // Handle the live-in blocks covered by [Start;End).
      assert(Start <= BlockStart && "Expected live-in block");
      while (BlockStart < End) {
        DEBUG(dbgs() << ">" << printMBBReference(*MBB));
        BlockEnd = LIS.getMBBEndIdx(&*MBB);
        if (BlockStart == ParentVNI->def) {
          // This block has the def of a parent PHI, so it isn't live-in.
          assert(ParentVNI->isPHIDef() && "Non-phi defined at block start?");
          VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
          assert(VNI && "Missing def for complex mapped parent PHI");
          if (End >= BlockEnd)
            LRC.setLiveOutValue(&*MBB, VNI); // Live-out as well.
        } else {
          // This block needs a live-in value. The last block covered may not
          // be live-out.
          if (End < BlockEnd)
            LRC.addLiveInBlock(LI, MDT[&*MBB], End);
          else {
            // Live-through, and we don't know the value.
            LRC.addLiveInBlock(LI, MDT[&*MBB]);
            LRC.setLiveOutValue(&*MBB, nullptr);
          }
        }
        BlockStart = BlockEnd;
        ++MBB;
      }
      Start = End;
    } while (Start != S.end);
    DEBUG(dbgs() << '\n');
  }

  // Resolve the live-in block lists accumulated above.
  LRCalc[0].calculateValues();
  if (SpillMode)
    LRCalc[1].calculateValues();

  return Skipped;
}
  1069. static bool removeDeadSegment(SlotIndex Def, LiveRange &LR) {
  1070. const LiveRange::Segment *Seg = LR.getSegmentContaining(Def);
  1071. if (Seg == nullptr)
  1072. return true;
  1073. if (Seg->end != Def.getDeadSlot())
  1074. return false;
  1075. // This is a dead PHI. Remove it.
  1076. LR.removeSegment(*Seg, true);
  1077. return true;
  1078. }
/// Extend LR (restricted to lane mask LM) so the value reaches the PHI def
/// in block B from every predecessor where the parent (sub)range is live-out.
/// Predecessors without a live-out value are skipped, like undef PHI
/// operands.
void SplitEditor::extendPHIRange(MachineBasicBlock &B, LiveRangeCalc &LRC,
                                 LiveRange &LR, LaneBitmask LM,
                                 ArrayRef<SlotIndex> Undefs) {
  for (MachineBasicBlock *P : B.predecessors()) {
    SlotIndex End = LIS.getMBBEndIdx(P);
    SlotIndex LastUse = End.getPrevSlot();
    // The predecessor may not have a live-out value. That is OK, like an
    // undef PHI operand.
    LiveInterval &PLI = Edit->getParent();
    // Need the cast because the inputs to ?: would otherwise be deemed
    // "incompatible": SubRange vs LiveInterval.
    LiveRange &PSR = !LM.all() ? getSubRangeForMask(LM, PLI)
                               : static_cast<LiveRange&>(PLI);
    if (PSR.liveAt(LastUse))
      LRC.extend(LR, End, /*PhysReg=*/0, Undefs);
  }
}
/// Extend the new intervals so parent PHI values are live-out of all
/// predecessor blocks, removing PHI defs that turned out to be dead.
void SplitEditor::extendPHIKillRanges() {
  // Extend live ranges to be live-out for successor PHI values.

  // Visit each PHI def slot in the parent live interval. If the def is dead,
  // remove it. Otherwise, extend the live interval to reach the end indexes
  // of all predecessor blocks.

  LiveInterval &ParentLI = Edit->getParent();
  for (const VNInfo *V : ParentLI.valnos) {
    if (V->isUnused() || !V->isPHIDef())
      continue;

    unsigned RegIdx = RegAssign.lookup(V->def);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    LiveRangeCalc &LRC = getLRCalc(RegIdx);
    MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
    if (!removeDeadSegment(V->def, LI))
      extendPHIRange(B, LRC, LI, LaneBitmask::getAll(), /*Undefs=*/{});
  }

  // Repeat for each subrange of the parent, using a private LiveRangeCalc
  // and the per-subrange undef points.
  SmallVector<SlotIndex, 4> Undefs;
  LiveRangeCalc SubLRC;

  for (LiveInterval::SubRange &PS : ParentLI.subranges()) {
    for (const VNInfo *V : PS.valnos) {
      if (V->isUnused() || !V->isPHIDef())
        continue;

      unsigned RegIdx = RegAssign.lookup(V->def);
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
      LiveInterval::SubRange &S = getSubRangeForMask(PS.LaneMask, LI);
      if (removeDeadSegment(V->def, S))
        continue;

      MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
      SubLRC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      Undefs.clear();
      LI.computeSubRangeUndefs(Undefs, PS.LaneMask, MRI, *LIS.getSlotIndexes());
      extendPHIRange(B, SubLRC, S, PS.LaneMask, Undefs);
    }
  }
}
/// rewriteAssigned - Rewrite all uses of Edit->getReg() to the register
/// mapped by RegAssign at each use's slot index, optionally extending live
/// ranges to cover the rewritten uses.
void SplitEditor::rewriteAssigned(bool ExtendRanges) {
  // A use whose subrange extension must be deferred until all defs of the
  // register have been rewritten.
  struct ExtPoint {
    ExtPoint(const MachineOperand &O, unsigned R, SlotIndex N)
        : MO(O), RegIdx(R), Next(N) {}
    MachineOperand MO;    // Copy of the rewritten operand.
    unsigned RegIdx;      // Index of the interval the operand maps to.
    SlotIndex Next;       // Slot the live range must be extended to.
  };

  SmallVector<ExtPoint,4> ExtPoints;

  for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(Edit->getReg()),
       RE = MRI.reg_end(); RI != RE;) {
    MachineOperand &MO = *RI;
    MachineInstr *MI = MO.getParent();
    // Advance before MO is rewritten; setReg() unlinks MO from this use list.
    ++RI;
    // LiveDebugVariables should have handled all DBG_VALUE instructions.
    if (MI->isDebugValue()) {
      DEBUG(dbgs() << "Zapping " << *MI);
      MO.setReg(0);
      continue;
    }

    // <undef> operands don't really read the register, so it doesn't matter
    // which register we choose. When the use operand is tied to a def, we must
    // use the same register as the def, so just do that always.
    SlotIndex Idx = LIS.getInstructionIndex(*MI);
    if (MO.isDef() || MO.isUndef())
      Idx = Idx.getRegSlot(MO.isEarlyClobber());

    // Rewrite to the mapped register at Idx.
    unsigned RegIdx = RegAssign.lookup(Idx);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    MO.setReg(LI.reg);
    DEBUG(dbgs() << " rewr " << printMBBReference(*MI->getParent()) << '\t'
                 << Idx << ':' << RegIdx << '\t' << *MI);

    // Extend liveness to Idx if the instruction reads reg.
    if (!ExtendRanges || MO.isUndef())
      continue;

    // Skip instructions that don't read Reg.
    if (MO.isDef()) {
      if (!MO.getSubReg() && !MO.isEarlyClobber())
        continue;
      // We may want to extend a live range for a partial redef, or for a use
      // tied to an early clobber.
      Idx = Idx.getPrevSlot();
      if (!Edit->getParent().liveAt(Idx))
        continue;
    } else
      Idx = Idx.getRegSlot(true);

    SlotIndex Next = Idx.getNextSlot();
    if (LI.hasSubRanges()) {
      // We have to delay extending subranges until we have seen all operands
      // defining the register. This is because a <def,read-undef> operand
      // will create an "undef" point, and we cannot extend any subranges
      // until all of them have been accounted for.
      if (MO.isUse())
        ExtPoints.push_back(ExtPoint(MO, RegIdx, Next));
    } else {
      LiveRangeCalc &LRC = getLRCalc(RegIdx);
      LRC.extend(LI, Next, 0, ArrayRef<SlotIndex>());
    }
  }

  // Now that every def has been rewritten, extend the deferred subrange uses.
  for (ExtPoint &EP : ExtPoints) {
    LiveInterval &LI = LIS.getInterval(Edit->get(EP.RegIdx));
    assert(LI.hasSubRanges());

    LiveRangeCalc SubLRC;
    unsigned Reg = EP.MO.getReg(), Sub = EP.MO.getSubReg();
    LaneBitmask LM = Sub != 0 ? TRI.getSubRegIndexLaneMask(Sub)
                              : MRI.getMaxLaneMaskForVReg(Reg);
    for (LiveInterval::SubRange &S : LI.subranges()) {
      if ((S.LaneMask & LM).none())
        continue;
      // The problem here can be that the new register may have been created
      // for a partially defined original register. For example:
      //   %0:subreg_hireg<def,read-undef> = ...
      //   ...
      //   %1 = COPY %0
      if (S.empty())
        continue;
      SubLRC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      SmallVector<SlotIndex, 4> Undefs;
      LI.computeSubRangeUndefs(Undefs, S.LaneMask, MRI, *LIS.getSlotIndexes());
      SubLRC.extend(S, EP.Next, 0, Undefs);
    }
  }

  // Rebuild the main range of every interval that has subranges.
  for (unsigned R : *Edit) {
    LiveInterval &LI = LIS.getInterval(R);
    if (!LI.hasSubRanges())
      continue;
    LI.clear();
    LI.removeEmptySubRanges();
    LIS.constructMainRangeFromSubranges(LI);
  }
}
/// Delete def instructions that became fully dead because their value was
/// rematerialized everywhere. Dead instructions are removed through
/// LiveRangeEdit so liveness stays consistent.
void SplitEditor::deleteRematVictims() {
  SmallVector<MachineInstr*, 8> Dead;
  for (LiveRangeEdit::iterator I = Edit->begin(), E = Edit->end(); I != E; ++I){
    LiveInterval *LI = &LIS.getInterval(*I);
    for (const LiveRange::Segment &S : LI->segments) {
      // Dead defs end at the dead slot.
      if (S.end != S.valno->def.getDeadSlot())
        continue;
      // PHI defs have no instruction to delete.
      if (S.valno->isPHIDef())
        continue;
      MachineInstr *MI = LIS.getInstructionFromIndex(S.valno->def);
      assert(MI && "Missing instruction for dead def");
      MI->addRegisterDead(LI->reg, &TRI);

      // Only delete instructions whose defs are all dead.
      if (!MI->allDefsAreDead())
        continue;

      DEBUG(dbgs() << "All defs dead: " << *MI);
      Dead.push_back(MI);
    }
  }

  if (Dead.empty())
    return;

  Edit->eliminateDeadDefs(Dead, None, &AA);
}
/// Mark ParentVNI for forced recomputation in every edited interval. PHI
/// values are traced through their predecessors so every contributing value
/// is recomputed as well.
void SplitEditor::forceRecomputeVNI(const VNInfo &ParentVNI) {
  // Fast-path for common case.
  if (!ParentVNI.isPHIDef()) {
    for (unsigned I = 0, E = Edit->size(); I != E; ++I)
      forceRecompute(I, ParentVNI);
    return;
  }

  // Trace value through phis.
  SmallPtrSet<const VNInfo *, 8> Visited; ///< whether VNI was/is in worklist.
  SmallVector<const VNInfo *, 4> WorkList;
  Visited.insert(&ParentVNI);
  WorkList.push_back(&ParentVNI);

  const LiveInterval &ParentLI = Edit->getParent();
  const SlotIndexes &Indexes = *LIS.getSlotIndexes();
  do {
    const VNInfo &VNI = *WorkList.back();
    WorkList.pop_back();
    for (unsigned I = 0, E = Edit->size(); I != E; ++I)
      forceRecompute(I, VNI);
    if (!VNI.isPHIDef())
      continue;

    // Queue the value flowing in from each predecessor of the PHI block,
    // unless it was already visited.
    MachineBasicBlock &MBB = *Indexes.getMBBFromIndex(VNI.def);
    for (const MachineBasicBlock *Pred : MBB.predecessors()) {
      SlotIndex PredEnd = Indexes.getMBBEndIdx(Pred);
      VNInfo *PredVNI = ParentLI.getVNInfoBefore(PredEnd);
      assert(PredVNI && "Value available in PhiVNI predecessor");
      if (Visited.insert(PredVNI).second)
        WorkList.push_back(PredVNI);
    }
  } while(!WorkList.empty());
}
/// finish - Complete the split: insert parent defs, hoist back-copies per
/// spill mode, transfer values, rewrite uses, and split disconnected
/// components into separate registers. If LRMap is given, it is filled with
/// a mapping from each final interval to the Edit index it came from.
void SplitEditor::finish(SmallVectorImpl<unsigned> *LRMap) {
  ++NumFinished;

  // At this point, the live intervals in Edit contain VNInfos corresponding to
  // the inserted copies.

  // Add the original defs from the parent interval.
  for (const VNInfo *ParentVNI : Edit->getParent().valnos) {
    if (ParentVNI->isUnused())
      continue;
    unsigned RegIdx = RegAssign.lookup(ParentVNI->def);
    defValue(RegIdx, ParentVNI, ParentVNI->def, true);

    // Force rematted values to be recomputed everywhere.
    // The new live ranges may be truncated.
    if (Edit->didRematerialize(ParentVNI))
      forceRecomputeVNI(*ParentVNI);
  }

  // Hoist back-copies to the complement interval when in spill mode.
  switch (SpillMode) {
  case SM_Partition:
    // Leave all back-copies as is.
    break;
  case SM_Size:
  case SM_Speed:
    // hoistCopies will behave differently between size and speed.
    hoistCopies();
  }

  // Transfer the simply mapped values, check if any are skipped.
  bool Skipped = transferValues();

  // Rewrite virtual registers, possibly extending ranges.
  rewriteAssigned(Skipped);

  // Skipped values need their PHI kill ranges completed by hand.
  if (Skipped)
    extendPHIKillRanges();
  else
    ++NumSimple;

  // Delete defs that were rematted everywhere.
  if (Skipped)
    deleteRematVictims();

  // Get rid of unused values and set phi-kill flags.
  for (unsigned Reg : *Edit) {
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeEmptySubRanges();
    LI.RenumberValues();
  }

  // Provide a reverse mapping from original indices to Edit ranges.
  if (LRMap) {
    LRMap->clear();
    for (unsigned i = 0, e = Edit->size(); i != e; ++i)
      LRMap->push_back(i);
  }

  // Now check if any registers were separated into multiple components.
  ConnectedVNInfoEqClasses ConEQ(LIS);
  for (unsigned i = 0, e = Edit->size(); i != e; ++i) {
    // Don't use iterators, they are invalidated by create() below.
    unsigned VReg = Edit->get(i);
    LiveInterval &LI = LIS.getInterval(VReg);
    SmallVector<LiveInterval*, 8> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
    unsigned Original = VRM.getOriginal(VReg);
    for (LiveInterval *SplitLI : SplitLIs)
      VRM.setIsSplitFromReg(SplitLI->reg, Original);

    // The new intervals all map back to i.
    if (LRMap)
      LRMap->resize(Edit->size(), i);
  }

  // Calculate spill weight and allocation hints for new intervals.
  Edit->calculateRegClassAndHint(VRM.getMachineFunction(), SA.Loops, MBFI);

  assert(!LRMap || LRMap->size() == Edit->size());
}
  1346. //===----------------------------------------------------------------------===//
  1347. // Single Block Splitting
  1348. //===----------------------------------------------------------------------===//
  1349. bool SplitAnalysis::shouldSplitSingleBlock(const BlockInfo &BI,
  1350. bool SingleInstrs) const {
  1351. // Always split for multiple instructions.
  1352. if (!BI.isOneInstr())
  1353. return true;
  1354. // Don't split for single instructions unless explicitly requested.
  1355. if (!SingleInstrs)
  1356. return false;
  1357. // Splitting a live-through range always makes progress.
  1358. if (BI.LiveIn && BI.LiveOut)
  1359. return true;
  1360. // No point in isolating a copy. It has no register class constraints.
  1361. if (LIS.getInstructionFromIndex(BI.FirstInstr)->isCopyLike())
  1362. return false;
  1363. // Finally, don't isolate an end point that was created by earlier splits.
  1364. return isOriginalEndpoint(BI.FirstInstr);
  1365. }
  1366. void SplitEditor::splitSingleBlock(const SplitAnalysis::BlockInfo &BI) {
  1367. openIntv();
  1368. SlotIndex LastSplitPoint = SA.getLastSplitPoint(BI.MBB->getNumber());
  1369. SlotIndex SegStart = enterIntvBefore(std::min(BI.FirstInstr,
  1370. LastSplitPoint));
  1371. if (!BI.LiveOut || BI.LastInstr < LastSplitPoint) {
  1372. useIntv(SegStart, leaveIntvAfter(BI.LastInstr));
  1373. } else {
  1374. // The last use is after the last valid split point.
  1375. SlotIndex SegStop = leaveIntvBefore(LastSplitPoint);
  1376. useIntv(SegStart, SegStop);
  1377. overlapIntv(SegStop, BI.LastInstr);
  1378. }
  1379. }
  1380. //===----------------------------------------------------------------------===//
  1381. // Global Live Range Splitting Support
  1382. //===----------------------------------------------------------------------===//
  1383. // These methods support a method of global live range splitting that uses a
  1384. // global algorithm to decide intervals for CFG edges. They will insert split
  1385. // points and color intervals in basic blocks while avoiding interference.
  1386. //
  1387. // Note that splitSingleBlock is also useful for blocks where both CFG edges
  1388. // are on the stack.
/// splitLiveThroughBlock - Split a live range that is live through block
/// MBBNum so that interval IntvIn is used on entry and IntvOut on exit.
/// LeaveBefore / EnterAfter bound the interference inside the block (a null
/// SlotIndex means no interference on that side); an IntvIn or IntvOut of 0
/// means the value is on the stack on that side of the block.
void SplitEditor::splitLiveThroughBlock(unsigned MBBNum,
                                        unsigned IntvIn, SlotIndex LeaveBefore,
                                        unsigned IntvOut, SlotIndex EnterAfter){
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(MBBNum);

  DEBUG(dbgs() << "%bb." << MBBNum << " [" << Start << ';' << Stop << ") intf "
               << LeaveBefore << '-' << EnterAfter << ", live-through "
               << IntvIn << " -> " << IntvOut);

  assert((IntvIn || IntvOut) && "Use splitSingleBlock for isolated blocks");

  assert((!LeaveBefore || LeaveBefore < Stop) && "Interference after block");
  assert((!IntvIn || !LeaveBefore || LeaveBefore > Start) && "Impossible intf");
  assert((!EnterAfter || EnterAfter >= Start) && "Interference before block");

  MachineBasicBlock *MBB = VRM.getMachineFunction().getBlockNumbered(MBBNum);

  if (!IntvOut) {
    DEBUG(dbgs() << ", spill on entry.\n");
    //
    //        <<<<<<<<<    Possible LeaveBefore interference.
    //    |-----------|    Live through.
    //    -____________    Spill on entry.
    //
    selectIntv(IntvIn);
    SlotIndex Idx = leaveIntvAtTop(*MBB);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    (void)Idx;
    return;
  }

  if (!IntvIn) {
    DEBUG(dbgs() << ", reload on exit.\n");
    //
    //    >>>>>>>          Possible EnterAfter interference.
    //    |-----------|    Live through.
    //    ___________--    Reload on exit.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvAtEnd(*MBB);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    (void)Idx;
    return;
  }

  if (IntvIn == IntvOut && !LeaveBefore && !EnterAfter) {
    DEBUG(dbgs() << ", straight through.\n");
    //
    //    |-----------|    Live through.
    //    -------------    Straight through, same intv, no interference.
    //
    selectIntv(IntvOut);
    useIntv(Start, Stop);
    return;
  }

  // We cannot legally insert splits after LSP.
  SlotIndex LSP = SA.getLastSplitPoint(MBBNum);
  assert((!IntvOut || !EnterAfter || EnterAfter < LSP) && "Impossible intf");

  if (IntvIn != IntvOut && (!LeaveBefore || !EnterAfter ||
                  LeaveBefore.getBaseIndex() > EnterAfter.getBoundaryIndex())) {
    DEBUG(dbgs() << ", switch avoiding interference.\n");
    //
    //    >>>>     <<<<    Non-overlapping EnterAfter/LeaveBefore interference.
    //    |-----------|    Live through.
    //    ------=======    Switch intervals between interference.
    //
    selectIntv(IntvOut);
    SlotIndex Idx;
    // Enter IntvOut before the incoming interference when that is still a
    // legal split point; otherwise fall back to the end of the block.
    if (LeaveBefore && LeaveBefore < LSP) {
      Idx = enterIntvBefore(LeaveBefore);
      useIntv(Idx, Stop);
    } else {
      Idx = enterIntvAtEnd(*MBB);
    }
    selectIntv(IntvIn);
    useIntv(Start, Idx);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  DEBUG(dbgs() << ", create local intv for interference.\n");
  //
  //    >>><><><><<<<    Overlapping EnterAfter/LeaveBefore interference.
  //    |-----------|    Live through.
  //    ==---------==    Switch intervals before/after interference.
  //
  assert(LeaveBefore <= EnterAfter && "Missed case");

  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  selectIntv(IntvIn);
  Idx = leaveIntvBefore(LeaveBefore);
  useIntv(Start, Idx);
  assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
}
/// splitRegInBlock - Split the live range in BI.MBB where it enters the block
/// in register interval IntvIn, with interference starting at LeaveBefore (a
/// null SlotIndex means no interference). The range is moved to the stack, or
/// to a new local interval, before the interference begins.
void SplitEditor::splitRegInBlock(const SplitAnalysis::BlockInfo &BI,
                                  unsigned IntvIn, SlotIndex LeaveBefore) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';' << Stop
               << "), uses " << BI.FirstInstr << '-' << BI.LastInstr
               << ", reg-in " << IntvIn << ", leave before " << LeaveBefore
               << (BI.LiveOut ? ", stack-out" : ", killed in block"));

  assert(IntvIn && "Must have register in");
  assert(BI.LiveIn && "Must be live-in");
  assert((!LeaveBefore || LeaveBefore > Start) && "Bad interference");

  // If the range is killed in the block before interference starts, IntvIn
  // can simply cover everything.
  if (!BI.LiveOut && (!LeaveBefore || LeaveBefore >= BI.LastInstr)) {
    DEBUG(dbgs() << " before interference.\n");
    //
    //               <<<    Interference after kill.
    //     |---o---x   |    Killed in block.
    //     =========        Use IntvIn everywhere.
    //
    selectIntv(IntvIn);
    useIntv(Start, BI.LastInstr);
    return;
  }

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  // Interference only begins after the last use: spill after the last use,
  // or before the last split point when the last use is too late.
  if (!LeaveBefore || LeaveBefore > BI.LastInstr.getBoundaryIndex()) {
    //
    //               <<<    Possible interference after last use.
    //     |---o---o---|    Live-out on stack.
    //     =========____    Leave IntvIn after last use.
    //
    //                 <    Interference after last use.
    //     |---o---o--o|    Live-out on stack, late last use.
    //     ============     Copy to stack after LSP, overlap IntvIn.
    //            \_____    Stack interval is live-out.
    //
    if (BI.LastInstr < LSP) {
      DEBUG(dbgs() << ", spill after last use before interference.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvAfter(BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    } else {
      DEBUG(dbgs() << ", spill before last split point.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvBefore(LSP);
      overlapIntv(Idx, BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    }
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvIn. That
  // means we need to create a local interval that can be allocated a
  // different register.
  unsigned LocalIntv = openIntv();
  (void)LocalIntv;
  DEBUG(dbgs() << ", creating local interval " << LocalIntv << ".\n");

  if (!BI.LiveOut || BI.LastInstr < LSP) {
    //
    //           <<<<<<<    Interference overlapping uses.
    //     |---o---o---|    Live-out on stack.
    //     =====----____    Leave IntvIn before interference, then spill.
    //
    SlotIndex To = leaveIntvAfter(BI.LastInstr);
    SlotIndex From = enterIntvBefore(LeaveBefore);
    useIntv(From, To);
    selectIntv(IntvIn);
    useIntv(Start, From);
    assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
    return;
  }

  //           <<<<<<<    Interference overlapping uses.
  //     |---o---o--o|    Live-out on stack, late last use.
  //     =====-------     Copy to stack before LSP, overlap LocalIntv.
  //            \_____    Stack interval is live-out.
  //
  SlotIndex To = leaveIntvBefore(LSP);
  overlapIntv(To, BI.LastInstr);
  SlotIndex From = enterIntvBefore(std::min(To, LeaveBefore));
  useIntv(From, To);
  selectIntv(IntvIn);
  useIntv(Start, From);
  assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
}
/// splitRegOutBlock - Split the live range in BI.MBB where it leaves the
/// block in register interval IntvOut, with interference ending at EnterAfter
/// (a null SlotIndex means no interference). The range enters IntvOut only
/// after the interference is over, using a local interval for any earlier
/// uses.
void SplitEditor::splitRegOutBlock(const SplitAnalysis::BlockInfo &BI,
                                   unsigned IntvOut, SlotIndex EnterAfter) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';' << Stop
               << "), uses " << BI.FirstInstr << '-' << BI.LastInstr
               << ", reg-out " << IntvOut << ", enter after " << EnterAfter
               << (BI.LiveIn ? ", stack-in" : ", defined in block"));

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  assert(IntvOut && "Must have register out");
  assert(BI.LiveOut && "Must be live-out");
  assert((!EnterAfter || EnterAfter < LSP) && "Bad interference");

  // If the value is defined in the block after all interference, IntvOut can
  // cover everything from the def to the block end.
  if (!BI.LiveIn && (!EnterAfter || EnterAfter <= BI.FirstInstr)) {
    DEBUG(dbgs() << " after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |   o---o---|    Defined in block.
    //        =========    Use IntvOut everywhere.
    //
    selectIntv(IntvOut);
    useIntv(BI.FirstInstr, Stop);
    return;
  }

  // Interference ends before the first use: a plain reload before the first
  // use (clamped to the last split point) is enough.
  if (!EnterAfter || EnterAfter < BI.FirstInstr.getBaseIndex()) {
    DEBUG(dbgs() << ", reload after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |---o---o---|    Live-through, stack-in.
    //    ____=========    Enter IntvOut before first use.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvBefore(std::min(LSP, BI.FirstInstr));
    useIntv(Idx, Stop);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvOut. That
  // means we need to create a local interval that can be allocated a
  // different register.
  DEBUG(dbgs() << ", interference overlaps uses.\n");
  //
  //    >>>>>>>          Interference overlapping uses.
  //    |---o---o---|    Live-through, stack-in.
  //    ____---======    Create local interval for interference range.
  //
  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  // Local interval covers the uses that overlap the interference.
  openIntv();
  SlotIndex From = enterIntvBefore(std::min(Idx, BI.FirstInstr));
  useIntv(From, Idx);
}