SplitKit.cpp 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856
  1. //===- SplitKit.cpp - Toolkit for splitting live ranges -------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file contains the SplitAnalysis class as well as mutator functions for
  10. // live range splitting.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "SplitKit.h"
  14. #include "LiveRangeCalc.h"
  15. #include "llvm/ADT/ArrayRef.h"
  16. #include "llvm/ADT/DenseSet.h"
  17. #include "llvm/ADT/None.h"
  18. #include "llvm/ADT/STLExtras.h"
  19. #include "llvm/ADT/SmallPtrSet.h"
  20. #include "llvm/ADT/SmallVector.h"
  21. #include "llvm/ADT/Statistic.h"
  22. #include "llvm/CodeGen/LiveInterval.h"
  23. #include "llvm/CodeGen/LiveIntervals.h"
  24. #include "llvm/CodeGen/LiveRangeEdit.h"
  25. #include "llvm/CodeGen/MachineBasicBlock.h"
  26. #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
  27. #include "llvm/CodeGen/MachineDominators.h"
  28. #include "llvm/CodeGen/MachineFunction.h"
  29. #include "llvm/CodeGen/MachineInstr.h"
  30. #include "llvm/CodeGen/MachineInstrBuilder.h"
  31. #include "llvm/CodeGen/MachineLoopInfo.h"
  32. #include "llvm/CodeGen/MachineOperand.h"
  33. #include "llvm/CodeGen/MachineRegisterInfo.h"
  34. #include "llvm/CodeGen/SlotIndexes.h"
  35. #include "llvm/CodeGen/TargetInstrInfo.h"
  36. #include "llvm/CodeGen/TargetOpcodes.h"
  37. #include "llvm/CodeGen/TargetRegisterInfo.h"
  38. #include "llvm/CodeGen/TargetSubtargetInfo.h"
  39. #include "llvm/CodeGen/VirtRegMap.h"
  40. #include "llvm/Config/llvm-config.h"
  41. #include "llvm/IR/DebugLoc.h"
  42. #include "llvm/MC/LaneBitmask.h"
  43. #include "llvm/Support/Allocator.h"
  44. #include "llvm/Support/BlockFrequency.h"
  45. #include "llvm/Support/Compiler.h"
  46. #include "llvm/Support/Debug.h"
  47. #include "llvm/Support/ErrorHandling.h"
  48. #include "llvm/Support/raw_ostream.h"
  49. #include <algorithm>
  50. #include <cassert>
  51. #include <iterator>
  52. #include <limits>
  53. #include <tuple>
  54. #include <utility>
  55. using namespace llvm;
  56. #define DEBUG_TYPE "regalloc"
  57. STATISTIC(NumFinished, "Number of splits finished");
  58. STATISTIC(NumSimple, "Number of splits that were simple");
  59. STATISTIC(NumCopies, "Number of copies inserted for splitting");
  60. STATISTIC(NumRemats, "Number of rematerialized defs for splitting");
  61. STATISTIC(NumRepairs, "Number of invalid live ranges repaired");
  62. //===----------------------------------------------------------------------===//
  63. // Last Insert Point Analysis
  64. //===----------------------------------------------------------------------===//
// Size the per-block cache up front (one entry per basic block number);
// entries start invalid and are filled lazily by computeLastInsertPoint().
InsertPointAnalysis::InsertPointAnalysis(const LiveIntervals &lis,
                                         unsigned BBNum)
    : LIS(lis), LastInsertPoint(BBNum) {}
/// Compute the last possible insertion point in MBB for a value of CurLI.
/// Normally this is the first terminator (or the block end when there is
/// none). When CurLI is live into an EH-pad successor, inserts must happen
/// before the call that may throw, so the cached "last call" slot is
/// returned instead.
SlotIndex
InsertPointAnalysis::computeLastInsertPoint(const LiveInterval &CurLI,
                                            const MachineBasicBlock &MBB) {
  unsigned Num = MBB.getNumber();
  // Cached pair for this block: (first terminator slot, last call slot).
  std::pair<SlotIndex, SlotIndex> &LIP = LastInsertPoint[Num];
  SlotIndex MBBEnd = LIS.getMBBEndIdx(&MBB);

  // Collect EH-pad successors; they constrain the insert point.
  SmallVector<const MachineBasicBlock *, 1> EHPadSuccessors;
  for (const MachineBasicBlock *SMBB : MBB.successors())
    if (SMBB->isEHPad())
      EHPadSuccessors.push_back(SMBB);

  // Compute insert points on the first call. The pair is independent of the
  // current live interval.
  if (!LIP.first.isValid()) {
    MachineBasicBlock::const_iterator FirstTerm = MBB.getFirstTerminator();
    if (FirstTerm == MBB.end())
      LIP.first = MBBEnd;
    else
      LIP.first = LIS.getInstructionIndex(*FirstTerm);

    // If there is a landing pad successor, also find the call instruction.
    if (EHPadSuccessors.empty())
      return LIP.first;
    // There may not be a call instruction (?) in which case we ignore LPad.
    LIP.second = LIP.first;
    // Scan backwards for the last call in the block.
    for (MachineBasicBlock::const_iterator I = MBB.end(), E = MBB.begin();
         I != E;) {
      --I;
      if (I->isCall()) {
        LIP.second = LIS.getInstructionIndex(*I);
        break;
      }
    }
  }

  // If CurLI is live into a landing pad successor, move the last insert point
  // back to the call that may throw.
  if (!LIP.second)
    return LIP.first;

  if (none_of(EHPadSuccessors, [&](const MachineBasicBlock *EHPad) {
        return LIS.isLiveInToMBB(CurLI, EHPad);
      }))
    return LIP.first;

  // Find the value leaving MBB.
  const VNInfo *VNI = CurLI.getVNInfoBefore(MBBEnd);
  if (!VNI)
    return LIP.first;

  // If the value leaving MBB was defined after the call in MBB, it can't
  // really be live-in to the landing pad. This can happen if the landing pad
  // has a PHI, and this register is undef on the exceptional edge.
  // <rdar://problem/10664933>
  if (!SlotIndex::isEarlierInstr(VNI->def, LIP.second) && VNI->def < MBBEnd)
    return LIP.first;

  // Value is properly live-in to the landing pad.
  // Only allow inserts before the call.
  return LIP.second;
}
  122. MachineBasicBlock::iterator
  123. InsertPointAnalysis::getLastInsertPointIter(const LiveInterval &CurLI,
  124. MachineBasicBlock &MBB) {
  125. SlotIndex LIP = getLastInsertPoint(CurLI, MBB);
  126. if (LIP == LIS.getMBBEndIdx(&MBB))
  127. return MBB.end();
  128. return LIS.getInstructionFromIndex(LIP);
  129. }
  130. //===----------------------------------------------------------------------===//
  131. // Split Analysis
  132. //===----------------------------------------------------------------------===//
// Gather the analyses SplitAnalysis depends on. IPA is sized to the number
// of basic-block IDs in the function so last insert points can be cached
// per block.
SplitAnalysis::SplitAnalysis(const VirtRegMap &vrm, const LiveIntervals &lis,
                             const MachineLoopInfo &mli)
    : MF(vrm.getMachineFunction()), VRM(vrm), LIS(lis), Loops(mli),
      TII(*MF.getSubtarget().getInstrInfo()), IPA(lis, MF.getNumBlockIDs()) {}
  137. void SplitAnalysis::clear() {
  138. UseSlots.clear();
  139. UseBlocks.clear();
  140. ThroughBlocks.clear();
  141. CurLI = nullptr;
  142. DidRepairRange = false;
  143. }
/// analyzeUses - Count instructions, basic blocks, and loops using CurLI.
void SplitAnalysis::analyzeUses() {
  assert(UseSlots.empty() && "Call clear first");

  // First get all the defs from the interval values. This provides the correct
  // slots for early clobbers.
  for (const VNInfo *VNI : CurLI->valnos)
    if (!VNI->isPHIDef() && !VNI->isUnused())
      UseSlots.push_back(VNI->def);

  // Get use slots from the use-def chain.
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  for (MachineOperand &MO : MRI.use_nodbg_operands(CurLI->reg))
    if (!MO.isUndef())
      UseSlots.push_back(LIS.getInstructionIndex(*MO.getParent()).getRegSlot());

  array_pod_sort(UseSlots.begin(), UseSlots.end());

  // Remove duplicates, keeping the smaller slot for each instruction.
  // That is what we want for early clobbers.
  UseSlots.erase(std::unique(UseSlots.begin(), UseSlots.end(),
                             SlotIndex::isSameInstr),
                 UseSlots.end());

  // Compute per-live block info.
  if (!calcLiveBlockInfo()) {
    // FIXME: calcLiveBlockInfo found inconsistencies in the live range.
    // I am looking at you, RegisterCoalescer!
    DidRepairRange = true;
    ++NumRepairs;
    LLVM_DEBUG(dbgs() << "*** Fixing inconsistent live interval! ***\n");
    // LIS/CurLI are stored const; shrinking here is a deliberate repair of
    // upstream damage, hence the const_casts.
    const_cast<LiveIntervals&>(LIS)
      .shrinkToUses(const_cast<LiveInterval*>(CurLI));
    UseBlocks.clear();
    ThroughBlocks.clear();
    // After shrinking, the second attempt must succeed.
    bool fixed = calcLiveBlockInfo();
    (void)fixed;
    assert(fixed && "Couldn't fix broken live interval");
  }

  LLVM_DEBUG(dbgs() << "Analyze counted " << UseSlots.size() << " instrs in "
                    << UseBlocks.size() << " blocks, through "
                    << NumThroughBlocks << " blocks.\n");
}
/// calcLiveBlockInfo - Fill the LiveBlocks array with information about blocks
/// where CurLI is live. Returns false when an inconsistency (a segment
/// dangling mid-block with no uses) is found, so the caller can repair the
/// range and retry.
bool SplitAnalysis::calcLiveBlockInfo() {
  ThroughBlocks.resize(MF.getNumBlockIDs());
  NumThroughBlocks = NumGapBlocks = 0;
  if (CurLI->empty())
    return true;

  LiveInterval::const_iterator LVI = CurLI->begin();
  LiveInterval::const_iterator LVE = CurLI->end();

  // UseSlots is sorted (see analyzeUses); walk it in lockstep with the
  // live segments.
  SmallVectorImpl<SlotIndex>::const_iterator UseI, UseE;
  UseI = UseSlots.begin();
  UseE = UseSlots.end();

  // Loop over basic blocks where CurLI is live.
  MachineFunction::iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  while (true) {
    BlockInfo BI;
    BI.MBB = &*MFI;
    SlotIndex Start, Stop;
    std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

    // If the block contains no uses, the range must be live through. At one
    // point, RegisterCoalescer could create dangling ranges that ended
    // mid-block.
    if (UseI == UseE || *UseI >= Stop) {
      ++NumThroughBlocks;
      ThroughBlocks.set(BI.MBB->getNumber());
      // The range shouldn't end mid-block if there are no uses. This shouldn't
      // happen.
      if (LVI->end < Stop)
        return false;
    } else {
      // This block has uses. Find the first and last uses in the block.
      BI.FirstInstr = *UseI;
      assert(BI.FirstInstr >= Start);
      do ++UseI;
      while (UseI != UseE && *UseI < Stop);
      BI.LastInstr = UseI[-1];
      assert(BI.LastInstr < Stop);

      // LVI is the first live segment overlapping MBB.
      BI.LiveIn = LVI->start <= Start;

      // When not live in, the first use should be a def.
      if (!BI.LiveIn) {
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        assert(LVI->start == BI.FirstInstr && "First instr should be a def");
        BI.FirstDef = BI.FirstInstr;
      }

      // Look for gaps in the live range.
      BI.LiveOut = true;
      while (LVI->end < Stop) {
        SlotIndex LastStop = LVI->end;
        if (++LVI == LVE || LVI->start >= Stop) {
          // No more segments in this block: the value dies at LastStop.
          BI.LiveOut = false;
          BI.LastInstr = LastStop;
          break;
        }

        if (LastStop < LVI->start) {
          // There is a gap in the live range. Create duplicate entries for the
          // live-in snippet and the live-out snippet.
          ++NumGapBlocks;

          // Push the Live-in part.
          BI.LiveOut = false;
          UseBlocks.push_back(BI);
          UseBlocks.back().LastInstr = LastStop;

          // Set up BI for the live-out part.
          BI.LiveIn = false;
          BI.LiveOut = true;
          BI.FirstInstr = BI.FirstDef = LVI->start;
        }

        // A Segment that starts in the middle of the block must be a def.
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        if (!BI.FirstDef)
          BI.FirstDef = LVI->start;
      }

      UseBlocks.push_back(BI);

      // LVI is now at LVE or LVI->end >= Stop.
      if (LVI == LVE)
        break;
    }

    // Live segment ends exactly at Stop. Move to the next segment.
    if (LVI->end == Stop && ++LVI == LVE)
      break;

    // Pick the next basic block: fall through when the current segment
    // continues past Stop, otherwise jump to the segment's start block.
    if (LVI->start < Stop)
      ++MFI;
    else
      MFI = LIS.getMBBFromIndex(LVI->start)->getIterator();
  }

  assert(getNumLiveBlocks() == countLiveBlocks(CurLI) && "Bad block count");
  return true;
}
/// Count the number of basic blocks in which cli is live. Used as a sanity
/// cross-check against calcLiveBlockInfo's tallies.
unsigned SplitAnalysis::countLiveBlocks(const LiveInterval *cli) const {
  if (cli->empty())
    return 0;
  // advanceTo() is non-const; cast away constness for this read-only walk.
  LiveInterval *li = const_cast<LiveInterval*>(cli);
  LiveInterval::iterator LVI = li->begin();
  LiveInterval::iterator LVE = li->end();
  unsigned Count = 0;

  // Loop over basic blocks where li is live.
  MachineFunction::const_iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  SlotIndex Stop = LIS.getMBBEndIdx(&*MFI);
  while (true) {
    ++Count;
    // Skip segments entirely inside the current block.
    LVI = li->advanceTo(LVI, Stop);
    if (LVI == LVE)
      return Count;
    // Advance to the block containing the next live segment.
    do {
      ++MFI;
      Stop = LIS.getMBBEndIdx(&*MFI);
    } while (Stop <= LVI->start);
  }
}
  294. bool SplitAnalysis::isOriginalEndpoint(SlotIndex Idx) const {
  295. unsigned OrigReg = VRM.getOriginal(CurLI->reg);
  296. const LiveInterval &Orig = LIS.getInterval(OrigReg);
  297. assert(!Orig.empty() && "Splitting empty interval?");
  298. LiveInterval::const_iterator I = Orig.find(Idx);
  299. // Range containing Idx should begin at Idx.
  300. if (I != Orig.end() && I->start <= Idx)
  301. return I->start == Idx;
  302. // Range does not contain Idx, previous must end at Idx.
  303. return I != Orig.begin() && (--I)->end == Idx;
  304. }
/// Run the analysis on li: reset cached state, record the interval, then
/// recompute use slots and per-block liveness (order matters —
/// analyzeUses() reads CurLI).
void SplitAnalysis::analyze(const LiveInterval *li) {
  clear();
  CurLI = li;
  analyzeUses();
}
  310. //===----------------------------------------------------------------------===//
  311. // Split Editor
  312. //===----------------------------------------------------------------------===//
/// Create a new SplitEditor for editing the LiveInterval analyzed by SA.
/// All machine-level analyses are pulled from the VirtRegMap's function;
/// RegAssign is backed by this editor's own allocator.
SplitEditor::SplitEditor(SplitAnalysis &sa, AliasAnalysis &aa,
                         LiveIntervals &lis, VirtRegMap &vrm,
                         MachineDominatorTree &mdt,
                         MachineBlockFrequencyInfo &mbfi)
    : SA(sa), AA(aa), LIS(lis), VRM(vrm),
      MRI(vrm.getMachineFunction().getRegInfo()), MDT(mdt),
      TII(*vrm.getMachineFunction().getSubtarget().getInstrInfo()),
      TRI(*vrm.getMachineFunction().getSubtarget().getRegisterInfo()),
      MBFI(mbfi), RegAssign(Allocator) {}
/// Prepare the editor for a new round of splitting driven by LRE, using
/// spill mode SM. Clears all per-split state and primes rematerialization.
void SplitEditor::reset(LiveRangeEdit &LRE, ComplementSpillMode SM) {
  Edit = &LRE;
  SpillMode = SM;
  OpenIdx = 0;
  RegAssign.clear();
  Values.clear();

  // Reset the LiveRangeCalc instances needed for this spill mode.
  // LRCalc[1] is only needed when a spill mode is active (the complement
  // interval is then computed separately).
  LRCalc[0].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                  &LIS.getVNInfoAllocator());
  if (SpillMode)
    LRCalc[1].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                    &LIS.getVNInfoAllocator());

  // We don't need an AliasAnalysis since we will only be performing
  // cheap-as-a-copy remats anyway.
  Edit->anyRematerializable(nullptr);
}
  339. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  340. LLVM_DUMP_METHOD void SplitEditor::dump() const {
  341. if (RegAssign.empty()) {
  342. dbgs() << " empty\n";
  343. return;
  344. }
  345. for (RegAssignMap::const_iterator I = RegAssign.begin(); I.valid(); ++I)
  346. dbgs() << " [" << I.start() << ';' << I.stop() << "):" << I.value();
  347. dbgs() << '\n';
  348. }
  349. #endif
  350. LiveInterval::SubRange &SplitEditor::getSubRangeForMask(LaneBitmask LM,
  351. LiveInterval &LI) {
  352. for (LiveInterval::SubRange &S : LI.subranges())
  353. if (S.LaneMask == LM)
  354. return S;
  355. llvm_unreachable("SubRange for this mask not found");
  356. }
/// Add a dead def for VNI's value to LI, keeping subranges consistent.
/// \p Original selects the policy: when true the def is transferred from
/// the parent interval, so only subranges whose parent counterpart had a
/// def at this slot are updated; when false the def comes from a remat or
/// an inserted copy, so the lanes the defining instruction writes decide
/// which subranges receive a def.
void SplitEditor::addDeadDef(LiveInterval &LI, VNInfo *VNI, bool Original) {
  if (!LI.hasSubRanges()) {
    LI.createDeadDef(VNI);
    return;
  }

  SlotIndex Def = VNI->def;
  if (Original) {
    // If we are transferring a def from the original interval, make sure
    // to only update the subranges for which the original subranges had
    // a def at this location.
    for (LiveInterval::SubRange &S : LI.subranges()) {
      auto &PS = getSubRangeForMask(S.LaneMask, Edit->getParent());
      VNInfo *PV = PS.getVNInfoAt(Def);
      if (PV != nullptr && PV->def == Def)
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
    }
  } else {
    // This is a new def: either from rematerialization, or from an inserted
    // copy. Since rematerialization can regenerate a definition of a sub-
    // register, we need to check which subranges need to be updated.
    const MachineInstr *DefMI = LIS.getInstructionFromIndex(Def);
    assert(DefMI != nullptr);
    LaneBitmask LM;
    for (const MachineOperand &DefOp : DefMI->defs()) {
      Register R = DefOp.getReg();
      if (R != LI.reg)
        continue;
      if (unsigned SR = DefOp.getSubReg())
        LM |= TRI.getSubRegIndexLaneMask(SR);
      else {
        // A full-register def covers every lane; no need to look further.
        LM = MRI.getMaxLaneMaskForVReg(R);
        break;
      }
    }
    for (LiveInterval::SubRange &S : LI.subranges())
      if ((S.LaneMask & LM).any())
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
  }
}
/// defValue - Define a value in interval RegIdx mapped to ParentVNI at Idx.
/// The first unforced mapping of (RegIdx, ParentVNI) is kept "simple" — a
/// bare VNInfo with no liveness, to be extended later. A second mapping (or
/// an interval with subranges) switches to a "complex" mapping where every
/// def gets an explicit dead-def segment and liveness is recomputed.
VNInfo *SplitEditor::defValue(unsigned RegIdx,
                              const VNInfo *ParentVNI,
                              SlotIndex Idx,
                              bool Original) {
  assert(ParentVNI && "Mapping NULL value");
  assert(Idx.isValid() && "Invalid SlotIndex");
  assert(Edit->getParent().getVNInfoAt(Idx) == ParentVNI && "Bad Parent VNI");
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // Create a new value.
  VNInfo *VNI = LI->getNextValue(Idx, LIS.getVNInfoAllocator());

  // Subrange-bearing intervals can never use the simple-mapping shortcut.
  bool Force = LI->hasSubRanges();
  ValueForcePair FP(Force ? nullptr : VNI, Force);
  // Use insert for lookup, so we can add missing values with a second lookup.
  std::pair<ValueMap::iterator, bool> InsP =
      Values.insert(std::make_pair(std::make_pair(RegIdx, ParentVNI->id), FP));

  // This was the first time (RegIdx, ParentVNI) was mapped, and it is not
  // forced. Keep it as a simple def without any liveness.
  if (!Force && InsP.second)
    return VNI;

  // If the previous value was a simple mapping, add liveness for it now.
  if (VNInfo *OldVNI = InsP.first->second.getPointer()) {
    addDeadDef(*LI, OldVNI, Original);

    // No longer a simple mapping. Switch to a complex mapping. If the
    // interval has subranges, make it a forced mapping.
    InsP.first->second = ValueForcePair(nullptr, Force);
  }

  // This is a complex mapping, add liveness for VNI
  addDeadDef(*LI, VNI, Original);
  return VNI;
}
/// Force the live range of ParentVNI in interval RegIdx to be recomputed by
/// LiveRangeCalc instead of relying on a simple value mapping.
void SplitEditor::forceRecompute(unsigned RegIdx, const VNInfo &ParentVNI) {
  ValueForcePair &VFP = Values[std::make_pair(RegIdx, ParentVNI.id)];
  VNInfo *VNI = VFP.getPointer();

  // ParentVNI was either unmapped or already complex mapped. Either way, just
  // set the force bit.
  if (!VNI) {
    VFP.setInt(true);
    return;
  }

  // This was previously a single mapping. Make sure the old def is represented
  // by a trivial live range.
  addDeadDef(LIS.getInterval(Edit->get(RegIdx)), VNI, false);

  // Mark as complex mapped, forced.
  VFP = ValueForcePair(nullptr, true);
}
/// Emit one subregister COPY of lanes SubIdx from FromReg into ToReg before
/// InsertBefore. The first copy of a sequence (invalid \p Def) defines
/// ToReg with undef other lanes and is given its own slot index; follow-up
/// copies read the partial def internally, are bundled with their
/// predecessor, and reuse \p Def. Returns the def slot index.
SlotIndex SplitEditor::buildSingleSubRegCopy(unsigned FromReg, unsigned ToReg,
    MachineBasicBlock &MBB, MachineBasicBlock::iterator InsertBefore,
    unsigned SubIdx, LiveInterval &DestLI, bool Late, SlotIndex Def) {
  const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  bool FirstCopy = !Def.isValid();
  MachineInstr *CopyMI = BuildMI(MBB, InsertBefore, DebugLoc(), Desc)
      .addReg(ToReg, RegState::Define
              | getUndefRegState(FirstCopy)
              | getInternalReadRegState(!FirstCopy), SubIdx)
      .addReg(FromReg, 0, SubIdx);

  BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
  SlotIndexes &Indexes = *LIS.getSlotIndexes();
  if (FirstCopy) {
    Def = Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  } else {
    CopyMI->bundleWithPred();
  }
  // Make sure DestLI has subranges covering exactly these lanes, each with
  // a dead def at the copy's slot.
  LaneBitmask LaneMask = TRI.getSubRegIndexLaneMask(SubIdx);
  DestLI.refineSubRanges(Allocator, LaneMask,
                         [Def, &Allocator](LiveInterval::SubRange &SR) {
    SR.createDeadDef(Def, Allocator);
  },
  Indexes, TRI);
  return Def;
}
  465. SlotIndex SplitEditor::buildCopy(unsigned FromReg, unsigned ToReg,
  466. LaneBitmask LaneMask, MachineBasicBlock &MBB,
  467. MachineBasicBlock::iterator InsertBefore, bool Late, unsigned RegIdx) {
  468. const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  469. if (LaneMask.all() || LaneMask == MRI.getMaxLaneMaskForVReg(FromReg)) {
  470. // The full vreg is copied.
  471. MachineInstr *CopyMI =
  472. BuildMI(MBB, InsertBefore, DebugLoc(), Desc, ToReg).addReg(FromReg);
  473. SlotIndexes &Indexes = *LIS.getSlotIndexes();
  474. return Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  475. }
  476. // Only a subset of lanes needs to be copied. The following is a simple
  477. // heuristic to construct a sequence of COPYs. We could add a target
  478. // specific callback if this turns out to be suboptimal.
  479. LiveInterval &DestLI = LIS.getInterval(Edit->get(RegIdx));
  480. // First pass: Try to find a perfectly matching subregister index. If none
  481. // exists find the one covering the most lanemask bits.
  482. SmallVector<unsigned, 8> PossibleIndexes;
  483. unsigned BestIdx = 0;
  484. unsigned BestCover = 0;
  485. const TargetRegisterClass *RC = MRI.getRegClass(FromReg);
  486. assert(RC == MRI.getRegClass(ToReg) && "Should have same reg class");
  487. for (unsigned Idx = 1, E = TRI.getNumSubRegIndices(); Idx < E; ++Idx) {
  488. // Is this index even compatible with the given class?
  489. if (TRI.getSubClassWithSubReg(RC, Idx) != RC)
  490. continue;
  491. LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
  492. // Early exit if we found a perfect match.
  493. if (SubRegMask == LaneMask) {
  494. BestIdx = Idx;
  495. break;
  496. }
  497. // The index must not cover any lanes outside \p LaneMask.
  498. if ((SubRegMask & ~LaneMask).any())
  499. continue;
  500. unsigned PopCount = SubRegMask.getNumLanes();
  501. PossibleIndexes.push_back(Idx);
  502. if (PopCount > BestCover) {
  503. BestCover = PopCount;
  504. BestIdx = Idx;
  505. }
  506. }
  507. // Abort if we cannot possibly implement the COPY with the given indexes.
  508. if (BestIdx == 0)
  509. report_fatal_error("Impossible to implement partial COPY");
  510. SlotIndex Def = buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore,
  511. BestIdx, DestLI, Late, SlotIndex());
  512. // Greedy heuristic: Keep iterating keeping the best covering subreg index
  513. // each time.
  514. LaneBitmask LanesLeft = LaneMask & ~(TRI.getSubRegIndexLaneMask(BestIdx));
  515. while (LanesLeft.any()) {
  516. unsigned BestIdx = 0;
  517. int BestCover = std::numeric_limits<int>::min();
  518. for (unsigned Idx : PossibleIndexes) {
  519. LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
  520. // Early exit if we found a perfect match.
  521. if (SubRegMask == LanesLeft) {
  522. BestIdx = Idx;
  523. break;
  524. }
  525. // Try to cover as much of the remaining lanes as possible but
  526. // as few of the already covered lanes as possible.
  527. int Cover = (SubRegMask & LanesLeft).getNumLanes()
  528. - (SubRegMask & ~LanesLeft).getNumLanes();
  529. if (Cover > BestCover) {
  530. BestCover = Cover;
  531. BestIdx = Idx;
  532. }
  533. }
  534. if (BestIdx == 0)
  535. report_fatal_error("Impossible to implement partial COPY");
  536. buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore, BestIdx,
  537. DestLI, Late, Def);
  538. LanesLeft &= ~TRI.getSubRegIndexLaneMask(BestIdx);
  539. }
  540. return Def;
  541. }
/// defFromParent - Define the register of interval RegIdx from ParentVNI at
/// UseIdx, preferring cheap rematerialization over a COPY. Returns the new
/// value mapped via defValue().
VNInfo *SplitEditor::defFromParent(unsigned RegIdx,
                                   VNInfo *ParentVNI,
                                   SlotIndex UseIdx,
                                   MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator I) {
  SlotIndex Def;
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // We may be trying to avoid interference that ends at a deleted instruction,
  // so always begin RegIdx 0 early and all others late.
  bool Late = RegIdx != 0;

  // Attempt cheap-as-a-copy rematerialization.
  unsigned Original = VRM.getOriginal(Edit->get(RegIdx));
  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(UseIdx);

  unsigned Reg = LI->reg;
  bool DidRemat = false;
  if (OrigVNI) {
    LiveRangeEdit::Remat RM(ParentVNI);
    RM.OrigMI = LIS.getInstructionFromIndex(OrigVNI->def);
    if (Edit->canRematerializeAt(RM, OrigVNI, UseIdx, true)) {
      Def = Edit->rematerializeAt(MBB, I, Reg, RM, TRI, Late);
      ++NumRemats;
      DidRemat = true;
    }
  }
  if (!DidRemat) {
    // Can't remat: insert a COPY, restricted to the union of live subrange
    // lanes when the interval tracks subranges.
    LaneBitmask LaneMask;
    if (LI->hasSubRanges()) {
      LaneMask = LaneBitmask::getNone();
      for (LiveInterval::SubRange &S : LI->subranges())
        LaneMask |= S.LaneMask;
    } else {
      LaneMask = LaneBitmask::getAll();
    }

    ++NumCopies;
    Def = buildCopy(Edit->getReg(), Reg, LaneMask, MBB, I, Late, RegIdx);
  }

  // Define the value in Reg.
  return defValue(RegIdx, ParentVNI, Def, false);
}
  582. /// Create a new virtual register and live interval.
  583. unsigned SplitEditor::openIntv() {
  584. // Create the complement as index 0.
  585. if (Edit->empty())
  586. Edit->createEmptyInterval();
  587. // Create the open interval.
  588. OpenIdx = Edit->size();
  589. Edit->createEmptyInterval();
  590. return OpenIdx;
  591. }
/// selectIntv - Make a previously opened interval the current target for
/// subsequent enter/leave operations. Index 0 (the complement) can never be
/// selected directly.
void SplitEditor::selectIntv(unsigned Idx) {
  assert(Idx != 0 && "Cannot select the complement interval");
  assert(Idx < Edit->size() && "Can only select previously opened interval");
  LLVM_DEBUG(dbgs() << " selectIntv " << OpenIdx << " -> " << Idx << '\n');
  OpenIdx = Idx;
}
  598. SlotIndex SplitEditor::enterIntvBefore(SlotIndex Idx) {
  599. assert(OpenIdx && "openIntv not called before enterIntvBefore");
  600. LLVM_DEBUG(dbgs() << " enterIntvBefore " << Idx);
  601. Idx = Idx.getBaseIndex();
  602. VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  603. if (!ParentVNI) {
  604. LLVM_DEBUG(dbgs() << ": not live\n");
  605. return Idx;
  606. }
  607. LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  608. MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  609. assert(MI && "enterIntvBefore called with invalid index");
  610. VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(), MI);
  611. return VNI->def;
  612. }
  613. SlotIndex SplitEditor::enterIntvAfter(SlotIndex Idx) {
  614. assert(OpenIdx && "openIntv not called before enterIntvAfter");
  615. LLVM_DEBUG(dbgs() << " enterIntvAfter " << Idx);
  616. Idx = Idx.getBoundaryIndex();
  617. VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  618. if (!ParentVNI) {
  619. LLVM_DEBUG(dbgs() << ": not live\n");
  620. return Idx;
  621. }
  622. LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  623. MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  624. assert(MI && "enterIntvAfter called with invalid index");
  625. VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(),
  626. std::next(MachineBasicBlock::iterator(MI)));
  627. return VNI->def;
  628. }
  629. SlotIndex SplitEditor::enterIntvAtEnd(MachineBasicBlock &MBB) {
  630. assert(OpenIdx && "openIntv not called before enterIntvAtEnd");
  631. SlotIndex End = LIS.getMBBEndIdx(&MBB);
  632. SlotIndex Last = End.getPrevSlot();
  633. LLVM_DEBUG(dbgs() << " enterIntvAtEnd " << printMBBReference(MBB) << ", "
  634. << Last);
  635. VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Last);
  636. if (!ParentVNI) {
  637. LLVM_DEBUG(dbgs() << ": not live\n");
  638. return End;
  639. }
  640. LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id);
  641. VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Last, MBB,
  642. SA.getLastSplitPointIter(&MBB));
  643. RegAssign.insert(VNI->def, End, OpenIdx);
  644. LLVM_DEBUG(dump());
  645. return VNI->def;
  646. }
  647. /// useIntv - indicate that all instructions in MBB should use OpenLI.
  648. void SplitEditor::useIntv(const MachineBasicBlock &MBB) {
  649. useIntv(LIS.getMBBStartIdx(&MBB), LIS.getMBBEndIdx(&MBB));
  650. }
  651. void SplitEditor::useIntv(SlotIndex Start, SlotIndex End) {
  652. assert(OpenIdx && "openIntv not called before useIntv");
  653. LLVM_DEBUG(dbgs() << " useIntv [" << Start << ';' << End << "):");
  654. RegAssign.insert(Start, End, OpenIdx);
  655. LLVM_DEBUG(dump());
  656. }
/// Leave the open interval after the instruction at Idx by copying the value
/// back to the complement (RegIdx 0). Returns the index where the complement
/// becomes live again.
SlotIndex SplitEditor::leaveIntvAfter(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before leaveIntvAfter");
  LLVM_DEBUG(dbgs() << " leaveIntvAfter " << Idx);

  // The interval must be live beyond the instruction at Idx.
  SlotIndex Boundary = Idx.getBoundaryIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Boundary);
  if (!ParentVNI) {
    // Parent not live past Idx; nothing to copy back.
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Boundary.getNextSlot();
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Boundary);
  assert(MI && "No instruction at index");

  // In spill mode, make live ranges as short as possible by inserting the copy
  // before MI. This is only possible if that instruction doesn't redefine the
  // value. The inserted COPY is not a kill, and we don't need to recompute
  // the source live range. The spiller also won't try to hoist this copy.
  if (SpillMode && !SlotIndex::isSameInstr(ParentVNI->def, Idx) &&
      MI->readsVirtualRegister(Edit->getReg())) {
    forceRecompute(0, *ParentVNI);
    defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
    return Idx;
  }

  // Otherwise, insert the back-copy to the complement right after MI.
  VNInfo *VNI = defFromParent(0, ParentVNI, Boundary, *MI->getParent(),
                              std::next(MachineBasicBlock::iterator(MI)));
  return VNI->def;
}
  684. SlotIndex SplitEditor::leaveIntvBefore(SlotIndex Idx) {
  685. assert(OpenIdx && "openIntv not called before leaveIntvBefore");
  686. LLVM_DEBUG(dbgs() << " leaveIntvBefore " << Idx);
  687. // The interval must be live into the instruction at Idx.
  688. Idx = Idx.getBaseIndex();
  689. VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  690. if (!ParentVNI) {
  691. LLVM_DEBUG(dbgs() << ": not live\n");
  692. return Idx.getNextSlot();
  693. }
  694. LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  695. MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  696. assert(MI && "No instruction at index");
  697. VNInfo *VNI = defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
  698. return VNI->def;
  699. }
  700. SlotIndex SplitEditor::leaveIntvAtTop(MachineBasicBlock &MBB) {
  701. assert(OpenIdx && "openIntv not called before leaveIntvAtTop");
  702. SlotIndex Start = LIS.getMBBStartIdx(&MBB);
  703. LLVM_DEBUG(dbgs() << " leaveIntvAtTop " << printMBBReference(MBB) << ", "
  704. << Start);
  705. VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  706. if (!ParentVNI) {
  707. LLVM_DEBUG(dbgs() << ": not live\n");
  708. return Start;
  709. }
  710. VNInfo *VNI = defFromParent(0, ParentVNI, Start, MBB,
  711. MBB.SkipPHIsLabelsAndDebug(MBB.begin()));
  712. RegAssign.insert(Start, VNI->def, OpenIdx);
  713. LLVM_DEBUG(dump());
  714. return VNI->def;
  715. }
  716. void SplitEditor::overlapIntv(SlotIndex Start, SlotIndex End) {
  717. assert(OpenIdx && "openIntv not called before overlapIntv");
  718. const VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  719. assert(ParentVNI == Edit->getParent().getVNInfoBefore(End) &&
  720. "Parent changes value in extended range");
  721. assert(LIS.getMBBFromIndex(Start) == LIS.getMBBFromIndex(End) &&
  722. "Range cannot span basic blocks");
  723. // The complement interval will be extended as needed by LRCalc.extend().
  724. if (ParentVNI)
  725. forceRecompute(0, *ParentVNI);
  726. LLVM_DEBUG(dbgs() << " overlapIntv [" << Start << ';' << End << "):");
  727. RegAssign.insert(Start, End, OpenIdx);
  728. LLVM_DEBUG(dump());
  729. }
  730. //===----------------------------------------------------------------------===//
  731. // Spill modes
  732. //===----------------------------------------------------------------------===//
  733. void SplitEditor::removeBackCopies(SmallVectorImpl<VNInfo*> &Copies) {
  734. LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  735. LLVM_DEBUG(dbgs() << "Removing " << Copies.size() << " back-copies.\n");
  736. RegAssignMap::iterator AssignI;
  737. AssignI.setMap(RegAssign);
  738. for (unsigned i = 0, e = Copies.size(); i != e; ++i) {
  739. SlotIndex Def = Copies[i]->def;
  740. MachineInstr *MI = LIS.getInstructionFromIndex(Def);
  741. assert(MI && "No instruction for back-copy");
  742. MachineBasicBlock *MBB = MI->getParent();
  743. MachineBasicBlock::iterator MBBI(MI);
  744. bool AtBegin;
  745. do AtBegin = MBBI == MBB->begin();
  746. while (!AtBegin && (--MBBI)->isDebugInstr());
  747. LLVM_DEBUG(dbgs() << "Removing " << Def << '\t' << *MI);
  748. LIS.removeVRegDefAt(*LI, Def);
  749. LIS.RemoveMachineInstrFromMaps(*MI);
  750. MI->eraseFromParent();
  751. // Adjust RegAssign if a register assignment is killed at Def. We want to
  752. // avoid calculating the live range of the source register if possible.
  753. AssignI.find(Def.getPrevSlot());
  754. if (!AssignI.valid() || AssignI.start() >= Def)
  755. continue;
  756. // If MI doesn't kill the assigned register, just leave it.
  757. if (AssignI.stop() != Def)
  758. continue;
  759. unsigned RegIdx = AssignI.value();
  760. if (AtBegin || !MBBI->readsVirtualRegister(Edit->getReg())) {
  761. LLVM_DEBUG(dbgs() << " cannot find simple kill of RegIdx " << RegIdx
  762. << '\n');
  763. forceRecompute(RegIdx, *Edit->getParent().getVNInfoAt(Def));
  764. } else {
  765. SlotIndex Kill = LIS.getInstructionIndex(*MBBI).getRegSlot();
  766. LLVM_DEBUG(dbgs() << " move kill to " << Kill << '\t' << *MBBI);
  767. AssignI.setStop(Kill);
  768. }
  769. }
  770. }
  771. MachineBasicBlock*
  772. SplitEditor::findShallowDominator(MachineBasicBlock *MBB,
  773. MachineBasicBlock *DefMBB) {
  774. if (MBB == DefMBB)
  775. return MBB;
  776. assert(MDT.dominates(DefMBB, MBB) && "MBB must be dominated by the def.");
  777. const MachineLoopInfo &Loops = SA.Loops;
  778. const MachineLoop *DefLoop = Loops.getLoopFor(DefMBB);
  779. MachineDomTreeNode *DefDomNode = MDT[DefMBB];
  780. // Best candidate so far.
  781. MachineBasicBlock *BestMBB = MBB;
  782. unsigned BestDepth = std::numeric_limits<unsigned>::max();
  783. while (true) {
  784. const MachineLoop *Loop = Loops.getLoopFor(MBB);
  785. // MBB isn't in a loop, it doesn't get any better. All dominators have a
  786. // higher frequency by definition.
  787. if (!Loop) {
  788. LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
  789. << " dominates " << printMBBReference(*MBB)
  790. << " at depth 0\n");
  791. return MBB;
  792. }
  793. // We'll never be able to exit the DefLoop.
  794. if (Loop == DefLoop) {
  795. LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
  796. << " dominates " << printMBBReference(*MBB)
  797. << " in the same loop\n");
  798. return MBB;
  799. }
  800. // Least busy dominator seen so far.
  801. unsigned Depth = Loop->getLoopDepth();
  802. if (Depth < BestDepth) {
  803. BestMBB = MBB;
  804. BestDepth = Depth;
  805. LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
  806. << " dominates " << printMBBReference(*MBB)
  807. << " at depth " << Depth << '\n');
  808. }
  809. // Leave loop by going to the immediate dominator of the loop header.
  810. // This is a bigger stride than simply walking up the dominator tree.
  811. MachineDomTreeNode *IDom = MDT[Loop->getHeader()]->getIDom();
  812. // Too far up the dominator tree?
  813. if (!IDom || !MDT.dominates(DefDomNode, IDom))
  814. return BestMBB;
  815. MBB = IDom->getBlock();
  816. }
  817. }
/// For parent values whose back-copies were rejected for hoisting (SM_Speed),
/// find back-copies dominated by an equal-valued one and queue them for
/// removal in BackCopies.
void SplitEditor::computeRedundantBackCopies(
    DenseSet<unsigned> &NotToHoistSet, SmallVectorImpl<VNInfo *> &BackCopies) {
  // The complement interval (RegIdx 0) holds all the back-copies.
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();
  // EqualVNs[id] collects the complement values that carry parent value id.
  SmallVector<SmallPtrSet<VNInfo *, 8>, 8> EqualVNs(Parent->getNumValNums());
  SmallPtrSet<VNInfo *, 8> DominatedVNIs;

  // Aggregate VNIs having the same value as ParentVNI.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    EqualVNs[ParentVNI->id].insert(VNI);
  }

  // For VNI aggregation of each ParentVNI, collect dominated, i.e.,
  // redundant VNIs to BackCopies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    // Only parent values that were rejected for hoisting are handled here.
    if (!NotToHoistSet.count(ParentVNI->id))
      continue;
    // Pairwise comparison: within one block the later def is redundant;
    // across blocks the dominated def is redundant.
    SmallPtrSetIterator<VNInfo *> It1 = EqualVNs[ParentVNI->id].begin();
    SmallPtrSetIterator<VNInfo *> It2 = It1;
    for (; It1 != EqualVNs[ParentVNI->id].end(); ++It1) {
      It2 = It1;
      for (++It2; It2 != EqualVNs[ParentVNI->id].end(); ++It2) {
        // Skip values already known to be dominated.
        if (DominatedVNIs.count(*It1) || DominatedVNIs.count(*It2))
          continue;
        MachineBasicBlock *MBB1 = LIS.getMBBFromIndex((*It1)->def);
        MachineBasicBlock *MBB2 = LIS.getMBBFromIndex((*It2)->def);
        if (MBB1 == MBB2) {
          DominatedVNIs.insert((*It1)->def < (*It2)->def ? (*It2) : (*It1));
        } else if (MDT.dominates(MBB1, MBB2)) {
          DominatedVNIs.insert(*It2);
        } else if (MDT.dominates(MBB2, MBB1)) {
          DominatedVNIs.insert(*It1);
        }
      }
    }
    if (!DominatedVNIs.empty()) {
      // Some copies of this parent value will disappear; its complement
      // range must be recomputed.
      forceRecompute(0, *ParentVNI);
      for (auto VNI : DominatedVNIs) {
        BackCopies.push_back(VNI);
      }
      DominatedVNIs.clear();
    }
  }
}
/// For SM_Size mode, find a common dominator for all the back-copies for
/// the same ParentVNI and hoist the backcopies to the dominator BB.
/// For SM_Speed mode, if the common dominator is hot and it is not beneficial
/// to do the hoisting, simply remove the dominated backcopies for the same
/// ParentVNI.
void SplitEditor::hoistCopies() {
  // Get the complement interval, always RegIdx 0.
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();

  // Track the nearest common dominator for all back-copies for each ParentVNI,
  // indexed by ParentVNI->id.
  using DomPair = std::pair<MachineBasicBlock *, SlotIndex>;
  SmallVector<DomPair, 8> NearestDom(Parent->getNumValNums());

  // The total cost of all the back-copies for each ParentVNI.
  SmallVector<BlockFrequency, 8> Costs(Parent->getNumValNums());

  // The ParentVNI->id set for which hoisting back-copies are not beneficial
  // for Speed.
  DenseSet<unsigned> NotToHoistSet;

  // Find the nearest common dominator for parent values with multiple
  // back-copies. If a single back-copy dominates, put it in DomPair.second.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    assert(ParentVNI && "Parent not live at complement def");

    // Don't hoist remats. The complement is probably going to disappear
    // completely anyway.
    if (Edit->didRematerialize(ParentVNI))
      continue;

    MachineBasicBlock *ValMBB = LIS.getMBBFromIndex(VNI->def);

    DomPair &Dom = NearestDom[ParentVNI->id];

    // Keep directly defined parent values. This is either a PHI or an
    // instruction in the complement range. All other copies of ParentVNI
    // should be eliminated.
    if (VNI->def == ParentVNI->def) {
      LLVM_DEBUG(dbgs() << "Direct complement def at " << VNI->def << '\n');
      Dom = DomPair(ValMBB, VNI->def);
      continue;
    }
    // Skip the singly mapped values. There is nothing to gain from hoisting a
    // single back-copy.
    if (Values.lookup(std::make_pair(0, ParentVNI->id)).getPointer()) {
      LLVM_DEBUG(dbgs() << "Single complement def at " << VNI->def << '\n');
      continue;
    }

    if (!Dom.first) {
      // First time we see ParentVNI. VNI dominates itself.
      Dom = DomPair(ValMBB, VNI->def);
    } else if (Dom.first == ValMBB) {
      // Two defs in the same block. Pick the earlier def.
      if (!Dom.second.isValid() || VNI->def < Dom.second)
        Dom.second = VNI->def;
    } else {
      // Different basic blocks. Check if one dominates.
      MachineBasicBlock *Near =
        MDT.findNearestCommonDominator(Dom.first, ValMBB);
      if (Near == ValMBB)
        // Def ValMBB dominates.
        Dom = DomPair(ValMBB, VNI->def);
      else if (Near != Dom.first)
        // None dominate. Hoist to common dominator, need new def.
        Dom = DomPair(Near, SlotIndex());
      // Accumulate the frequency cost of the back-copies being replaced.
      Costs[ParentVNI->id] += MBFI.getBlockFreq(ValMBB);
    }

    LLVM_DEBUG(dbgs() << "Multi-mapped complement " << VNI->id << '@'
                      << VNI->def << " for parent " << ParentVNI->id << '@'
                      << ParentVNI->def << " hoist to "
                      << printMBBReference(*Dom.first) << ' ' << Dom.second
                      << '\n');
  }

  // Insert the hoisted copies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    DomPair &Dom = NearestDom[i];
    // A valid Dom.second means a single existing def already dominates;
    // nothing to insert for this parent value.
    if (!Dom.first || Dom.second.isValid())
      continue;
    // This value needs a hoisted copy inserted at the end of Dom.first.
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    MachineBasicBlock *DefMBB = LIS.getMBBFromIndex(ParentVNI->def);
    // Get a less loopy dominator than Dom.first.
    Dom.first = findShallowDominator(Dom.first, DefMBB);
    // In speed mode, skip hoisting when the hoist point is hotter than the
    // combined cost of the back-copies it would replace.
    if (SpillMode == SM_Speed &&
        MBFI.getBlockFreq(Dom.first) > Costs[ParentVNI->id]) {
      NotToHoistSet.insert(ParentVNI->id);
      continue;
    }
    SlotIndex Last = LIS.getMBBEndIdx(Dom.first).getPrevSlot();
    Dom.second =
      defFromParent(0, ParentVNI, Last, *Dom.first,
                    SA.getLastSplitPointIter(Dom.first))->def;
  }

  // Remove redundant back-copies that are now known to be dominated by another
  // def with the same value.
  SmallVector<VNInfo*, 8> BackCopies;
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    const DomPair &Dom = NearestDom[ParentVNI->id];
    if (!Dom.first || Dom.second == VNI->def ||
        NotToHoistSet.count(ParentVNI->id))
      continue;
    BackCopies.push_back(VNI);
    forceRecompute(0, *ParentVNI);
  }

  // If it is not beneficial to hoist all the BackCopies, simply remove
  // redundant BackCopies in speed mode.
  if (SpillMode == SM_Speed && !NotToHoistSet.empty())
    computeRedundantBackCopies(NotToHoistSet, BackCopies);

  removeBackCopies(BackCopies);
}
/// transferValues - Transfer all possible values to the new live ranges.
/// Values that were rematerialized are left alone, they need LRCalc.extend().
/// Returns true if any value was skipped and still needs extension.
bool SplitEditor::transferValues() {
  bool Skipped = false;
  RegAssignMap::const_iterator AssignI = RegAssign.begin();
  // Walk every segment of the parent interval and "blit" each piece into
  // the interval selected by the RegAssign map.
  for (const LiveRange::Segment &S : Edit->getParent()) {
    LLVM_DEBUG(dbgs() << " blit " << S << ':');
    VNInfo *ParentVNI = S.valno;
    // RegAssign has holes where RegIdx 0 should be used.
    SlotIndex Start = S.start;
    AssignI.advanceTo(Start);
    do {
      unsigned RegIdx;
      SlotIndex End = S.end;
      if (!AssignI.valid()) {
        // Past the last assignment: everything left maps to the complement.
        RegIdx = 0;
      } else if (AssignI.start() <= Start) {
        // Inside an assignment; possibly clip End to the assignment's stop.
        RegIdx = AssignI.value();
        if (AssignI.stop() < End) {
          End = AssignI.stop();
          ++AssignI;
        }
      } else {
        // In a hole before the next assignment: complement up to its start.
        RegIdx = 0;
        End = std::min(End, AssignI.start());
      }

      // The interval [Start;End) is continuously mapped to RegIdx, ParentVNI.
      LLVM_DEBUG(dbgs() << " [" << Start << ';' << End << ")=" << RegIdx << '('
                        << printReg(Edit->get(RegIdx)) << ')');
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));

      // Check for a simply defined value that can be blitted directly.
      ValueForcePair VFP = Values.lookup(std::make_pair(RegIdx, ParentVNI->id));
      if (VNInfo *VNI = VFP.getPointer()) {
        LLVM_DEBUG(dbgs() << ':' << VNI->id);
        LI.addSegment(LiveInterval::Segment(Start, End, VNI));
        Start = End;
        continue;
      }

      // Skip values with forced recomputation.
      if (VFP.getInt()) {
        LLVM_DEBUG(dbgs() << "(recalc)");
        Skipped = true;
        Start = End;
        continue;
      }

      LiveRangeCalc &LRC = getLRCalc(RegIdx);

      // This value has multiple defs in RegIdx, but it wasn't rematerialized,
      // so the live range is accurate. Add live-in blocks in [Start;End) to the
      // LiveInBlocks.
      MachineFunction::iterator MBB = LIS.getMBBFromIndex(Start)->getIterator();
      SlotIndex BlockStart, BlockEnd;
      std::tie(BlockStart, BlockEnd) = LIS.getSlotIndexes()->getMBBRange(&*MBB);

      // The first block may be live-in, or it may have its own def.
      if (Start != BlockStart) {
        VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
        assert(VNI && "Missing def for complex mapped value");
        LLVM_DEBUG(dbgs() << ':' << VNI->id << "*" << printMBBReference(*MBB));
        // MBB has its own def. Is it also live-out?
        if (BlockEnd <= End)
          LRC.setLiveOutValue(&*MBB, VNI);

        // Skip to the next block for live-in.
        ++MBB;
        BlockStart = BlockEnd;
      }

      // Handle the live-in blocks covered by [Start;End).
      assert(Start <= BlockStart && "Expected live-in block");
      while (BlockStart < End) {
        LLVM_DEBUG(dbgs() << ">" << printMBBReference(*MBB));
        BlockEnd = LIS.getMBBEndIdx(&*MBB);
        if (BlockStart == ParentVNI->def) {
          // This block has the def of a parent PHI, so it isn't live-in.
          assert(ParentVNI->isPHIDef() && "Non-phi defined at block start?");
          VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
          assert(VNI && "Missing def for complex mapped parent PHI");
          if (End >= BlockEnd)
            LRC.setLiveOutValue(&*MBB, VNI); // Live-out as well.
        } else {
          // This block needs a live-in value. The last block covered may not
          // be live-out.
          if (End < BlockEnd)
            LRC.addLiveInBlock(LI, MDT[&*MBB], End);
          else {
            // Live-through, and we don't know the value.
            LRC.addLiveInBlock(LI, MDT[&*MBB]);
            LRC.setLiveOutValue(&*MBB, nullptr);
          }
        }
        BlockStart = BlockEnd;
        ++MBB;
      }
      Start = End;
    } while (Start != S.end);
    LLVM_DEBUG(dbgs() << '\n');
  }

  // Resolve the queued live-in blocks for the complement and, in spill mode,
  // for the second calculator.
  LRCalc[0].calculateValues();
  if (SpillMode)
    LRCalc[1].calculateValues();

  return Skipped;
}
  1073. static bool removeDeadSegment(SlotIndex Def, LiveRange &LR) {
  1074. const LiveRange::Segment *Seg = LR.getSegmentContaining(Def);
  1075. if (Seg == nullptr)
  1076. return true;
  1077. if (Seg->end != Def.getDeadSlot())
  1078. return false;
  1079. // This is a dead PHI. Remove it.
  1080. LR.removeSegment(*Seg, true);
  1081. return true;
  1082. }
  1083. void SplitEditor::extendPHIRange(MachineBasicBlock &B, LiveRangeCalc &LRC,
  1084. LiveRange &LR, LaneBitmask LM,
  1085. ArrayRef<SlotIndex> Undefs) {
  1086. for (MachineBasicBlock *P : B.predecessors()) {
  1087. SlotIndex End = LIS.getMBBEndIdx(P);
  1088. SlotIndex LastUse = End.getPrevSlot();
  1089. // The predecessor may not have a live-out value. That is OK, like an
  1090. // undef PHI operand.
  1091. LiveInterval &PLI = Edit->getParent();
  1092. // Need the cast because the inputs to ?: would otherwise be deemed
  1093. // "incompatible": SubRange vs LiveInterval.
  1094. LiveRange &PSR = !LM.all() ? getSubRangeForMask(LM, PLI)
  1095. : static_cast<LiveRange&>(PLI);
  1096. if (PSR.liveAt(LastUse))
  1097. LRC.extend(LR, End, /*PhysReg=*/0, Undefs);
  1098. }
  1099. }
void SplitEditor::extendPHIKillRanges() {
  // Extend live ranges to be live-out for successor PHI values.

  // Visit each PHI def slot in the parent live interval. If the def is dead,
  // remove it. Otherwise, extend the live interval to reach the end indexes
  // of all predecessor blocks.
  LiveInterval &ParentLI = Edit->getParent();
  for (const VNInfo *V : ParentLI.valnos) {
    if (V->isUnused() || !V->isPHIDef())
      continue;

    // Map the PHI def to the register it was assigned to.
    unsigned RegIdx = RegAssign.lookup(V->def);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    LiveRangeCalc &LRC = getLRCalc(RegIdx);
    MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
    if (!removeDeadSegment(V->def, LI))
      extendPHIRange(B, LRC, LI, LaneBitmask::getAll(), /*Undefs=*/{});
  }

  // Repeat the process for each subrange of the parent, using a private
  // LiveRangeCalc and the subrange's undef points.
  SmallVector<SlotIndex, 4> Undefs;
  LiveRangeCalc SubLRC;

  for (LiveInterval::SubRange &PS : ParentLI.subranges()) {
    for (const VNInfo *V : PS.valnos) {
      if (V->isUnused() || !V->isPHIDef())
        continue;
      unsigned RegIdx = RegAssign.lookup(V->def);
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
      LiveInterval::SubRange &S = getSubRangeForMask(PS.LaneMask, LI);
      if (removeDeadSegment(V->def, S))
        continue;

      MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
      // Reset the calculator for each PHI; state must not leak across them.
      SubLRC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      Undefs.clear();
      LI.computeSubRangeUndefs(Undefs, PS.LaneMask, MRI, *LIS.getSlotIndexes());
      extendPHIRange(B, SubLRC, S, PS.LaneMask, Undefs);
    }
  }
}
/// rewriteAssigned - Rewrite all uses of Edit->getReg() to the register
/// mapped by RegAssign at each use's slot index. When ExtendRanges is set,
/// also extend the new intervals to cover each rewritten read.
void SplitEditor::rewriteAssigned(bool ExtendRanges) {
  // Deferred subrange extension point: operand, target register index, and
  // the slot to extend to.
  struct ExtPoint {
    ExtPoint(const MachineOperand &O, unsigned R, SlotIndex N)
      : MO(O), RegIdx(R), Next(N) {}
    MachineOperand MO;
    unsigned RegIdx;
    SlotIndex Next;
  };

  SmallVector<ExtPoint,4> ExtPoints;

  // Note: RI is advanced before MO is modified, because setReg() would
  // otherwise invalidate the use-list iterator.
  for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(Edit->getReg()),
       RE = MRI.reg_end(); RI != RE;) {
    MachineOperand &MO = *RI;
    MachineInstr *MI = MO.getParent();
    ++RI;
    // LiveDebugVariables should have handled all DBG_VALUE instructions.
    if (MI->isDebugValue()) {
      LLVM_DEBUG(dbgs() << "Zapping " << *MI);
      MO.setReg(0);
      continue;
    }

    // <undef> operands don't really read the register, so it doesn't matter
    // which register we choose.  When the use operand is tied to a def, we must
    // use the same register as the def, so just do that always.
    SlotIndex Idx = LIS.getInstructionIndex(*MI);
    if (MO.isDef() || MO.isUndef())
      Idx = Idx.getRegSlot(MO.isEarlyClobber());

    // Rewrite to the mapped register at Idx.
    unsigned RegIdx = RegAssign.lookup(Idx);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    MO.setReg(LI.reg);
    LLVM_DEBUG(dbgs() << " rewr " << printMBBReference(*MI->getParent())
                      << '\t' << Idx << ':' << RegIdx << '\t' << *MI);

    // Extend liveness to Idx if the instruction reads reg.
    if (!ExtendRanges || MO.isUndef())
      continue;

    // Skip instructions that don't read Reg.
    if (MO.isDef()) {
      if (!MO.getSubReg() && !MO.isEarlyClobber())
        continue;
      // We may want to extend a live range for a partial redef, or for a use
      // tied to an early clobber.
      Idx = Idx.getPrevSlot();
      if (!Edit->getParent().liveAt(Idx))
        continue;
    } else
      Idx = Idx.getRegSlot(true);

    SlotIndex Next = Idx.getNextSlot();
    if (LI.hasSubRanges()) {
      // We have to delay extending subranges until we have seen all operands
      // defining the register. This is because a <def,read-undef> operand
      // will create an "undef" point, and we cannot extend any subranges
      // until all of them have been accounted for.
      if (MO.isUse())
        ExtPoints.push_back(ExtPoint(MO, RegIdx, Next));
    } else {
      // No subranges: extend the main range immediately.
      LiveRangeCalc &LRC = getLRCalc(RegIdx);
      LRC.extend(LI, Next, 0, ArrayRef<SlotIndex>());
    }
  }

  // Now that all operands have been rewritten, process the deferred
  // subrange extensions.
  for (ExtPoint &EP : ExtPoints) {
    LiveInterval &LI = LIS.getInterval(Edit->get(EP.RegIdx));
    assert(LI.hasSubRanges());

    LiveRangeCalc SubLRC;
    Register Reg = EP.MO.getReg(), Sub = EP.MO.getSubReg();
    LaneBitmask LM = Sub != 0 ? TRI.getSubRegIndexLaneMask(Sub)
                              : MRI.getMaxLaneMaskForVReg(Reg);
    for (LiveInterval::SubRange &S : LI.subranges()) {
      // Only subranges overlapping the operand's lanes need extension.
      if ((S.LaneMask & LM).none())
        continue;
      // The problem here can be that the new register may have been created
      // for a partially defined original register. For example:
      //   %0:subreg_hireg<def,read-undef> = ...
      //   ...
      //   %1 = COPY %0
      if (S.empty())
        continue;
      SubLRC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      SmallVector<SlotIndex, 4> Undefs;
      LI.computeSubRangeUndefs(Undefs, S.LaneMask, MRI, *LIS.getSlotIndexes());
      SubLRC.extend(S, EP.Next, 0, Undefs);
    }
  }

  // Rebuild each main range from its subranges so they stay consistent.
  for (unsigned R : *Edit) {
    LiveInterval &LI = LIS.getInterval(R);
    if (!LI.hasSubRanges())
      continue;
    LI.clear();
    LI.removeEmptySubRanges();
    LIS.constructMainRangeFromSubranges(LI);
  }
}
  1229. void SplitEditor::deleteRematVictims() {
  1230. SmallVector<MachineInstr*, 8> Dead;
  1231. for (LiveRangeEdit::iterator I = Edit->begin(), E = Edit->end(); I != E; ++I){
  1232. LiveInterval *LI = &LIS.getInterval(*I);
  1233. for (const LiveRange::Segment &S : LI->segments) {
  1234. // Dead defs end at the dead slot.
  1235. if (S.end != S.valno->def.getDeadSlot())
  1236. continue;
  1237. if (S.valno->isPHIDef())
  1238. continue;
  1239. MachineInstr *MI = LIS.getInstructionFromIndex(S.valno->def);
  1240. assert(MI && "Missing instruction for dead def");
  1241. MI->addRegisterDead(LI->reg, &TRI);
  1242. if (!MI->allDefsAreDead())
  1243. continue;
  1244. LLVM_DEBUG(dbgs() << "All defs dead: " << *MI);
  1245. Dead.push_back(MI);
  1246. }
  1247. }
  1248. if (Dead.empty())
  1249. return;
  1250. Edit->eliminateDeadDefs(Dead, None, &AA);
  1251. }
  1252. void SplitEditor::forceRecomputeVNI(const VNInfo &ParentVNI) {
  1253. // Fast-path for common case.
  1254. if (!ParentVNI.isPHIDef()) {
  1255. for (unsigned I = 0, E = Edit->size(); I != E; ++I)
  1256. forceRecompute(I, ParentVNI);
  1257. return;
  1258. }
  1259. // Trace value through phis.
  1260. SmallPtrSet<const VNInfo *, 8> Visited; ///< whether VNI was/is in worklist.
  1261. SmallVector<const VNInfo *, 4> WorkList;
  1262. Visited.insert(&ParentVNI);
  1263. WorkList.push_back(&ParentVNI);
  1264. const LiveInterval &ParentLI = Edit->getParent();
  1265. const SlotIndexes &Indexes = *LIS.getSlotIndexes();
  1266. do {
  1267. const VNInfo &VNI = *WorkList.back();
  1268. WorkList.pop_back();
  1269. for (unsigned I = 0, E = Edit->size(); I != E; ++I)
  1270. forceRecompute(I, VNI);
  1271. if (!VNI.isPHIDef())
  1272. continue;
  1273. MachineBasicBlock &MBB = *Indexes.getMBBFromIndex(VNI.def);
  1274. for (const MachineBasicBlock *Pred : MBB.predecessors()) {
  1275. SlotIndex PredEnd = Indexes.getMBBEndIdx(Pred);
  1276. VNInfo *PredVNI = ParentLI.getVNInfoBefore(PredEnd);
  1277. assert(PredVNI && "Value available in PhiVNI predecessor");
  1278. if (Visited.insert(PredVNI).second)
  1279. WorkList.push_back(PredVNI);
  1280. }
  1281. } while(!WorkList.empty());
  1282. }
/// Finalize the split: materialize parent defs, hoist back-copies per spill
/// mode, transfer/extend live ranges, clean up dead remat victims, and split
/// any disconnected components into fresh registers. If LRMap is given, it
/// receives the mapping from new intervals back to the original Edit index.
void SplitEditor::finish(SmallVectorImpl<unsigned> *LRMap) {
  ++NumFinished;

  // At this point, the live intervals in Edit contain VNInfos corresponding to
  // the inserted copies.

  // Add the original defs from the parent interval.
  for (const VNInfo *ParentVNI : Edit->getParent().valnos) {
    if (ParentVNI->isUnused())
      continue;
    unsigned RegIdx = RegAssign.lookup(ParentVNI->def);
    defValue(RegIdx, ParentVNI, ParentVNI->def, true);

    // Force rematted values to be recomputed everywhere.
    // The new live ranges may be truncated.
    if (Edit->didRematerialize(ParentVNI))
      forceRecomputeVNI(*ParentVNI);
  }

  // Hoist back-copies to the complement interval when in spill mode.
  switch (SpillMode) {
  case SM_Partition:
    // Leave all back-copies as is.
    break;
  case SM_Size:
  case SM_Speed:
    // hoistCopies will behave differently between size and speed.
    hoistCopies();
  }

  // Transfer the simply mapped values, check if any are skipped.
  bool Skipped = transferValues();

  // Rewrite virtual registers, possibly extending ranges.
  rewriteAssigned(Skipped);

  // Values skipped by transferValues() still need their PHI kills extended.
  if (Skipped)
    extendPHIKillRanges();
  else
    ++NumSimple;

  // Delete defs that were rematted everywhere.
  if (Skipped)
    deleteRematVictims();

  // Get rid of unused values and set phi-kill flags.
  for (unsigned Reg : *Edit) {
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeEmptySubRanges();
    LI.RenumberValues();
  }

  // Provide a reverse mapping from original indices to Edit ranges.
  if (LRMap) {
    LRMap->clear();
    for (unsigned i = 0, e = Edit->size(); i != e; ++i)
      LRMap->push_back(i);
  }

  // Now check if any registers were separated into multiple components.
  ConnectedVNInfoEqClasses ConEQ(LIS);
  for (unsigned i = 0, e = Edit->size(); i != e; ++i) {
    // Don't use iterators, they are invalidated by create() below.
    unsigned VReg = Edit->get(i);
    LiveInterval &LI = LIS.getInterval(VReg);
    SmallVector<LiveInterval*, 8> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
    unsigned Original = VRM.getOriginal(VReg);
    // Newly created components are still splits of the same original reg.
    for (LiveInterval *SplitLI : SplitLIs)
      VRM.setIsSplitFromReg(SplitLI->reg, Original);

    // The new intervals all map back to i.
    if (LRMap)
      LRMap->resize(Edit->size(), i);
  }

  // Calculate spill weight and allocation hints for new intervals.
  Edit->calculateRegClassAndHint(VRM.getMachineFunction(), SA.Loops, MBFI);

  assert(!LRMap || LRMap->size() == Edit->size());
}
  1350. //===----------------------------------------------------------------------===//
  1351. // Single Block Splitting
  1352. //===----------------------------------------------------------------------===//
  1353. bool SplitAnalysis::shouldSplitSingleBlock(const BlockInfo &BI,
  1354. bool SingleInstrs) const {
  1355. // Always split for multiple instructions.
  1356. if (!BI.isOneInstr())
  1357. return true;
  1358. // Don't split for single instructions unless explicitly requested.
  1359. if (!SingleInstrs)
  1360. return false;
  1361. // Splitting a live-through range always makes progress.
  1362. if (BI.LiveIn && BI.LiveOut)
  1363. return true;
  1364. // No point in isolating a copy. It has no register class constraints.
  1365. if (LIS.getInstructionFromIndex(BI.FirstInstr)->isCopyLike())
  1366. return false;
  1367. // Finally, don't isolate an end point that was created by earlier splits.
  1368. return isOriginalEndpoint(BI.FirstInstr);
  1369. }
  1370. void SplitEditor::splitSingleBlock(const SplitAnalysis::BlockInfo &BI) {
  1371. openIntv();
  1372. SlotIndex LastSplitPoint = SA.getLastSplitPoint(BI.MBB->getNumber());
  1373. SlotIndex SegStart = enterIntvBefore(std::min(BI.FirstInstr,
  1374. LastSplitPoint));
  1375. if (!BI.LiveOut || BI.LastInstr < LastSplitPoint) {
  1376. useIntv(SegStart, leaveIntvAfter(BI.LastInstr));
  1377. } else {
  1378. // The last use is after the last valid split point.
  1379. SlotIndex SegStop = leaveIntvBefore(LastSplitPoint);
  1380. useIntv(SegStart, SegStop);
  1381. overlapIntv(SegStop, BI.LastInstr);
  1382. }
  1383. }
  1384. //===----------------------------------------------------------------------===//
  1385. // Global Live Range Splitting Support
  1386. //===----------------------------------------------------------------------===//
  1387. // These methods support a method of global live range splitting that uses a
  1388. // global algorithm to decide intervals for CFG edges. They will insert split
  1389. // points and color intervals in basic blocks while avoiding interference.
  1390. //
  1391. // Note that splitSingleBlock is also useful for blocks where both CFG edges
  1392. // are on the stack.
/// Split a live-through block: the value enters in interval IntvIn and must
/// leave in interval IntvOut (either may be 0, meaning the complement/stack).
/// LeaveBefore / EnterAfter bound the interference inside the block; copies
/// are placed so the chosen intervals avoid it.
void SplitEditor::splitLiveThroughBlock(unsigned MBBNum,
                                        unsigned IntvIn, SlotIndex LeaveBefore,
                                        unsigned IntvOut, SlotIndex EnterAfter){
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(MBBNum);

  LLVM_DEBUG(dbgs() << "%bb." << MBBNum << " [" << Start << ';' << Stop
                    << ") intf " << LeaveBefore << '-' << EnterAfter
                    << ", live-through " << IntvIn << " -> " << IntvOut);

  assert((IntvIn || IntvOut) && "Use splitSingleBlock for isolated blocks");

  assert((!LeaveBefore || LeaveBefore < Stop) && "Interference after block");
  assert((!IntvIn || !LeaveBefore || LeaveBefore > Start) && "Impossible intf");
  assert((!EnterAfter || EnterAfter >= Start) && "Interference before block");

  MachineBasicBlock *MBB = VRM.getMachineFunction().getBlockNumbered(MBBNum);

  if (!IntvOut) {
    LLVM_DEBUG(dbgs() << ", spill on entry.\n");
    //
    //        <<<<<<<<<    Possible LeaveBefore interference.
    //    |-----------|    Live through.
    //    -____________    Spill on entry.
    //
    selectIntv(IntvIn);
    SlotIndex Idx = leaveIntvAtTop(*MBB);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    (void)Idx;
    return;
  }

  if (!IntvIn) {
    LLVM_DEBUG(dbgs() << ", reload on exit.\n");
    //
    //    >>>>>>>          Possible EnterAfter interference.
    //    |-----------|    Live through.
    //    ___________--    Reload on exit.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvAtEnd(*MBB);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    (void)Idx;
    return;
  }

  if (IntvIn == IntvOut && !LeaveBefore && !EnterAfter) {
    LLVM_DEBUG(dbgs() << ", straight through.\n");
    //
    //    |-----------|    Live through.
    //    -------------    Straight through, same intv, no interference.
    //
    selectIntv(IntvOut);
    useIntv(Start, Stop);
    return;
  }

  // We cannot legally insert splits after LSP.
  SlotIndex LSP = SA.getLastSplitPoint(MBBNum);
  assert((!IntvOut || !EnterAfter || EnterAfter < LSP) && "Impossible intf");

  if (IntvIn != IntvOut && (!LeaveBefore || !EnterAfter ||
                  LeaveBefore.getBaseIndex() > EnterAfter.getBoundaryIndex())) {
    LLVM_DEBUG(dbgs() << ", switch avoiding interference.\n");
    //
    //    >>>>     <<<<    Non-overlapping EnterAfter/LeaveBefore interference.
    //    |-----------|    Live through.
    //    ------=======    Switch intervals between interference.
    //
    selectIntv(IntvOut);
    SlotIndex Idx;
    // Switch before LeaveBefore when possible; otherwise the copy must go at
    // the end of the block.
    if (LeaveBefore && LeaveBefore < LSP) {
      Idx = enterIntvBefore(LeaveBefore);
      useIntv(Idx, Stop);
    } else {
      Idx = enterIntvAtEnd(*MBB);
    }
    selectIntv(IntvIn);
    useIntv(Start, Idx);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  LLVM_DEBUG(dbgs() << ", create local intv for interference.\n");
  //
  //    >>><><><><<<<    Overlapping EnterAfter/LeaveBefore interference.
  //    |-----------|    Live through.
  //    ==---------==    Switch intervals before/after interference.
  //
  assert(LeaveBefore <= EnterAfter && "Missed case");

  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  selectIntv(IntvIn);
  Idx = leaveIntvBefore(LeaveBefore);
  useIntv(Start, Idx);
  assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
}
/// Split a block where the live range enters in register interval IntvIn and
/// either dies in the block or leaves on the stack. LeaveBefore, when valid,
/// is the point before which IntvIn must stop being used due to interference.
void SplitEditor::splitRegInBlock(const SplitAnalysis::BlockInfo &BI,
                                  unsigned IntvIn, SlotIndex LeaveBefore) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  LLVM_DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';'
                    << Stop << "), uses " << BI.FirstInstr << '-'
                    << BI.LastInstr << ", reg-in " << IntvIn
                    << ", leave before " << LeaveBefore
                    << (BI.LiveOut ? ", stack-out" : ", killed in block"));

  assert(IntvIn && "Must have register in");
  assert(BI.LiveIn && "Must be live-in");
  assert((!LeaveBefore || LeaveBefore > Start) && "Bad interference");

  if (!BI.LiveOut && (!LeaveBefore || LeaveBefore >= BI.LastInstr)) {
    LLVM_DEBUG(dbgs() << " before interference.\n");
    //
    //               <<<    Interference after kill.
    //     |---o---x   |    Killed in block.
    //     =========        Use IntvIn everywhere.
    //
    selectIntv(IntvIn);
    useIntv(Start, BI.LastInstr);
    return;
  }

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  if (!LeaveBefore || LeaveBefore > BI.LastInstr.getBoundaryIndex()) {
    // Interference (if any) only starts after the last use.
    //
    //               <<<    Possible interference after last use.
    //     |---o---o---|    Live-out on stack.
    //     =========____    Leave IntvIn after last use.
    //
    //                 <    Interference after last use.
    //     |---o---o--o|    Live-out on stack, late last use.
    //     ============     Copy to stack after LSP, overlap IntvIn.
    //            \_____    Stack interval is live-out.
    //
    if (BI.LastInstr < LSP) {
      LLVM_DEBUG(dbgs() << ", spill after last use before interference.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvAfter(BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    } else {
      // The last use is at/after the last split point: the stack copy must
      // go before LSP, with IntvIn overlapping up to the last use.
      LLVM_DEBUG(dbgs() << ", spill before last split point.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvBefore(LSP);
      overlapIntv(Idx, BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    }
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvIn. That
  // means we need to create a local interval that can be allocated a
  // different register.
  unsigned LocalIntv = openIntv();
  (void)LocalIntv;
  LLVM_DEBUG(dbgs() << ", creating local interval " << LocalIntv << ".\n");

  if (!BI.LiveOut || BI.LastInstr < LSP) {
    //
    //           <<<<<<<    Interference overlapping uses.
    //     |---o---o---|    Live-out on stack.
    //     =====----____    Leave IntvIn before interference, then spill.
    //
    SlotIndex To = leaveIntvAfter(BI.LastInstr);
    SlotIndex From = enterIntvBefore(LeaveBefore);
    useIntv(From, To);
    selectIntv(IntvIn);
    useIntv(Start, From);
    assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
    return;
  }

  //           <<<<<<<    Interference overlapping uses.
  //     |---o---o--o|    Live-out on stack, late last use.
  //     =====-------     Copy to stack before LSP, overlap LocalIntv.
  //            \_____    Stack interval is live-out.
  //
  SlotIndex To = leaveIntvBefore(LSP);
  overlapIntv(To, BI.LastInstr);
  SlotIndex From = enterIntvBefore(std::min(To, LeaveBefore));
  useIntv(From, To);
  selectIntv(IntvIn);
  useIntv(Start, From);
  assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
}
/// Split a block where the live range leaves in register interval IntvOut and
/// is either defined in the block or enters on the stack. EnterAfter, when
/// valid, is the point after which IntvOut may start being used due to
/// interference earlier in the block.
void SplitEditor::splitRegOutBlock(const SplitAnalysis::BlockInfo &BI,
                                   unsigned IntvOut, SlotIndex EnterAfter) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  LLVM_DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';'
                    << Stop << "), uses " << BI.FirstInstr << '-'
                    << BI.LastInstr << ", reg-out " << IntvOut
                    << ", enter after " << EnterAfter
                    << (BI.LiveIn ? ", stack-in" : ", defined in block"));

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  assert(IntvOut && "Must have register out");
  assert(BI.LiveOut && "Must be live-out");
  assert((!EnterAfter || EnterAfter < LSP) && "Bad interference");

  if (!BI.LiveIn && (!EnterAfter || EnterAfter <= BI.FirstInstr)) {
    LLVM_DEBUG(dbgs() << " after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |   o---o---|    Defined in block.
    //        =========    Use IntvOut everywhere.
    //
    selectIntv(IntvOut);
    useIntv(BI.FirstInstr, Stop);
    return;
  }

  if (!EnterAfter || EnterAfter < BI.FirstInstr.getBaseIndex()) {
    LLVM_DEBUG(dbgs() << ", reload after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |---o---o---|    Live-through, stack-in.
    //    ____=========    Enter IntvOut before first use.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvBefore(std::min(LSP, BI.FirstInstr));
    useIntv(Idx, Stop);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvOut. That
  // means we need to create a local interval that can be allocated a
  // different register.
  LLVM_DEBUG(dbgs() << ", interference overlaps uses.\n");
  //
  //    >>>>>>>          Interference overlapping uses.
  //    |---o---o---|    Live-through, stack-in.
  //    ____---======    Create local interval for interference range.
  //
  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  // Local interval covering the uses caught inside the interference range.
  openIntv();
  SlotIndex From = enterIntvBefore(std::min(Idx, BI.FirstInstr));
  useIntv(From, Idx);
}