LiveInterval.cpp 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370
  1. //===- LiveInterval.cpp - Live Interval Representation --------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the LiveRange and LiveInterval classes. Given some
  11. // numbering of each of the machine instructions an interval [i, j) is said to be a
  12. // live range for register v if there is no instruction with number j' >= j
  13. // such that v is live at j' and there is no instruction with number i' < i such
  14. // that v is live at i'. In this implementation ranges can have holes,
  15. // i.e. a range might look like [1,20), [50,65), [1000,1001). Each
  16. // individual segment is represented as an instance of LiveRange::Segment,
  17. // and the whole range is represented as an instance of LiveRange.
  18. //
  19. //===----------------------------------------------------------------------===//
  20. #include "llvm/CodeGen/LiveInterval.h"
  21. #include "LiveRangeUtils.h"
  22. #include "RegisterCoalescer.h"
  23. #include "llvm/ADT/ArrayRef.h"
  24. #include "llvm/ADT/STLExtras.h"
  25. #include "llvm/ADT/SmallPtrSet.h"
  26. #include "llvm/ADT/SmallVector.h"
  27. #include "llvm/ADT/iterator_range.h"
  28. #include "llvm/CodeGen/LiveIntervals.h"
  29. #include "llvm/CodeGen/MachineBasicBlock.h"
  30. #include "llvm/CodeGen/MachineInstr.h"
  31. #include "llvm/CodeGen/MachineOperand.h"
  32. #include "llvm/CodeGen/MachineRegisterInfo.h"
  33. #include "llvm/CodeGen/SlotIndexes.h"
  34. #include "llvm/CodeGen/TargetRegisterInfo.h"
  35. #include "llvm/Config/llvm-config.h"
  36. #include "llvm/MC/LaneBitmask.h"
  37. #include "llvm/Support/Compiler.h"
  38. #include "llvm/Support/Debug.h"
  39. #include "llvm/Support/raw_ostream.h"
  40. #include <algorithm>
  41. #include <cassert>
  42. #include <cstddef>
  43. #include <iterator>
  44. #include <utility>
  45. using namespace llvm;
  46. namespace {
  47. //===----------------------------------------------------------------------===//
  48. // Implementation of various methods necessary for calculation of live ranges.
  49. // The implementation of the methods abstracts from the concrete type of the
  50. // segment collection.
  51. //
  52. // Implementation of the class follows the Template design pattern. The base
  53. // class contains generic algorithms that call collection-specific methods,
  54. // which are provided in concrete subclasses. In order to avoid virtual calls
  55. // these methods are provided by means of C++ template instantiation.
  56. // The base class calls the methods of the subclass through method impl(),
  57. // which casts 'this' pointer to the type of the subclass.
  58. //
  59. //===----------------------------------------------------------------------===//
template <typename ImplT, typename IteratorT, typename CollectionT>
class CalcLiveRangeUtilBase {
protected:
  LiveRange *LR;

protected:
  CalcLiveRangeUtilBase(LiveRange *LR) : LR(LR) {}

public:
  using Segment = LiveRange::Segment;
  using iterator = IteratorT;

  /// A counterpart of LiveRange::createDeadDef: Make sure the range has a
  /// value defined at @p Def.
  /// If @p ForVNI is null, and there is no value defined at @p Def, a new
  /// value will be allocated using @p VNInfoAllocator.
  /// If @p ForVNI is null, the return value is the value defined at @p Def,
  /// either a pre-existing one, or the one newly created.
  /// If @p ForVNI is not null, then @p Def should be the location where
  /// @p ForVNI is defined. If the range does not have a value defined at
  /// @p Def, the value @p ForVNI will be used instead of allocating a new
  /// one. If the range already has a value defined at @p Def, it must be
  /// same as @p ForVNI. In either case, @p ForVNI will be the return value.
  VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator *VNInfoAllocator,
                        VNInfo *ForVNI) {
    assert(!Def.isDead() && "Cannot define a value at the dead slot");
    assert((!ForVNI || ForVNI->def == Def) &&
           "If ForVNI is specified, it must match Def");
    iterator I = impl().find(Def);
    // No segment at or after Def: append a fresh dead-def segment
    // [Def, Def.deadSlot).
    if (I == segments().end()) {
      VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
      impl().insertAtEnd(Segment(Def, Def.getDeadSlot(), VNI));
      return VNI;
    }

    Segment *S = segmentAt(I);
    if (SlotIndex::isSameInstr(Def, S->start)) {
      assert((!ForVNI || ForVNI == S->valno) && "Value number mismatch");
      assert(S->valno->def == S->start && "Inconsistent existing value def");

      // It is possible to have both normal and early-clobber defs of the same
      // register on an instruction. It doesn't make a lot of sense, but it is
      // possible to specify in inline assembly.
      //
      // Just convert everything to early-clobber.
      Def = std::min(Def, S->start);
      if (Def != S->start)
        S->start = S->valno->def = Def;
      return S->valno;
    }
    assert(SlotIndex::isEarlierInstr(Def, S->start) && "Already live at def");
    VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
    segments().insert(I, Segment(Def, Def.getDeadSlot(), VNI));
    return VNI;
  }

  /// Extend the segment that is live just before @p Use so that it reaches
  /// @p Use, provided it ends after @p StartIdx. Returns the value number of
  /// that segment, or null when no segment reaches @p Use.
  VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Use) {
    if (segments().empty())
      return nullptr;
    iterator I =
        impl().findInsertPos(Segment(Use.getPrevSlot(), Use, nullptr));
    if (I == segments().begin())
      return nullptr;
    --I;
    // The preceding segment must at least reach past StartIdx to be
    // considered live into this block's [StartIdx, Use) span.
    if (I->end <= StartIdx)
      return nullptr;
    if (I->end < Use)
      extendSegmentEndTo(I, Use);
    return I->valno;
  }

  /// Variant of extendInBlock that is aware of explicit "undef" points
  /// (@p Undefs). Returns {value, false} when a live segment was found (and
  /// possibly extended), or {nullptr, IsUndef} where IsUndef reports whether
  /// an undef point accounts for the missing liveness before @p Use.
  std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                        SlotIndex StartIdx, SlotIndex Use) {
    if (segments().empty())
      return std::make_pair(nullptr, false);
    SlotIndex BeforeUse = Use.getPrevSlot();
    iterator I = impl().findInsertPos(Segment(BeforeUse, Use, nullptr));
    if (I == segments().begin())
      return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
    --I;
    if (I->end <= StartIdx)
      return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
    if (I->end < Use) {
      // An undef point between the segment's end and the use means the value
      // does not actually flow to Use; report it instead of extending.
      if (LR->isUndefIn(Undefs, I->end, BeforeUse))
        return std::make_pair(nullptr, true);
      extendSegmentEndTo(I, Use);
    }
    return std::make_pair(I->valno, false);
  }

  /// This method is used when we want to extend the segment specified
  /// by I to end at the specified endpoint. To do this, we should
  /// merge and eliminate all segments that this will overlap
  /// with. The iterator is not invalidated.
  void extendSegmentEndTo(iterator I, SlotIndex NewEnd) {
    assert(I != segments().end() && "Not a valid segment!");
    Segment *S = segmentAt(I);
    VNInfo *ValNo = I->valno;

    // Search for the first segment that we can't merge with.
    iterator MergeTo = std::next(I);
    for (; MergeTo != segments().end() && NewEnd >= MergeTo->end; ++MergeTo)
      assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");

    // If NewEnd was in the middle of a segment, make sure to get its endpoint.
    S->end = std::max(NewEnd, std::prev(MergeTo)->end);

    // If the newly formed segment now touches the segment after it and if they
    // have the same value number, merge the two segments into one segment.
    if (MergeTo != segments().end() && MergeTo->start <= I->end &&
        MergeTo->valno == ValNo) {
      S->end = MergeTo->end;
      ++MergeTo;
    }

    // Erase any dead segments.
    segments().erase(std::next(I), MergeTo);
  }

  /// This method is used when we want to extend the segment specified
  /// by I to start at the specified endpoint. To do this, we should
  /// merge and eliminate all segments that this will overlap with.
  iterator extendSegmentStartTo(iterator I, SlotIndex NewStart) {
    assert(I != segments().end() && "Not a valid segment!");
    Segment *S = segmentAt(I);
    VNInfo *ValNo = I->valno;

    // Search backwards for the first segment that we can't merge with.
    iterator MergeTo = I;
    do {
      if (MergeTo == segments().begin()) {
        // Reached the front: I simply grows backward to NewStart.
        S->start = NewStart;
        segments().erase(MergeTo, I);
        return I;
      }
      assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");
      --MergeTo;
    } while (NewStart <= MergeTo->start);

    // If we start in the middle of another segment, just delete a range and
    // extend that segment.
    if (MergeTo->end >= NewStart && MergeTo->valno == ValNo) {
      segmentAt(MergeTo)->end = S->end;
    } else {
      // Otherwise, extend the segment right after.
      ++MergeTo;
      Segment *MergeToSeg = segmentAt(MergeTo);
      MergeToSeg->start = NewStart;
      MergeToSeg->end = S->end;
    }

    segments().erase(std::next(MergeTo), std::next(I));
    return MergeTo;
  }

  /// Insert segment @p S, coalescing it with any overlapping or abutting
  /// segments that carry the same value number. Overlapping a segment with a
  /// different value number is an error (asserted).
  iterator addSegment(Segment S) {
    SlotIndex Start = S.start, End = S.end;
    iterator I = impl().findInsertPos(S);

    // If the inserted segment starts in the middle or right at the end of
    // another segment, just extend that segment to contain the segment of S.
    if (I != segments().begin()) {
      iterator B = std::prev(I);
      if (S.valno == B->valno) {
        if (B->start <= Start && B->end >= Start) {
          extendSegmentEndTo(B, End);
          return B;
        }
      } else {
        // Check to make sure that we are not overlapping two live segments
        // with different valno's.
        assert(B->end <= Start &&
               "Cannot overlap two segments with differing ValID's"
               " (did you def the same reg twice in a MachineInstr?)");
      }
    }

    // Otherwise, if this segment ends in the middle of, or right next
    // to, another segment, merge it into that segment.
    if (I != segments().end()) {
      if (S.valno == I->valno) {
        if (I->start <= End) {
          I = extendSegmentStartTo(I, Start);

          // If S is a complete superset of a segment, we may need to grow its
          // endpoint as well.
          if (End > I->end)
            extendSegmentEndTo(I, End);
          return I;
        }
      } else {
        // Check to make sure that we are not overlapping two live segments
        // with different valno's.
        assert(I->start >= End &&
               "Cannot overlap two segments with differing ValID's");
      }
    }

    // Otherwise, this is just a new segment that doesn't interact with
    // anything.
    // Insert it.
    return segments().insert(I, S);
  }

private:
  // CRTP accessors: the concrete subclass supplies the collection and the
  // find/findInsertPos/insertAtEnd primitives; no virtual dispatch involved.
  ImplT &impl() { return *static_cast<ImplT *>(this); }

  CollectionT &segments() { return impl().segmentsColl(); }

  Segment *segmentAt(iterator I) { return const_cast<Segment *>(&(*I)); }
};
  247. //===----------------------------------------------------------------------===//
  248. // Instantiation of the methods for calculation of live ranges
  249. // based on a segment vector.
  250. //===----------------------------------------------------------------------===//
class CalcLiveRangeUtilVector;
using CalcLiveRangeUtilVectorBase =
    CalcLiveRangeUtilBase<CalcLiveRangeUtilVector, LiveRange::iterator,
                          LiveRange::Segments>;

/// Live-range calculation over the normal vector-backed segment storage.
class CalcLiveRangeUtilVector : public CalcLiveRangeUtilVectorBase {
public:
  CalcLiveRangeUtilVector(LiveRange *LR) : CalcLiveRangeUtilVectorBase(LR) {}

private:
  friend CalcLiveRangeUtilVectorBase;

  // Collection accessor required by the CRTP base class.
  LiveRange::Segments &segmentsColl() { return LR->segments; }

  void insertAtEnd(const Segment &S) { LR->segments.push_back(S); }

  // Binary search is delegated to LiveRange::find.
  iterator find(SlotIndex Pos) { return LR->find(Pos); }

  // First segment starting after S.start: the insertion point that keeps
  // the vector sorted by start index.
  iterator findInsertPos(Segment S) {
    return std::upper_bound(LR->begin(), LR->end(), S.start);
  }
};
  267. //===----------------------------------------------------------------------===//
  268. // Instantiation of the methods for calculation of live ranges
  269. // based on a segment set.
  270. //===----------------------------------------------------------------------===//
class CalcLiveRangeUtilSet;
using CalcLiveRangeUtilSetBase =
    CalcLiveRangeUtilBase<CalcLiveRangeUtilSet, LiveRange::SegmentSet::iterator,
                          LiveRange::SegmentSet>;

/// Live-range calculation over the set-backed segment storage that is used
/// while a range is being bulk-constructed (LR->segmentSet is non-null).
class CalcLiveRangeUtilSet : public CalcLiveRangeUtilSetBase {
public:
  CalcLiveRangeUtilSet(LiveRange *LR) : CalcLiveRangeUtilSetBase(LR) {}

private:
  friend CalcLiveRangeUtilSetBase;

  // Collection accessor required by the CRTP base class.
  LiveRange::SegmentSet &segmentsColl() { return *LR->segmentSet; }

  // Insert with an end() hint; callers guarantee S belongs at the back.
  void insertAtEnd(const Segment &S) {
    LR->segmentSet->insert(LR->segmentSet->end(), S);
  }

  // Return the segment containing Pos if one exists, otherwise the first
  // segment starting after Pos.
  iterator find(SlotIndex Pos) {
    iterator I =
        LR->segmentSet->upper_bound(Segment(Pos, Pos.getNextSlot(), nullptr));
    if (I == LR->segmentSet->begin())
      return I;
    iterator PrevI = std::prev(I);
    // The previous segment covers Pos; hand it back instead.
    if (Pos < (*PrevI).end)
      return PrevI;
    return I;
  }

  // Insertion position for S that keeps the set's ordering invariant.
  iterator findInsertPos(Segment S) {
    iterator I = LR->segmentSet->upper_bound(S);
    // Skip over a segment whose start coincides with S.start.
    if (I != LR->segmentSet->end() && !(S.start < *I))
      ++I;
    return I;
  }
};
  301. } // end anonymous namespace
  302. //===----------------------------------------------------------------------===//
  303. // LiveRange methods
  304. //===----------------------------------------------------------------------===//
  305. LiveRange::iterator LiveRange::find(SlotIndex Pos) {
  306. // This algorithm is basically std::upper_bound.
  307. // Unfortunately, std::upper_bound cannot be used with mixed types until we
  308. // adopt C++0x. Many libraries can do it, but not all.
  309. if (empty() || Pos >= endIndex())
  310. return end();
  311. iterator I = begin();
  312. size_t Len = size();
  313. do {
  314. size_t Mid = Len >> 1;
  315. if (Pos < I[Mid].end) {
  316. Len = Mid;
  317. } else {
  318. I += Mid + 1;
  319. Len -= Mid + 1;
  320. }
  321. } while (Len);
  322. return I;
  323. }
  324. VNInfo *LiveRange::createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc) {
  325. // Use the segment set, if it is available.
  326. if (segmentSet != nullptr)
  327. return CalcLiveRangeUtilSet(this).createDeadDef(Def, &VNIAlloc, nullptr);
  328. // Otherwise use the segment vector.
  329. return CalcLiveRangeUtilVector(this).createDeadDef(Def, &VNIAlloc, nullptr);
  330. }
  331. VNInfo *LiveRange::createDeadDef(VNInfo *VNI) {
  332. // Use the segment set, if it is available.
  333. if (segmentSet != nullptr)
  334. return CalcLiveRangeUtilSet(this).createDeadDef(VNI->def, nullptr, VNI);
  335. // Otherwise use the segment vector.
  336. return CalcLiveRangeUtilVector(this).createDeadDef(VNI->def, nullptr, VNI);
  337. }
  338. // overlaps - Return true if the intersection of the two live ranges is
  339. // not empty.
  340. //
  341. // An example for overlaps():
  342. //
  343. // 0: A = ...
  344. // 4: B = ...
  345. // 8: C = A + B ;; last use of A
  346. //
  347. // The live ranges should look like:
  348. //
  349. // A = [3, 11)
  350. // B = [7, x)
  351. // C = [11, y)
  352. //
  353. // A->overlaps(C) should return false since we want to be able to join
  354. // A and C.
  355. //
/// Return true if this range and \p other share at least one live slot.
/// \p StartPos is a hint into \p other: it must be at or before the first
/// segment of interest (asserted below). See the worked example in the
/// comment block above for why abutting ranges do NOT overlap.
bool LiveRange::overlapsFrom(const LiveRange& other,
                             const_iterator StartPos) const {
  assert(!empty() && "empty range");
  const_iterator i = begin();
  const_iterator ie = end();
  const_iterator j = StartPos;
  const_iterator je = other.end();

  assert((StartPos->start <= i->start || StartPos == other.begin()) &&
         StartPos != other.end() && "Bogus start position hint!");

  // Skip ahead in whichever range starts earlier, so i and j point at the
  // first segments that could possibly intersect.
  if (i->start < j->start) {
    i = std::upper_bound(i, ie, j->start);
    if (i != begin()) --i;
  } else if (j->start < i->start) {
    // Note: StartPos is a by-value copy, so this ++ is purely local.
    ++StartPos;
    if (StartPos != other.end() && StartPos->start <= i->start) {
      assert(StartPos < other.end() && i < end());
      j = std::upper_bound(j, je, i->start);
      if (j != other.begin()) --j;
    }
  } else {
    // Both ranges have a segment starting at the same slot: trivial overlap.
    return true;
  }

  if (j == je) return false;

  // Walk both segment lists in lockstep. The swap keeps the invariant
  // i->start <= j->start, so an overlap exists iff i extends past j's start.
  while (i != ie) {
    if (i->start > j->start) {
      std::swap(i, j);
      std::swap(ie, je);
    }

    if (i->end > j->start)
      return true;
    ++i;
  }

  return false;
}
/// Return true if this range overlaps \p Other in a way that matters for
/// coalescing: overlaps whose joint definition point is a copy that \p CP
/// can coalesce (or a block boundary) are deliberately ignored.
bool LiveRange::overlaps(const LiveRange &Other, const CoalescerPair &CP,
                         const SlotIndexes &Indexes) const {
  assert(!empty() && "empty range");
  if (Other.empty())
    return false;

  // Use binary searches to find initial positions.
  const_iterator I = find(Other.beginIndex());
  const_iterator IE = end();
  if (I == IE)
    return false;
  const_iterator J = Other.find(I->start);
  const_iterator JE = Other.end();
  if (J == JE)
    return false;

  while (true) {
    // J has just been advanced to satisfy:
    assert(J->end >= I->start);

    // Check for an overlap.
    if (J->start < I->end) {
      // I and J are overlapping. Find the later start.
      SlotIndex Def = std::max(I->start, J->start);
      // Allow the overlap if Def is a coalescable copy.
      if (Def.isBlock() ||
          !CP.isCoalescable(Indexes.getInstructionFromIndex(Def)))
        return true;
    }

    // Advance the iterator that ends first to check for more overlaps.
    // Swapping keeps the loop symmetric: I always ends no later than J.
    if (J->end > I->end) {
      std::swap(I, J);
      std::swap(IE, JE);
    }

    // Advance J until J->end >= I->start.
    do
      if (++J == JE)
        return false;
    while (J->end < I->start);
  }
}
  428. /// overlaps - Return true if the live range overlaps an interval specified
  429. /// by [Start, End).
  430. bool LiveRange::overlaps(SlotIndex Start, SlotIndex End) const {
  431. assert(Start < End && "Invalid range");
  432. const_iterator I = std::lower_bound(begin(), end(), End);
  433. return I != begin() && (--I)->end > Start;
  434. }
/// Return true if every slot that is live in \p Other is also live in this
/// range. Holes in this range inside one of Other's segments disqualify it.
bool LiveRange::covers(const LiveRange &Other) const {
  if (empty())
    return Other.empty();

  const_iterator I = begin();
  for (const Segment &O : Other.segments) {
    // advanceTo only moves forward, which is valid because Other's segments
    // are sorted; I never needs to back up.
    I = advanceTo(I, O.start);
    if (I == end() || I->start > O.start)
      return false;

    // Check adjacent live segments and see if we can get behind O.end.
    while (I->end < O.end) {
      const_iterator Last = I;
      // Get next segment and abort if it was not adjacent.
      ++I;
      if (I == end() || Last->end != I->start)
        return false;
    }
  }
  return true;
}
  454. /// ValNo is dead, remove it. If it is the largest value number, just nuke it
  455. /// (and any other deleted values neighboring it), otherwise mark it as ~1U so
  456. /// it can be nuked later.
  457. void LiveRange::markValNoForDeletion(VNInfo *ValNo) {
  458. if (ValNo->id == getNumValNums()-1) {
  459. do {
  460. valnos.pop_back();
  461. } while (!valnos.empty() && valnos.back()->isUnused());
  462. } else {
  463. ValNo->markUnused();
  464. }
  465. }
  466. /// RenumberValues - Renumber all values in order of appearance and delete the
  467. /// remaining unused values.
  468. void LiveRange::RenumberValues() {
  469. SmallPtrSet<VNInfo*, 8> Seen;
  470. valnos.clear();
  471. for (const Segment &S : segments) {
  472. VNInfo *VNI = S.valno;
  473. if (!Seen.insert(VNI).second)
  474. continue;
  475. assert(!VNI->isUnused() && "Unused valno used by live segment");
  476. VNI->id = (unsigned)valnos.size();
  477. valnos.push_back(VNI);
  478. }
  479. }
/// Add \p S via the set-backed representation (bulk-construction mode).
/// Only valid while segmentSet is non-null.
void LiveRange::addSegmentToSet(Segment S) {
  CalcLiveRangeUtilSet(this).addSegment(S);
}
  483. LiveRange::iterator LiveRange::addSegment(Segment S) {
  484. // Use the segment set, if it is available.
  485. if (segmentSet != nullptr) {
  486. addSegmentToSet(S);
  487. return end();
  488. }
  489. // Otherwise use the segment vector.
  490. return CalcLiveRangeUtilVector(this).addSegment(S);
  491. }
/// Append \p S after all existing segments; no merging or sorting is done,
/// so the caller must guarantee S starts at or after the current back.
void LiveRange::append(const Segment S) {
  // Check that the segment belongs to the back of the list.
  assert(segments.empty() || segments.back().end <= S.start);
  segments.push_back(S);
}
  497. std::pair<VNInfo*,bool> LiveRange::extendInBlock(ArrayRef<SlotIndex> Undefs,
  498. SlotIndex StartIdx, SlotIndex Kill) {
  499. // Use the segment set, if it is available.
  500. if (segmentSet != nullptr)
  501. return CalcLiveRangeUtilSet(this).extendInBlock(Undefs, StartIdx, Kill);
  502. // Otherwise use the segment vector.
  503. return CalcLiveRangeUtilVector(this).extendInBlock(Undefs, StartIdx, Kill);
  504. }
  505. VNInfo *LiveRange::extendInBlock(SlotIndex StartIdx, SlotIndex Kill) {
  506. // Use the segment set, if it is available.
  507. if (segmentSet != nullptr)
  508. return CalcLiveRangeUtilSet(this).extendInBlock(StartIdx, Kill);
  509. // Otherwise use the segment vector.
  510. return CalcLiveRangeUtilVector(this).extendInBlock(StartIdx, Kill);
  511. }
  512. /// Remove the specified segment from this range. Note that the segment must
  513. /// be in a single Segment in its entirety.
/// Remove the span [Start, End) from this range. The span must lie entirely
/// within a single existing Segment. If \p RemoveDeadValNo is set and the
/// removal kills the last segment of a value, that value number is retired
/// too (note: the dead-value check below is a linear scan of all segments).
void LiveRange::removeSegment(SlotIndex Start, SlotIndex End,
                              bool RemoveDeadValNo) {
  // Find the Segment containing this span.
  iterator I = find(Start);
  assert(I != end() && "Segment is not in range!");
  assert(I->containsInterval(Start, End)
         && "Segment is not entirely in range!");

  // If the span we are removing is at the start of the Segment, adjust it.
  VNInfo *ValNo = I->valno;
  if (I->start == Start) {
    if (I->end == End) {
      if (RemoveDeadValNo) {
        // Check if val# is dead: no other segment may still reference it.
        bool isDead = true;
        for (const_iterator II = begin(), EE = end(); II != EE; ++II)
          if (II != I && II->valno == ValNo) {
            isDead = false;
            break;
          }
        if (isDead) {
          // Now that ValNo is dead, remove it.
          markValNoForDeletion(ValNo);
        }
      }

      segments.erase(I);  // Removed the whole Segment.
    } else
      I->start = End;  // Trim the front of the segment.
    return;
  }

  // Otherwise if the span we are removing is at the end of the Segment,
  // adjust the other way.
  if (I->end == End) {
    I->end = Start;
    return;
  }

  // Otherwise, we are splitting the Segment into two pieces: [old-start,
  // Start) keeps the original slot, and [End, old-end) is inserted after it.
  SlotIndex OldEnd = I->end;
  I->end = Start;   // Trim the old segment.

  // Insert the new one.
  segments.insert(std::next(I), Segment(End, OldEnd, ValNo));
}
  555. /// removeValNo - Remove all the segments defined by the specified value#.
  556. /// Also remove the value# from value# list.
  557. void LiveRange::removeValNo(VNInfo *ValNo) {
  558. if (empty()) return;
  559. segments.erase(remove_if(*this, [ValNo](const Segment &S) {
  560. return S.valno == ValNo;
  561. }), end());
  562. // Now that ValNo is dead, remove it.
  563. markValNoForDeletion(ValNo);
  564. }
/// Join the segments of \p Other into this range. \p LHSValNoAssignments and
/// \p RHSValNoAssignments map each value id of this range / of Other to an
/// index into \p NewVNInfo, the merged value-number table that this range
/// adopts. Other is left in an unspecified state afterwards.
void LiveRange::join(LiveRange &Other,
                     const int *LHSValNoAssignments,
                     const int *RHSValNoAssignments,
                     SmallVectorImpl<VNInfo *> &NewVNInfo) {
  verify();

  // Determine if any of our values are mapped. This is uncommon, so we want
  // to avoid the range scan if not.
  bool MustMapCurValNos = false;
  unsigned NumVals = getNumValNums();
  unsigned NumNewVals = NewVNInfo.size();
  for (unsigned i = 0; i != NumVals; ++i) {
    unsigned LHSValID = LHSValNoAssignments[i];
    if (i != LHSValID ||
        (NewVNInfo[LHSValID] && NewVNInfo[LHSValID] != getValNumInfo(i))) {
      MustMapCurValNos = true;
      break;
    }
  }

  // If we have to apply a mapping to our base range assignment, rewrite it
  // now. This is done in place: OutIt trails I and receives the remapped
  // (and possibly merged) segments; the leftover tail is erased afterwards.
  if (MustMapCurValNos && !empty()) {
    // Map the first live range.
    iterator OutIt = begin();
    OutIt->valno = NewVNInfo[LHSValNoAssignments[OutIt->valno->id]];
    for (iterator I = std::next(OutIt), E = end(); I != E; ++I) {
      VNInfo* nextValNo = NewVNInfo[LHSValNoAssignments[I->valno->id]];
      assert(nextValNo && "Huh?");

      // If this live range has the same value # as its immediate predecessor,
      // and if they are neighbors, remove one Segment. This happens when we
      // have [0,4:0)[4,7:1) and map 0/1 onto the same value #.
      if (OutIt->valno == nextValNo && OutIt->end == I->start) {
        OutIt->end = I->end;
      } else {
        // Didn't merge. Move OutIt to the next segment,
        ++OutIt;
        OutIt->valno = nextValNo;
        if (OutIt != I) {
          OutIt->start = I->start;
          OutIt->end = I->end;
        }
      }
    }
    // If we merge some segments, chop off the end.
    ++OutIt;
    segments.erase(OutIt, end());
  }

  // Rewrite Other values before changing the VNInfo ids.
  // This can leave Other in an invalid state because we're not coalescing
  // touching segments that now have identical values. That's OK since Other is
  // not supposed to be valid after calling join();
  for (Segment &S : Other.segments)
    S.valno = NewVNInfo[RHSValNoAssignments[S.valno->id]];

  // Update val# info. Renumber them and make sure they all belong to this
  // LiveRange now. Also remove dead val#'s.
  unsigned NumValNos = 0;
  for (unsigned i = 0; i < NumNewVals; ++i) {
    VNInfo *VNI = NewVNInfo[i];
    if (VNI) {
      if (NumValNos >= NumVals)
        valnos.push_back(VNI);
      else
        valnos[NumValNos] = VNI;
      VNI->id = NumValNos++;  // Renumber val#.
    }
  }
  if (NumNewVals < NumVals)
    valnos.resize(NumNewVals);  // shrinkify

  // Okay, now insert the RHS live segments into the LHS.
  LiveRangeUpdater Updater(this);
  for (Segment &S : Other.segments)
    Updater.add(S);
}
  636. /// Merge all of the segments in RHS into this live range as the specified
  637. /// value number. The segments in RHS are allowed to overlap with segments in
  638. /// the current range, but only if the overlapping segments have the
  639. /// specified value number.
  640. void LiveRange::MergeSegmentsInAsValue(const LiveRange &RHS,
  641. VNInfo *LHSValNo) {
  642. LiveRangeUpdater Updater(this);
  643. for (const Segment &S : RHS.segments)
  644. Updater.add(S.start, S.end, LHSValNo);
  645. }
  646. /// MergeValueInAsValue - Merge all of the live segments of a specific val#
  647. /// in RHS into this live range as the specified value number.
  648. /// The segments in RHS are allowed to overlap with segments in the
  650. // current range, it will replace the value numbers of the overlapped
  650. /// segments with the specified value number.
  651. void LiveRange::MergeValueInAsValue(const LiveRange &RHS,
  652. const VNInfo *RHSValNo,
  653. VNInfo *LHSValNo) {
  654. LiveRangeUpdater Updater(this);
  655. for (const Segment &S : RHS.segments)
  656. if (S.valno == RHSValNo)
  657. Updater.add(S.start, S.end, LHSValNo);
  658. }
  659. /// MergeValueNumberInto - This method is called when two value numbers
  660. /// are found to be equivalent. This eliminates V1, replacing all
  661. /// segments with the V1 value number with the V2 value number. This can
  662. /// cause merging of V1/V2 value numbers and compaction of the value space.
  663. VNInfo *LiveRange::MergeValueNumberInto(VNInfo *V1, VNInfo *V2) {
  664. assert(V1 != V2 && "Identical value#'s are always equivalent!");
  665. // This code actually merges the (numerically) larger value number into the
  666. // smaller value number, which is likely to allow us to compactify the value
  667. // space. The only thing we have to be careful of is to preserve the
  668. // instruction that defines the result value.
  669. // Make sure V2 is smaller than V1.
  670. if (V1->id < V2->id) {
  671. V1->copyFrom(*V2);
  672. std::swap(V1, V2);
  673. }
  674. // Merge V1 segments into V2.
  675. for (iterator I = begin(); I != end(); ) {
  676. iterator S = I++;
  677. if (S->valno != V1) continue; // Not a V1 Segment.
  678. // Okay, we found a V1 live range. If it had a previous, touching, V2 live
  679. // range, extend it.
  680. if (S != begin()) {
  681. iterator Prev = S-1;
  682. if (Prev->valno == V2 && Prev->end == S->start) {
  683. Prev->end = S->end;
  684. // Erase this live-range.
  685. segments.erase(S);
  686. I = Prev+1;
  687. S = Prev;
  688. }
  689. }
  690. // Okay, now we have a V1 or V2 live range that is maximally merged forward.
  691. // Ensure that it is a V2 live-range.
  692. S->valno = V2;
  693. // If we can merge it into later V2 segments, do so now. We ignore any
  694. // following V1 segments, as they will be merged in subsequent iterations
  695. // of the loop.
  696. if (I != end()) {
  697. if (I->start == S->end && I->valno == V2) {
  698. S->end = I->end;
  699. segments.erase(I);
  700. I = S+1;
  701. }
  702. }
  703. }
  704. // Now that V1 is dead, remove it.
  705. markValNoForDeletion(V1);
  706. return V2;
  707. }
  708. void LiveRange::flushSegmentSet() {
  709. assert(segmentSet != nullptr && "segment set must have been created");
  710. assert(
  711. segments.empty() &&
  712. "segment set can be used only initially before switching to the array");
  713. segments.append(segmentSet->begin(), segmentSet->end());
  714. segmentSet = nullptr;
  715. verify();
  716. }
  717. bool LiveRange::isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const {
  718. ArrayRef<SlotIndex>::iterator SlotI = Slots.begin();
  719. ArrayRef<SlotIndex>::iterator SlotE = Slots.end();
  720. // If there are no regmask slots, we have nothing to search.
  721. if (SlotI == SlotE)
  722. return false;
  723. // Start our search at the first segment that ends after the first slot.
  724. const_iterator SegmentI = find(*SlotI);
  725. const_iterator SegmentE = end();
  726. // If there are no segments that end after the first slot, we're done.
  727. if (SegmentI == SegmentE)
  728. return false;
  729. // Look for each slot in the live range.
  730. for ( ; SlotI != SlotE; ++SlotI) {
  731. // Go to the next segment that ends after the current slot.
  732. // The slot may be within a hole in the range.
  733. SegmentI = advanceTo(SegmentI, *SlotI);
  734. if (SegmentI == SegmentE)
  735. return false;
  736. // If this segment contains the slot, we're done.
  737. if (SegmentI->contains(*SlotI))
  738. return true;
  739. // Otherwise, look for the next slot.
  740. }
  741. // We didn't find a segment containing any of the slots.
  742. return false;
  743. }
/// Destroy a subrange node. Only the destructor runs here; the storage is
/// owned by a BumpPtr allocator and is reclaimed in bulk elsewhere.
void LiveInterval::freeSubRange(SubRange *S) {
  S->~SubRange();
  // Memory was allocated with BumpPtr allocator and is not freed here.
}
/// Remove (and destroy) every empty subrange from the singly linked
/// SubRanges list.
void LiveInterval::removeEmptySubRanges() {
  // NextPtr addresses the link field that should point at the next
  // surviving subrange, so runs of empty nodes can be unlinked without
  // keeping a separate "previous node" pointer.
  SubRange **NextPtr = &SubRanges;
  SubRange *I = *NextPtr;
  while (I != nullptr) {
    if (!I->empty()) {
      // Keep this subrange and advance past its link field.
      NextPtr = &I->Next;
      I = *NextPtr;
      continue;
    }
    // Skip empty subranges until we find the first nonempty one.
    do {
      SubRange *Next = I->Next;
      freeSubRange(I);
      I = Next;
    } while (I != nullptr && I->empty());
    // Splice the surviving tail (or null) over the removed run.
    *NextPtr = I;
  }
}
  766. void LiveInterval::clearSubRanges() {
  767. for (SubRange *I = SubRanges, *Next; I != nullptr; I = Next) {
  768. Next = I->Next;
  769. freeSubRange(I);
  770. }
  771. SubRanges = nullptr;
  772. }
/// Refine the subrange structure so the lanes in \p LaneMask are covered by
/// subranges that lie entirely inside \p LaneMask, splitting existing
/// subranges where necessary, then invoke \p Apply on each matching subrange
/// (including a freshly created one for any lanes no subrange covered).
void LiveInterval::refineSubRanges(BumpPtrAllocator &Allocator,
    LaneBitmask LaneMask, std::function<void(LiveInterval::SubRange&)> Apply) {
  // Lanes of LaneMask not yet accounted for by an existing subrange.
  LaneBitmask ToApply = LaneMask;
  for (SubRange &SR : subranges()) {
    LaneBitmask SRMask = SR.LaneMask;
    LaneBitmask Matching = SRMask & LaneMask;
    if (Matching.none())
      continue;

    SubRange *MatchingRange;
    if (SRMask == Matching) {
      // The subrange fits (it does not cover bits outside \p LaneMask).
      MatchingRange = &SR;
    } else {
      // We have to split the subrange into a matching and non-matching part.
      // Reduce lanemask of existing lane to non-matching part.
      SR.LaneMask = SRMask & ~Matching;
      // Create a new subrange for the matching part, cloned from SR.
      MatchingRange = createSubRangeFrom(Allocator, Matching, SR);
    }
    Apply(*MatchingRange);
    ToApply &= ~Matching;
  }
  // Create a new subrange if there are uncovered bits left.
  if (ToApply.any()) {
    SubRange *NewRange = createSubRange(Allocator, ToApply);
    Apply(*NewRange);
  }
}
  801. unsigned LiveInterval::getSize() const {
  802. unsigned Sum = 0;
  803. for (const Segment &S : segments)
  804. Sum += S.start.distance(S.end);
  805. return Sum;
  806. }
/// Append to \p Undefs the positions of subregister defs of this vreg that
/// leave lanes in \p LaneMask undefined (i.e. <undef> subreg defs whose
/// written lanes do not cover LaneMask).
void LiveInterval::computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                                         LaneBitmask LaneMask,
                                         const MachineRegisterInfo &MRI,
                                         const SlotIndexes &Indexes) const {
  assert(TargetRegisterInfo::isVirtualRegister(reg));
  LaneBitmask VRegMask = MRI.getMaxLaneMaskForVReg(reg);
  assert((VRegMask & LaneMask).any());
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  for (const MachineOperand &MO : MRI.def_operands(reg)) {
    // Only <undef> defs introduce undefined lanes.
    if (!MO.isUndef())
      continue;
    unsigned SubReg = MO.getSubReg();
    assert(SubReg != 0 && "Undef should only be set on subreg defs");
    LaneBitmask DefMask = TRI.getSubRegIndexLaneMask(SubReg);
    // Lanes of the vreg that this subreg def does not write stay undefined.
    LaneBitmask UndefMask = VRegMask & ~DefMask;
    if ((UndefMask & LaneMask).any()) {
      const MachineInstr &MI = *MO.getParent();
      bool EarlyClobber = MO.isEarlyClobber();
      // Record the def's register slot (early-clobber slot when applicable).
      SlotIndex Pos = Indexes.getInstructionIndex(MI).getRegSlot(EarlyClobber);
      Undefs.push_back(Pos);
    }
  }
}
  830. raw_ostream& llvm::operator<<(raw_ostream& OS, const LiveRange::Segment &S) {
  831. return OS << '[' << S.start << ',' << S.end << ':' << S.valno->id << ')';
  832. }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debug helper: print this segment followed by a newline to the debug stream.
LLVM_DUMP_METHOD void LiveRange::Segment::dump() const {
  dbgs() << *this << '\n';
}
#endif
  838. void LiveRange::print(raw_ostream &OS) const {
  839. if (empty())
  840. OS << "EMPTY";
  841. else {
  842. for (const Segment &S : segments) {
  843. OS << S;
  844. assert(S.valno == getValNumInfo(S.valno->id) && "Bad VNInfo");
  845. }
  846. }
  847. // Print value number info.
  848. if (getNumValNums()) {
  849. OS << " ";
  850. unsigned vnum = 0;
  851. for (const_vni_iterator i = vni_begin(), e = vni_end(); i != e;
  852. ++i, ++vnum) {
  853. const VNInfo *vni = *i;
  854. if (vnum) OS << ' ';
  855. OS << vnum << '@';
  856. if (vni->isUnused()) {
  857. OS << 'x';
  858. } else {
  859. OS << vni->def;
  860. if (vni->isPHIDef())
  861. OS << "-phi";
  862. }
  863. }
  864. }
  865. }
  866. void LiveInterval::SubRange::print(raw_ostream &OS) const {
  867. OS << " L" << PrintLaneMask(LaneMask) << ' '
  868. << static_cast<const LiveRange&>(*this);
  869. }
  870. void LiveInterval::print(raw_ostream &OS) const {
  871. OS << printReg(reg) << ' ';
  872. super::print(OS);
  873. // Print subranges
  874. for (const SubRange &SR : subranges())
  875. OS << SR;
  876. OS << " weight:" << weight;
  877. }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debug helpers: print the object followed by a newline to the debug stream.
LLVM_DUMP_METHOD void LiveRange::dump() const {
  dbgs() << *this << '\n';
}

LLVM_DUMP_METHOD void LiveInterval::SubRange::dump() const {
  dbgs() << *this << '\n';
}

LLVM_DUMP_METHOD void LiveInterval::dump() const {
  dbgs() << *this << '\n';
}
#endif
  889. #ifndef NDEBUG
// Check the internal consistency of the segment list and value numbers.
void LiveRange::verify() const {
  for (const_iterator I = begin(), E = end(); I != E; ++I) {
    // Every segment is a valid, non-empty half-open interval.
    assert(I->start.isValid());
    assert(I->end.isValid());
    assert(I->start < I->end);
    // Its value number must be registered in valnos under its own id.
    assert(I->valno != nullptr);
    assert(I->valno->id < valnos.size());
    assert(I->valno == valnos[I->valno->id]);
    if (std::next(I) != E) {
      // Segments are sorted and non-overlapping; abutting segments must
      // carry different value numbers (otherwise they would be coalesced).
      assert(I->end <= std::next(I)->start);
      if (I->end == std::next(I)->start)
        assert(I->valno != std::next(I)->valno);
    }
  }
}
// Verify the main range plus all subranges of this interval.
void LiveInterval::verify(const MachineRegisterInfo *MRI) const {
  super::verify();

  // Make sure SubRanges are fine and LaneMasks are disjunct.
  LaneBitmask Mask;
  LaneBitmask MaxMask = MRI != nullptr ? MRI->getMaxLaneMaskForVReg(reg)
                                       : LaneBitmask::getAll();
  for (const SubRange &SR : subranges()) {
    // Subrange lanemask should be disjunct to any previous subrange masks.
    assert((Mask & SR.LaneMask).none());
    Mask |= SR.LaneMask;

    // The accumulated subrange masks must be contained in the maximum lane
    // mask for the vreg.
    assert((Mask & ~MaxMask).none());
    // empty subranges must be removed.
    assert(!SR.empty());
    SR.verify();
    // Main liverange should cover subrange.
    assert(covers(SR));
  }
}
  924. #endif
  925. //===----------------------------------------------------------------------===//
  926. // LiveRangeUpdater class
  927. //===----------------------------------------------------------------------===//
  928. //
  929. // The LiveRangeUpdater class always maintains these invariants:
  930. //
  931. // - When LastStart is invalid, Spills is empty and the iterators are invalid.
  932. // This is the initial state, and the state created by flush().
  933. // In this state, isDirty() returns false.
  934. //
  935. // Otherwise, segments are kept in three separate areas:
  936. //
  937. // 1. [begin; WriteI) at the front of LR.
  938. // 2. [ReadI; end) at the back of LR.
  939. // 3. Spills.
  940. //
  941. // - LR.begin() <= WriteI <= ReadI <= LR.end().
  942. // - Segments in all three areas are fully ordered and coalesced.
  943. // - Segments in area 1 precede and can't coalesce with segments in area 2.
  944. // - Segments in Spills precede and can't coalesce with segments in area 2.
  945. // - No coalescing is possible between segments in Spills and segments in area
  946. // 1, and there are no overlapping segments.
  947. //
  948. // The segments in Spills are not ordered with respect to the segments in area
  949. // 1. They need to be merged.
  950. //
  951. // When they exist, Spills.back().start <= LastStart,
  952. // and WriteI[-1].start <= LastStart.
  953. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  954. void LiveRangeUpdater::print(raw_ostream &OS) const {
  955. if (!isDirty()) {
  956. if (LR)
  957. OS << "Clean updater: " << *LR << '\n';
  958. else
  959. OS << "Null updater.\n";
  960. return;
  961. }
  962. assert(LR && "Can't have null LR in dirty updater.");
  963. OS << " updater with gap = " << (ReadI - WriteI)
  964. << ", last start = " << LastStart
  965. << ":\n Area 1:";
  966. for (const auto &S : make_range(LR->begin(), WriteI))
  967. OS << ' ' << S;
  968. OS << "\n Spills:";
  969. for (unsigned I = 0, E = Spills.size(); I != E; ++I)
  970. OS << ' ' << Spills[I];
  971. OS << "\n Area 2:";
  972. for (const auto &S : make_range(ReadI, LR->end()))
  973. OS << ' ' << S;
  974. OS << '\n';
  975. }
// Debug helper: print the updater state to stderr.
LLVM_DUMP_METHOD void LiveRangeUpdater::dump() const {
  print(errs());
}
  979. #endif
  980. // Determine if A and B should be coalesced.
  981. static inline bool coalescable(const LiveRange::Segment &A,
  982. const LiveRange::Segment &B) {
  983. assert(A.start <= B.start && "Unordered live segments.");
  984. if (A.end == B.start)
  985. return A.valno == B.valno;
  986. if (A.end < B.start)
  987. return false;
  988. assert(A.valno == B.valno && "Cannot overlap different values");
  989. return true;
  990. }
/// Insert Seg into the live range being updated, maintaining the class
/// invariants documented above: sorted/coalesced area 1 at the front of LR,
/// area 2 at the back, and not-yet-placed segments parked in Spills until
/// flush().
void LiveRangeUpdater::add(LiveRange::Segment Seg) {
  assert(LR && "Cannot add to a null destination");

  // Fall back to the regular add method if the live range
  // is using the segment set instead of the segment vector.
  if (LR->segmentSet != nullptr) {
    LR->addSegmentToSet(Seg);
    return;
  }

  // Flush the state if Start moves backwards.
  if (!LastStart.isValid() || LastStart > Seg.start) {
    if (isDirty())
      flush();
    // This brings us to an uninitialized state. Reinitialize.
    assert(Spills.empty() && "Leftover spilled segments");
    WriteI = ReadI = LR->begin();
  }

  // Remember start for next time.
  LastStart = Seg.start;

  // Advance ReadI until it ends after Seg.start.
  LiveRange::iterator E = LR->end();
  if (ReadI != E && ReadI->end <= Seg.start) {
    // First try to close the gap between WriteI and ReadI with spills.
    if (ReadI != WriteI)
      mergeSpills();
    // Then advance ReadI. When the gap is closed, a binary search can
    // reposition both iterators; otherwise shift segments into the gap.
    if (ReadI == WriteI)
      ReadI = WriteI = LR->find(Seg.start);
    else
      while (ReadI != E && ReadI->end <= Seg.start)
        *WriteI++ = *ReadI++;
  }

  assert(ReadI == E || ReadI->end > Seg.start);

  // Check if the ReadI segment begins early.
  if (ReadI != E && ReadI->start <= Seg.start) {
    assert(ReadI->valno == Seg.valno && "Cannot overlap different values");
    // Bail if Seg is completely contained in ReadI.
    if (ReadI->end >= Seg.end)
      return;
    // Coalesce into Seg.
    Seg.start = ReadI->start;
    ++ReadI;
  }

  // Coalesce as much as possible from ReadI into Seg.
  while (ReadI != E && coalescable(Seg, *ReadI)) {
    Seg.end = std::max(Seg.end, ReadI->end);
    ++ReadI;
  }

  // Try coalescing Spills.back() into Seg.
  if (!Spills.empty() && coalescable(Spills.back(), Seg)) {
    Seg.start = Spills.back().start;
    Seg.end = std::max(Spills.back().end, Seg.end);
    Spills.pop_back();
  }

  // Try coalescing Seg into WriteI[-1], the last segment of area 1.
  if (WriteI != LR->begin() && coalescable(WriteI[-1], Seg)) {
    WriteI[-1].end = std::max(WriteI[-1].end, Seg.end);
    return;
  }

  // Seg doesn't coalesce with anything, and needs to be inserted somewhere.
  if (WriteI != ReadI) {
    *WriteI++ = Seg;
    return;
  }

  // Finally, append to LR or Spills. With no gap, appending to LR is only
  // safe at the very end; otherwise the segment waits in Spills.
  if (WriteI == E) {
    LR->segments.push_back(Seg);
    WriteI = ReadI = LR->end();
  } else
    Spills.push_back(Seg);
}
// Merge as many spilled segments as possible into the gap between WriteI
// and ReadI. Advance WriteI to reflect the inserted instructions.
void LiveRangeUpdater::mergeSpills() {
  // Perform a backwards merge of Spills and the tail of area 1 (the
  // segments ending at WriteI). The gap [WriteI;ReadI) provides room for
  // NumMoved merged segments.
  size_t GapSize = ReadI - WriteI;
  size_t NumMoved = std::min(Spills.size(), GapSize);
  LiveRange::iterator Src = WriteI;
  LiveRange::iterator Dst = Src + NumMoved;
  LiveRange::iterator SpillSrc = Spills.end();
  LiveRange::iterator B = LR->begin();

  // This is the new WriteI position after merging spills.
  WriteI = Dst;

  // Now merge Src and Spills backwards: at each step, take whichever
  // candidate starts later, keeping the output sorted.
  while (Src != Dst) {
    if (Src != B && Src[-1].start > SpillSrc[-1].start)
      *--Dst = *--Src;
    else
      *--Dst = *--SpillSrc;
  }
  // Exactly NumMoved spills were consumed; drop them from the array.
  assert(NumMoved == size_t(Spills.end() - SpillSrc));
  Spills.erase(SpillSrc, Spills.end());
}
/// Write all buffered state back into LR: resize the WriteI/ReadI gap to fit
/// the spilled segments, merge them in, and return to the clean state.
void LiveRangeUpdater::flush() {
  if (!isDirty())
    return;
  // Clear the dirty state.
  LastStart = SlotIndex();

  assert(LR && "Cannot add to a null destination");

  // Nothing to merge? Just close the gap.
  if (Spills.empty()) {
    LR->segments.erase(WriteI, ReadI);
    LR->verify();
    return;
  }

  // Resize the WriteI - ReadI gap to match Spills.
  size_t GapSize = ReadI - WriteI;
  if (GapSize < Spills.size()) {
    // The gap is too small. Make some room.
    size_t WritePos = WriteI - LR->begin();
    LR->segments.insert(ReadI, Spills.size() - GapSize, LiveRange::Segment());
    // This also invalidated ReadI, but it is recomputed below.
    WriteI = LR->begin() + WritePos;
  } else {
    // Shrink the gap if necessary.
    LR->segments.erase(WriteI + Spills.size(), ReadI);
  }
  ReadI = WriteI + Spills.size();
  // With the gap sized exactly to Spills, mergeSpills() consumes them all.
  mergeSpills();
  LR->verify();
}
/// Partition the value numbers of LR into connected components: values are
/// joined when liveness flows between them (PHI values with their
/// predecessors' live-out values, and redefs with the value live before
/// them). All unused values are lumped into one class with the last used
/// value. Returns the number of resulting equivalence classes.
unsigned ConnectedVNInfoEqClasses::Classify(const LiveRange &LR) {
  // Create initial equivalence classes.
  EqClass.clear();
  EqClass.grow(LR.getNumValNums());

  const VNInfo *used = nullptr, *unused = nullptr;

  // Determine connections.
  for (const VNInfo *VNI : LR.valnos) {
    // Group all unused values into one class.
    if (VNI->isUnused()) {
      if (unused)
        EqClass.join(unused->id, VNI->id);
      unused = VNI;
      continue;
    }
    used = VNI;
    if (VNI->isPHIDef()) {
      const MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
      assert(MBB && "Phi-def has no defining MBB");
      // Connect to values live out of predecessors.
      for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
           PE = MBB->pred_end(); PI != PE; ++PI)
        if (const VNInfo *PVNI = LR.getVNInfoBefore(LIS.getMBBEndIdx(*PI)))
          EqClass.join(VNI->id, PVNI->id);
    } else {
      // Normal value defined by an instruction. Check for two-addr redef.
      // FIXME: This could be coincidental. Should we really check for a tied
      // operand constraint?
      // Note that VNI->def may be a use slot for an early clobber def.
      if (const VNInfo *UVNI = LR.getVNInfoBefore(VNI->def))
        EqClass.join(VNI->id, UVNI->id);
    }
  }

  // Lump all the unused values in with the last used value.
  if (used && unused)
    EqClass.join(used->id, unused->id);

  EqClass.compress();
  return EqClass.getNumClasses();
}
/// Distribute the liveness of LI among the intervals in LIV according to the
/// equivalence classes computed by Classify(). Class 0 stays in LI; operands
/// whose value belongs to class C > 0 are rewritten to LIV[C-1]->reg, and
/// subregister ranges are split correspondingly.
void ConnectedVNInfoEqClasses::Distribute(LiveInterval &LI, LiveInterval *LIV[],
                                          MachineRegisterInfo &MRI) {
  // Rewrite instructions.
  for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(LI.reg),
       RE = MRI.reg_end(); RI != RE;) {
    MachineOperand &MO = *RI;
    MachineInstr *MI = RI->getParent();
    // Advance before the MO.setReg() below, which presumably moves MO off
    // this register's use list and would invalidate RI.
    ++RI;
    // DBG_VALUE instructions don't have slot indexes, so get the index of the
    // instruction before them.
    // Normally, DBG_VALUE instructions are removed before this function is
    // called, but it is not a requirement.
    SlotIndex Idx;
    if (MI->isDebugValue())
      Idx = LIS.getSlotIndexes()->getIndexBefore(*MI);
    else
      Idx = LIS.getInstructionIndex(*MI);
    LiveQueryResult LRQ = LI.Query(Idx);
    const VNInfo *VNI = MO.readsReg() ? LRQ.valueIn() : LRQ.valueDefined();
    // In the case of an <undef> use that isn't tied to any def, VNI will be
    // NULL. If the use is tied to a def, VNI will be the defined value.
    if (!VNI)
      continue;
    if (unsigned EqClass = getEqClass(VNI))
      MO.setReg(LIV[EqClass-1]->reg);
  }

  // Distribute subregister liveranges.
  if (LI.hasSubRanges()) {
    unsigned NumComponents = EqClass.getNumClasses();
    SmallVector<unsigned, 8> VNIMapping;
    SmallVector<LiveInterval::SubRange*, 8> SubRanges;
    BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
    for (LiveInterval::SubRange &SR : LI.subranges()) {
      // Create new subranges in the split intervals and construct a mapping
      // for the VNInfos in the subrange.
      unsigned NumValNos = SR.valnos.size();
      VNIMapping.clear();
      VNIMapping.reserve(NumValNos);
      SubRanges.clear();
      SubRanges.resize(NumComponents-1, nullptr);
      for (unsigned I = 0; I < NumValNos; ++I) {
        const VNInfo &VNI = *SR.valnos[I];
        unsigned ComponentNum;
        if (VNI.isUnused()) {
          ComponentNum = 0;
        } else {
          // A subrange value belongs to the class of the main-range value
          // live at its def.
          const VNInfo *MainRangeVNI = LI.getVNInfoAt(VNI.def);
          assert(MainRangeVNI != nullptr
                 && "SubRange def must have corresponding main range def");
          ComponentNum = getEqClass(MainRangeVNI);
          // Lazily create the destination subrange on first use.
          if (ComponentNum > 0 && SubRanges[ComponentNum-1] == nullptr) {
            SubRanges[ComponentNum-1]
              = LIV[ComponentNum-1]->createSubRange(Allocator, SR.LaneMask);
          }
        }
        VNIMapping.push_back(ComponentNum);
      }
      DistributeRange(SR, SubRanges.data(), VNIMapping);
    }
    LI.removeEmptySubRanges();
  }

  // Distribute main liverange.
  DistributeRange(LI, LIV, EqClass);
}