LiveInterval.cpp 44 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369
  1. //===- LiveInterval.cpp - Live Interval Representation --------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the LiveRange and LiveInterval classes. Given some
  11. // numbering of each the machine instructions an interval [i, j) is said to be a
  12. // live range for register v if there is no instruction with number j' >= j
  13. // such that v is live at j' and there is no instruction with number i' < i such
  14. // that v is live at i'. In this implementation ranges can have holes,
  15. // i.e. a range might look like [1,20), [50,65), [1000,1001). Each
  16. // individual segment is represented as an instance of LiveRange::Segment,
  17. // and the whole range is represented as an instance of LiveRange.
  18. //
  19. //===----------------------------------------------------------------------===//
  20. #include "llvm/CodeGen/LiveInterval.h"
  21. #include "LiveRangeUtils.h"
  22. #include "RegisterCoalescer.h"
  23. #include "llvm/ADT/ArrayRef.h"
  24. #include "llvm/ADT/STLExtras.h"
  25. #include "llvm/ADT/SmallPtrSet.h"
  26. #include "llvm/ADT/SmallVector.h"
  27. #include "llvm/ADT/iterator_range.h"
  28. #include "llvm/CodeGen/LiveIntervals.h"
  29. #include "llvm/CodeGen/MachineBasicBlock.h"
  30. #include "llvm/CodeGen/MachineInstr.h"
  31. #include "llvm/CodeGen/MachineOperand.h"
  32. #include "llvm/CodeGen/MachineRegisterInfo.h"
  33. #include "llvm/CodeGen/SlotIndexes.h"
  34. #include "llvm/CodeGen/TargetRegisterInfo.h"
  35. #include "llvm/MC/LaneBitmask.h"
  36. #include "llvm/Support/Compiler.h"
  37. #include "llvm/Support/Debug.h"
  38. #include "llvm/Support/raw_ostream.h"
  39. #include <algorithm>
  40. #include <cassert>
  41. #include <cstddef>
  42. #include <iterator>
  43. #include <utility>
  44. using namespace llvm;
  45. namespace {
  46. //===----------------------------------------------------------------------===//
  47. // Implementation of various methods necessary for calculation of live ranges.
  48. // The implementation of the methods abstracts from the concrete type of the
  49. // segment collection.
  50. //
  51. // Implementation of the class follows the Template design pattern. The base
  52. // class contains generic algorithms that call collection-specific methods,
  53. // which are provided in concrete subclasses. In order to avoid virtual calls
  54. // these methods are provided by means of C++ template instantiation.
  55. // The base class calls the methods of the subclass through method impl(),
  56. // which casts 'this' pointer to the type of the subclass.
  57. //
  58. //===----------------------------------------------------------------------===//
/// Generic implementation of the live-range calculation algorithms.
///
/// The collection-specific operations (segmentsColl, insertAtEnd, find,
/// findInsertPos) are supplied by the ImplT subclass and reached through
/// impl() without virtual dispatch (CRTP, as described in the header
/// comment above).
template <typename ImplT, typename IteratorT, typename CollectionT>
class CalcLiveRangeUtilBase {
protected:
  /// The live range being computed / modified.
  LiveRange *LR;

protected:
  CalcLiveRangeUtilBase(LiveRange *LR) : LR(LR) {}

public:
  using Segment = LiveRange::Segment;
  using iterator = IteratorT;

  /// A counterpart of LiveRange::createDeadDef: Make sure the range has a
  /// value defined at @p Def.
  /// If @p ForVNI is null, and there is no value defined at @p Def, a new
  /// value will be allocated using @p VNInfoAllocator.
  /// If @p ForVNI is null, the return value is the value defined at @p Def,
  /// either a pre-existing one, or the one newly created.
  /// If @p ForVNI is not null, then @p Def should be the location where
  /// @p ForVNI is defined. If the range does not have a value defined at
  /// @p Def, the value @p ForVNI will be used instead of allocating a new
  /// one. If the range already has a value defined at @p Def, it must be
  /// same as @p ForVNI. In either case, @p ForVNI will be the return value.
  VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator *VNInfoAllocator,
                        VNInfo *ForVNI) {
    assert(!Def.isDead() && "Cannot define a value at the dead slot");
    assert((!ForVNI || ForVNI->def == Def) &&
           "If ForVNI is specified, it must match Def");
    iterator I = impl().find(Def);
    // No segment at or after Def: append a fresh dead-def segment.
    if (I == segments().end()) {
      VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
      impl().insertAtEnd(Segment(Def, Def.getDeadSlot(), VNI));
      return VNI;
    }

    Segment *S = segmentAt(I);
    // A segment already starts at this instruction: reuse its value number.
    if (SlotIndex::isSameInstr(Def, S->start)) {
      assert((!ForVNI || ForVNI == S->valno) && "Value number mismatch");
      assert(S->valno->def == S->start && "Inconsistent existing value def");

      // It is possible to have both normal and early-clobber defs of the same
      // register on an instruction. It doesn't make a lot of sense, but it is
      // possible to specify in inline assembly.
      //
      // Just convert everything to early-clobber.
      Def = std::min(Def, S->start);
      if (Def != S->start)
        S->start = S->valno->def = Def;
      return S->valno;
    }
    assert(SlotIndex::isEarlierInstr(Def, S->start) && "Already live at def");
    // Def lies in a gap before segment I: insert a new dead-def segment there.
    VNInfo *VNI = ForVNI ? ForVNI : LR->getNextValue(Def, *VNInfoAllocator);
    segments().insert(I, Segment(Def, Def.getDeadSlot(), VNI));
    return VNI;
  }

  /// Extend the segment reaching into [StartIdx, Use) so that it covers the
  /// use at @p Use, and return its value number. Returns null when no segment
  /// overlaps that interval.
  VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Use) {
    if (segments().empty())
      return nullptr;
    iterator I =
        impl().findInsertPos(Segment(Use.getPrevSlot(), Use, nullptr));
    if (I == segments().begin())
      return nullptr;
    --I;
    // The candidate segment ends before the block starts: nothing to extend.
    if (I->end <= StartIdx)
      return nullptr;
    if (I->end < Use)
      extendSegmentEndTo(I, Use);
    return I->valno;
  }

  /// Variant of extendInBlock that also consults @p Undefs: the bool in the
  /// returned pair is true when an explicit undef point lies between the
  /// live-out point and the use, in which case no extension is performed.
  std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                        SlotIndex StartIdx, SlotIndex Use) {
    if (segments().empty())
      return std::make_pair(nullptr, false);
    SlotIndex BeforeUse = Use.getPrevSlot();
    iterator I = impl().findInsertPos(Segment(BeforeUse, Use, nullptr));
    if (I == segments().begin())
      return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
    --I;
    if (I->end <= StartIdx)
      return std::make_pair(nullptr, LR->isUndefIn(Undefs, StartIdx, BeforeUse));
    if (I->end < Use) {
      // An undef between the segment's end and the use blocks the extension.
      if (LR->isUndefIn(Undefs, I->end, BeforeUse))
        return std::make_pair(nullptr, true);
      extendSegmentEndTo(I, Use);
    }
    return std::make_pair(I->valno, false);
  }

  /// This method is used when we want to extend the segment specified
  /// by I to end at the specified endpoint. To do this, we should
  /// merge and eliminate all segments that this will overlap
  /// with. The iterator is not invalidated.
  void extendSegmentEndTo(iterator I, SlotIndex NewEnd) {
    assert(I != segments().end() && "Not a valid segment!");
    Segment *S = segmentAt(I);
    VNInfo *ValNo = I->valno;

    // Search for the first segment that we can't merge with.
    iterator MergeTo = std::next(I);
    for (; MergeTo != segments().end() && NewEnd >= MergeTo->end; ++MergeTo)
      assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");

    // If NewEnd was in the middle of a segment, make sure to get its endpoint.
    S->end = std::max(NewEnd, std::prev(MergeTo)->end);

    // If the newly formed segment now touches the segment after it and if they
    // have the same value number, merge the two segments into one segment.
    if (MergeTo != segments().end() && MergeTo->start <= I->end &&
        MergeTo->valno == ValNo) {
      S->end = MergeTo->end;
      ++MergeTo;
    }

    // Erase any dead segments.
    segments().erase(std::next(I), MergeTo);
  }

  /// This method is used when we want to extend the segment specified
  /// by I to start at the specified endpoint. To do this, we should
  /// merge and eliminate all segments that this will overlap with.
  iterator extendSegmentStartTo(iterator I, SlotIndex NewStart) {
    assert(I != segments().end() && "Not a valid segment!");
    Segment *S = segmentAt(I);
    VNInfo *ValNo = I->valno;

    // Search for the first segment that we can't merge with.
    iterator MergeTo = I;
    do {
      if (MergeTo == segments().begin()) {
        // Every earlier segment is swallowed: just move the start point.
        S->start = NewStart;
        segments().erase(MergeTo, I);
        return I;
      }
      assert(MergeTo->valno == ValNo && "Cannot merge with differing values!");
      --MergeTo;
    } while (NewStart <= MergeTo->start);

    // If we start in the middle of another segment, just delete a range and
    // extend that segment.
    if (MergeTo->end >= NewStart && MergeTo->valno == ValNo) {
      segmentAt(MergeTo)->end = S->end;
    } else {
      // Otherwise, extend the segment right after.
      ++MergeTo;
      Segment *MergeToSeg = segmentAt(MergeTo);
      MergeToSeg->start = NewStart;
      MergeToSeg->end = S->end;
    }

    segments().erase(std::next(MergeTo), std::next(I));
    return MergeTo;
  }

  /// Insert segment @p S, merging it with any adjacent or overlapping
  /// segments that carry the same value number. Returns an iterator to the
  /// (possibly merged) segment that now contains S.
  iterator addSegment(Segment S) {
    SlotIndex Start = S.start, End = S.end;
    iterator I = impl().findInsertPos(S);

    // If the inserted segment starts in the middle or right at the end of
    // another segment, just extend that segment to contain the segment of S.
    if (I != segments().begin()) {
      iterator B = std::prev(I);
      if (S.valno == B->valno) {
        if (B->start <= Start && B->end >= Start) {
          extendSegmentEndTo(B, End);
          return B;
        }
      } else {
        // Check to make sure that we are not overlapping two live segments with
        // different valno's.
        assert(B->end <= Start &&
               "Cannot overlap two segments with differing ValID's"
               " (did you def the same reg twice in a MachineInstr?)");
      }
    }

    // Otherwise, if this segment ends in the middle of, or right next
    // to, another segment, merge it into that segment.
    if (I != segments().end()) {
      if (S.valno == I->valno) {
        if (I->start <= End) {
          I = extendSegmentStartTo(I, Start);

          // If S is a complete superset of a segment, we may need to grow its
          // endpoint as well.
          if (End > I->end)
            extendSegmentEndTo(I, End);
          return I;
        }
      } else {
        // Check to make sure that we are not overlapping two live segments with
        // different valno's.
        assert(I->start >= End &&
               "Cannot overlap two segments with differing ValID's");
      }
    }

    // Otherwise, this is just a new segment that doesn't interact with
    // anything.
    // Insert it.
    return segments().insert(I, S);
  }

private:
  /// Downcast to the concrete subclass (CRTP, no virtual dispatch).
  ImplT &impl() { return *static_cast<ImplT *>(this); }

  /// The underlying segment collection, provided by the subclass.
  CollectionT &segments() { return impl().segmentsColl(); }

  // NOTE: set iterators yield const Segments; const_cast lets the algorithms
  // above adjust endpoints in place. Callers are responsible for keeping the
  // collection's ordering invariant intact.
  Segment *segmentAt(iterator I) { return const_cast<Segment *>(&(*I)); }
};
  246. //===----------------------------------------------------------------------===//
  247. // Instantiation of the methods for calculation of live ranges
  248. // based on a segment vector.
  249. //===----------------------------------------------------------------------===//
class CalcLiveRangeUtilVector;
using CalcLiveRangeUtilVectorBase =
    CalcLiveRangeUtilBase<CalcLiveRangeUtilVector, LiveRange::iterator,
                          LiveRange::Segments>;

/// Instantiation of CalcLiveRangeUtilBase that operates directly on the
/// LiveRange's segment vector.
class CalcLiveRangeUtilVector : public CalcLiveRangeUtilVectorBase {
public:
  CalcLiveRangeUtilVector(LiveRange *LR) : CalcLiveRangeUtilVectorBase(LR) {}

private:
  // The methods below implement the collection interface required by the
  // CRTP base class; their names are part of that contract.
  friend CalcLiveRangeUtilVectorBase;

  LiveRange::Segments &segmentsColl() { return LR->segments; }

  void insertAtEnd(const Segment &S) { LR->segments.push_back(S); }

  iterator find(SlotIndex Pos) { return LR->find(Pos); }

  // First segment ordered strictly after S.start; inserting there keeps the
  // vector sorted.
  iterator findInsertPos(Segment S) {
    return std::upper_bound(LR->begin(), LR->end(), S.start);
  }
};
  266. //===----------------------------------------------------------------------===//
  267. // Instantiation of the methods for calculation of live ranges
  268. // based on a segment set.
  269. //===----------------------------------------------------------------------===//
class CalcLiveRangeUtilSet;
using CalcLiveRangeUtilSetBase =
    CalcLiveRangeUtilBase<CalcLiveRangeUtilSet, LiveRange::SegmentSet::iterator,
                          LiveRange::SegmentSet>;

/// Instantiation of CalcLiveRangeUtilBase that operates on the LiveRange's
/// auxiliary segment set (active while segmentSet is non-null).
class CalcLiveRangeUtilSet : public CalcLiveRangeUtilSetBase {
public:
  CalcLiveRangeUtilSet(LiveRange *LR) : CalcLiveRangeUtilSetBase(LR) {}

private:
  // The methods below implement the collection interface required by the
  // CRTP base class; their names are part of that contract.
  friend CalcLiveRangeUtilSetBase;

  LiveRange::SegmentSet &segmentsColl() { return *LR->segmentSet; }

  void insertAtEnd(const Segment &S) {
    // The end() hint makes in-order insertion cheap for the common
    // append-at-the-back case.
    LR->segmentSet->insert(LR->segmentSet->end(), S);
  }

  /// Return the segment covering @p Pos if one exists, otherwise the first
  /// segment ordered after it.
  iterator find(SlotIndex Pos) {
    iterator I =
        LR->segmentSet->upper_bound(Segment(Pos, Pos.getNextSlot(), nullptr));
    if (I == LR->segmentSet->begin())
      return I;
    // The previous segment may still extend past Pos; prefer it if so.
    iterator PrevI = std::prev(I);
    if (Pos < (*PrevI).end)
      return PrevI;
    return I;
  }

  iterator findInsertPos(Segment S) {
    iterator I = LR->segmentSet->upper_bound(S);
    // NOTE(review): when *I compares equivalent to S.start, step past it —
    // presumably to position the insert after a segment starting exactly at
    // S.start; relies on Segment's heterogeneous ordering vs SlotIndex.
    if (I != LR->segmentSet->end() && !(S.start < *I))
      ++I;
    return I;
  }
};
  300. } // end anonymous namespace
  301. //===----------------------------------------------------------------------===//
  302. // LiveRange methods
  303. //===----------------------------------------------------------------------===//
  304. LiveRange::iterator LiveRange::find(SlotIndex Pos) {
  305. // This algorithm is basically std::upper_bound.
  306. // Unfortunately, std::upper_bound cannot be used with mixed types until we
  307. // adopt C++0x. Many libraries can do it, but not all.
  308. if (empty() || Pos >= endIndex())
  309. return end();
  310. iterator I = begin();
  311. size_t Len = size();
  312. do {
  313. size_t Mid = Len >> 1;
  314. if (Pos < I[Mid].end) {
  315. Len = Mid;
  316. } else {
  317. I += Mid + 1;
  318. Len -= Mid + 1;
  319. }
  320. } while (Len);
  321. return I;
  322. }
  323. VNInfo *LiveRange::createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc) {
  324. // Use the segment set, if it is available.
  325. if (segmentSet != nullptr)
  326. return CalcLiveRangeUtilSet(this).createDeadDef(Def, &VNIAlloc, nullptr);
  327. // Otherwise use the segment vector.
  328. return CalcLiveRangeUtilVector(this).createDeadDef(Def, &VNIAlloc, nullptr);
  329. }
  330. VNInfo *LiveRange::createDeadDef(VNInfo *VNI) {
  331. // Use the segment set, if it is available.
  332. if (segmentSet != nullptr)
  333. return CalcLiveRangeUtilSet(this).createDeadDef(VNI->def, nullptr, VNI);
  334. // Otherwise use the segment vector.
  335. return CalcLiveRangeUtilVector(this).createDeadDef(VNI->def, nullptr, VNI);
  336. }
// overlaps - Return true if the intersection of the two live ranges is
// not empty.
//
// An example for overlaps():
//
// 0: A = ...
// 4: B = ...
// 8: C = A + B ;; last use of A
//
// The live ranges should look like:
//
// A = [3, 11)
// B = [7, x)
// C = [11, y)
//
// A->overlaps(C) should return false since we want to be able to join
// A and C.
//
bool LiveRange::overlapsFrom(const LiveRange& other,
                             const_iterator StartPos) const {
  assert(!empty() && "empty range");
  const_iterator i = begin();
  const_iterator ie = end();
  const_iterator j = StartPos;
  const_iterator je = other.end();

  assert((StartPos->start <= i->start || StartPos == other.begin()) &&
         StartPos != other.end() && "Bogus start position hint!");

  if (i->start < j->start) {
    // Our range starts first: skip our segments that end before j begins.
    i = std::upper_bound(i, ie, j->start);
    if (i != begin()) --i;
  } else if (j->start < i->start) {
    // The other range starts first: advance j past segments that cannot
    // overlap our first segment.
    ++StartPos;
    if (StartPos != other.end() && StartPos->start <= i->start) {
      assert(StartPos < other.end() && i < end());
      j = std::upper_bound(j, je, i->start);
      if (j != other.begin()) --j;
    }
  } else {
    // Both ranges have a segment starting at the same index: overlap.
    return true;
  }

  if (j == je) return false;

  // March the two segment lists in lockstep, swapping so that i always
  // designates the segment with the earlier (or equal) start.
  while (i != ie) {
    if (i->start > j->start) {
      std::swap(i, j);
      std::swap(ie, je);
    }

    if (i->end > j->start)
      return true;
    ++i;
  }

  return false;
}
/// Return true if this range overlaps @p Other, ignoring overlaps that occur
/// at coalescable copy instructions as determined by @p CP.
bool LiveRange::overlaps(const LiveRange &Other, const CoalescerPair &CP,
                         const SlotIndexes &Indexes) const {
  assert(!empty() && "empty range");
  if (Other.empty())
    return false;

  // Use binary searches to find initial positions.
  const_iterator I = find(Other.beginIndex());
  const_iterator IE = end();
  if (I == IE)
    return false;
  const_iterator J = Other.find(I->start);
  const_iterator JE = Other.end();
  if (J == JE)
    return false;

  while (true) {
    // J has just been advanced to satisfy:
    assert(J->end >= I->start);
    // Check for an overlap.
    if (J->start < I->end) {
      // I and J are overlapping. Find the later start.
      SlotIndex Def = std::max(I->start, J->start);
      // Allow the overlap if Def is a coalescable copy.
      if (Def.isBlock() ||
          !CP.isCoalescable(Indexes.getInstructionFromIndex(Def)))
        return true;
    }
    // Advance the iterator that ends first to check for more overlaps.
    if (J->end > I->end) {
      std::swap(I, J);
      std::swap(IE, JE);
    }

    // Advance J until J->end >= I->start.
    do
      if (++J == JE)
        return false;
    while (J->end < I->start);
  }
}
  427. /// overlaps - Return true if the live range overlaps an interval specified
  428. /// by [Start, End).
  429. bool LiveRange::overlaps(SlotIndex Start, SlotIndex End) const {
  430. assert(Start < End && "Invalid range");
  431. const_iterator I = std::lower_bound(begin(), end(), End);
  432. return I != begin() && (--I)->end > Start;
  433. }
/// Return true if every point live in @p Other is also live in this range
/// (segment-wise containment; value numbers are not compared).
bool LiveRange::covers(const LiveRange &Other) const {
  if (empty())
    return Other.empty();

  const_iterator I = begin();
  for (const Segment &O : Other.segments) {
    I = advanceTo(I, O.start);
    // O.start must fall inside one of our segments.
    if (I == end() || I->start > O.start)
      return false;

    // Check adjacent live segments and see if we can get behind O.end.
    while (I->end < O.end) {
      const_iterator Last = I;
      // Get next segment and abort if it was not adjacent.
      ++I;
      if (I == end() || Last->end != I->start)
        return false;
    }
  }
  return true;
}
  453. /// ValNo is dead, remove it. If it is the largest value number, just nuke it
  454. /// (and any other deleted values neighboring it), otherwise mark it as ~1U so
  455. /// it can be nuked later.
  456. void LiveRange::markValNoForDeletion(VNInfo *ValNo) {
  457. if (ValNo->id == getNumValNums()-1) {
  458. do {
  459. valnos.pop_back();
  460. } while (!valnos.empty() && valnos.back()->isUnused());
  461. } else {
  462. ValNo->markUnused();
  463. }
  464. }
  465. /// RenumberValues - Renumber all values in order of appearance and delete the
  466. /// remaining unused values.
  467. void LiveRange::RenumberValues() {
  468. SmallPtrSet<VNInfo*, 8> Seen;
  469. valnos.clear();
  470. for (const Segment &S : segments) {
  471. VNInfo *VNI = S.valno;
  472. if (!Seen.insert(VNI).second)
  473. continue;
  474. assert(!VNI->isUnused() && "Unused valno used by live segment");
  475. VNI->id = (unsigned)valnos.size();
  476. valnos.push_back(VNI);
  477. }
  478. }
/// Helper of addSegment(): insert \p S through the segment-set based
/// calculation utility. Only meaningful while segmentSet is non-null.
void LiveRange::addSegmentToSet(Segment S) {
  CalcLiveRangeUtilSet(this).addSegment(S);
}
  482. LiveRange::iterator LiveRange::addSegment(Segment S) {
  483. // Use the segment set, if it is available.
  484. if (segmentSet != nullptr) {
  485. addSegmentToSet(S);
  486. return end();
  487. }
  488. // Otherwise use the segment vector.
  489. return CalcLiveRangeUtilVector(this).addSegment(S);
  490. }
/// Append \p S after the current last segment without any merging or
/// sorting; \p S must not start before the last segment ends.
void LiveRange::append(const Segment S) {
  // Check that the segment belongs to the back of the list.
  assert(segments.empty() || segments.back().end <= S.start);
  segments.push_back(S);
}
  496. std::pair<VNInfo*,bool> LiveRange::extendInBlock(ArrayRef<SlotIndex> Undefs,
  497. SlotIndex StartIdx, SlotIndex Kill) {
  498. // Use the segment set, if it is available.
  499. if (segmentSet != nullptr)
  500. return CalcLiveRangeUtilSet(this).extendInBlock(Undefs, StartIdx, Kill);
  501. // Otherwise use the segment vector.
  502. return CalcLiveRangeUtilVector(this).extendInBlock(Undefs, StartIdx, Kill);
  503. }
  504. VNInfo *LiveRange::extendInBlock(SlotIndex StartIdx, SlotIndex Kill) {
  505. // Use the segment set, if it is available.
  506. if (segmentSet != nullptr)
  507. return CalcLiveRangeUtilSet(this).extendInBlock(StartIdx, Kill);
  508. // Otherwise use the segment vector.
  509. return CalcLiveRangeUtilVector(this).extendInBlock(StartIdx, Kill);
  510. }
/// Remove the specified segment from this range. Note that the segment must
/// be in a single Segment in its entirety.
void LiveRange::removeSegment(SlotIndex Start, SlotIndex End,
                              bool RemoveDeadValNo) {
  // Find the Segment containing this span.
  iterator I = find(Start);
  assert(I != end() && "Segment is not in range!");
  assert(I->containsInterval(Start, End)
         && "Segment is not entirely in range!");

  // If the span we are removing is at the start of the Segment, adjust it.
  VNInfo *ValNo = I->valno;
  if (I->start == Start) {
    if (I->end == End) {
      if (RemoveDeadValNo) {
        // Check if val# is dead: no other segment may still use it.
        bool isDead = true;
        for (const_iterator II = begin(), EE = end(); II != EE; ++II)
          if (II != I && II->valno == ValNo) {
            isDead = false;
            break;
          }
        if (isDead) {
          // Now that ValNo is dead, remove it.
          markValNoForDeletion(ValNo);
        }
      }

      segments.erase(I);  // Removed the whole Segment.
    } else
      I->start = End;
    return;
  }

  // Otherwise if the span we are removing is at the end of the Segment,
  // adjust the other way.
  if (I->end == End) {
    I->end = Start;
    return;
  }

  // Otherwise, we are splitting the Segment into two pieces.
  SlotIndex OldEnd = I->end;
  I->end = Start;  // Trim the old segment.

  // Insert the new one.
  segments.insert(std::next(I), Segment(End, OldEnd, ValNo));
}
  554. /// removeValNo - Remove all the segments defined by the specified value#.
  555. /// Also remove the value# from value# list.
  556. void LiveRange::removeValNo(VNInfo *ValNo) {
  557. if (empty()) return;
  558. segments.erase(remove_if(*this, [ValNo](const Segment &S) {
  559. return S.valno == ValNo;
  560. }), end());
  561. // Now that ValNo is dead, remove it.
  562. markValNoForDeletion(ValNo);
  563. }
/// Join this live range with @p Other, rewriting both ranges' value-number
/// ids through the supplied assignment tables into the values of
/// @p NewVNInfo. Other is not valid after the call (see the comment at the
/// Other-rewrite step below).
void LiveRange::join(LiveRange &Other,
                     const int *LHSValNoAssignments,
                     const int *RHSValNoAssignments,
                     SmallVectorImpl<VNInfo *> &NewVNInfo) {
  verify();

  // Determine if any of our values are mapped. This is uncommon, so we want
  // to avoid the range scan if not.
  bool MustMapCurValNos = false;
  unsigned NumVals = getNumValNums();
  unsigned NumNewVals = NewVNInfo.size();
  for (unsigned i = 0; i != NumVals; ++i) {
    unsigned LHSValID = LHSValNoAssignments[i];
    if (i != LHSValID ||
        (NewVNInfo[LHSValID] && NewVNInfo[LHSValID] != getValNumInfo(i))) {
      MustMapCurValNos = true;
      break;
    }
  }

  // If we have to apply a mapping to our base range assignment, rewrite it now.
  if (MustMapCurValNos && !empty()) {
    // Map the first live range. OutIt trails I, compacting merged segments
    // in place.
    iterator OutIt = begin();
    OutIt->valno = NewVNInfo[LHSValNoAssignments[OutIt->valno->id]];
    for (iterator I = std::next(OutIt), E = end(); I != E; ++I) {
      VNInfo* nextValNo = NewVNInfo[LHSValNoAssignments[I->valno->id]];
      assert(nextValNo && "Huh?");

      // If this live range has the same value # as its immediate predecessor,
      // and if they are neighbors, remove one Segment. This happens when we
      // have [0,4:0)[4,7:1) and map 0/1 onto the same value #.
      if (OutIt->valno == nextValNo && OutIt->end == I->start) {
        OutIt->end = I->end;
      } else {
        // Didn't merge. Move OutIt to the next segment,
        ++OutIt;
        OutIt->valno = nextValNo;
        if (OutIt != I) {
          OutIt->start = I->start;
          OutIt->end = I->end;
        }
      }
    }
    // If we merge some segments, chop off the end.
    ++OutIt;
    segments.erase(OutIt, end());
  }

  // Rewrite Other values before changing the VNInfo ids.
  // This can leave Other in an invalid state because we're not coalescing
  // touching segments that now have identical values. That's OK since Other is
  // not supposed to be valid after calling join();
  for (Segment &S : Other.segments)
    S.valno = NewVNInfo[RHSValNoAssignments[S.valno->id]];

  // Update val# info. Renumber them and make sure they all belong to this
  // LiveRange now. Also remove dead val#'s.
  unsigned NumValNos = 0;
  for (unsigned i = 0; i < NumNewVals; ++i) {
    VNInfo *VNI = NewVNInfo[i];
    if (VNI) {
      if (NumValNos >= NumVals)
        valnos.push_back(VNI);
      else
        valnos[NumValNos] = VNI;
      VNI->id = NumValNos++;  // Renumber val#.
    }
  }
  if (NumNewVals < NumVals)
    valnos.resize(NumNewVals);  // shrinkify

  // Okay, now insert the RHS live segments into the LHS.
  LiveRangeUpdater Updater(this);
  for (Segment &S : Other.segments)
    Updater.add(S);
}
  635. /// Merge all of the segments in RHS into this live range as the specified
  636. /// value number. The segments in RHS are allowed to overlap with segments in
  637. /// the current range, but only if the overlapping segments have the
  638. /// specified value number.
  639. void LiveRange::MergeSegmentsInAsValue(const LiveRange &RHS,
  640. VNInfo *LHSValNo) {
  641. LiveRangeUpdater Updater(this);
  642. for (const Segment &S : RHS.segments)
  643. Updater.add(S.start, S.end, LHSValNo);
  644. }
  645. /// MergeValueInAsValue - Merge all of the live segments of a specific val#
  646. /// in RHS into this live range as the specified value number.
  647. /// The segments in RHS are allowed to overlap with segments in the
  648. /// current range, it will replace the value numbers of the overlaped
  649. /// segments with the specified value number.
  650. void LiveRange::MergeValueInAsValue(const LiveRange &RHS,
  651. const VNInfo *RHSValNo,
  652. VNInfo *LHSValNo) {
  653. LiveRangeUpdater Updater(this);
  654. for (const Segment &S : RHS.segments)
  655. if (S.valno == RHSValNo)
  656. Updater.add(S.start, S.end, LHSValNo);
  657. }
/// MergeValueNumberInto - This method is called when two value numbers
/// are found to be equivalent. This eliminates V1, replacing all
/// segments with the V1 value number with the V2 value number. This can
/// cause merging of V1/V2 values numbers and compaction of the value space.
VNInfo *LiveRange::MergeValueNumberInto(VNInfo *V1, VNInfo *V2) {
  assert(V1 != V2 && "Identical value#'s are always equivalent!");

  // This code actually merges the (numerically) larger value number into the
  // smaller value number, which is likely to allow us to compactify the value
  // space. The only thing we have to be careful of is to preserve the
  // instruction that defines the result value.

  // Make sure V2 is smaller than V1.
  if (V1->id < V2->id) {
    V1->copyFrom(*V2);
    std::swap(V1, V2);
  }

  // Merge V1 segments into V2.
  for (iterator I = begin(); I != end(); ) {
    iterator S = I++;
    if (S->valno != V1) continue;  // Not a V1 Segment.

    // Okay, we found a V1 live range. If it had a previous, touching, V2 live
    // range, extend it.
    if (S != begin()) {
      iterator Prev = S-1;
      if (Prev->valno == V2 && Prev->end == S->start) {
        Prev->end = S->end;

        // Erase this live-range. Iterators into the vector are recomputed
        // from Prev since erase invalidates everything at or after S.
        segments.erase(S);
        I = Prev+1;
        S = Prev;
      }
    }

    // Okay, now we have a V1 or V2 live range that is maximally merged forward.
    // Ensure that it is a V2 live-range.
    S->valno = V2;

    // If we can merge it into later V2 segments, do so now. We ignore any
    // following V1 segments, as they will be merged in subsequent iterations
    // of the loop.
    if (I != end()) {
      if (I->start == S->end && I->valno == V2) {
        S->end = I->end;
        segments.erase(I);
        I = S+1;
      }
    }
  }

  // Now that V1 is dead, remove it.
  markValNoForDeletion(V1);
  return V2;
}
  707. void LiveRange::flushSegmentSet() {
  708. assert(segmentSet != nullptr && "segment set must have been created");
  709. assert(
  710. segments.empty() &&
  711. "segment set can be used only initially before switching to the array");
  712. segments.append(segmentSet->begin(), segmentSet->end());
  713. segmentSet = nullptr;
  714. verify();
  715. }
  716. bool LiveRange::isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const {
  717. ArrayRef<SlotIndex>::iterator SlotI = Slots.begin();
  718. ArrayRef<SlotIndex>::iterator SlotE = Slots.end();
  719. // If there are no regmask slots, we have nothing to search.
  720. if (SlotI == SlotE)
  721. return false;
  722. // Start our search at the first segment that ends after the first slot.
  723. const_iterator SegmentI = find(*SlotI);
  724. const_iterator SegmentE = end();
  725. // If there are no segments that end after the first slot, we're done.
  726. if (SegmentI == SegmentE)
  727. return false;
  728. // Look for each slot in the live range.
  729. for ( ; SlotI != SlotE; ++SlotI) {
  730. // Go to the next segment that ends after the current slot.
  731. // The slot may be within a hole in the range.
  732. SegmentI = advanceTo(SegmentI, *SlotI);
  733. if (SegmentI == SegmentE)
  734. return false;
  735. // If this segment contains the slot, we're done.
  736. if (SegmentI->contains(*SlotI))
  737. return true;
  738. // Otherwise, look for the next slot.
  739. }
  740. // We didn't find a segment containing any of the slots.
  741. return false;
  742. }
  743. void LiveInterval::freeSubRange(SubRange *S) {
  744. S->~SubRange();
  745. // Memory was allocated with BumpPtr allocator and is not freed here.
  746. }
  747. void LiveInterval::removeEmptySubRanges() {
  748. SubRange **NextPtr = &SubRanges;
  749. SubRange *I = *NextPtr;
  750. while (I != nullptr) {
  751. if (!I->empty()) {
  752. NextPtr = &I->Next;
  753. I = *NextPtr;
  754. continue;
  755. }
  756. // Skip empty subranges until we find the first nonempty one.
  757. do {
  758. SubRange *Next = I->Next;
  759. freeSubRange(I);
  760. I = Next;
  761. } while (I != nullptr && I->empty());
  762. *NextPtr = I;
  763. }
  764. }
  765. void LiveInterval::clearSubRanges() {
  766. for (SubRange *I = SubRanges, *Next; I != nullptr; I = Next) {
  767. Next = I->Next;
  768. freeSubRange(I);
  769. }
  770. SubRanges = nullptr;
  771. }
  772. void LiveInterval::refineSubRanges(BumpPtrAllocator &Allocator,
  773. LaneBitmask LaneMask, std::function<void(LiveInterval::SubRange&)> Apply) {
  774. LaneBitmask ToApply = LaneMask;
  775. for (SubRange &SR : subranges()) {
  776. LaneBitmask SRMask = SR.LaneMask;
  777. LaneBitmask Matching = SRMask & LaneMask;
  778. if (Matching.none())
  779. continue;
  780. SubRange *MatchingRange;
  781. if (SRMask == Matching) {
  782. // The subrange fits (it does not cover bits outside \p LaneMask).
  783. MatchingRange = &SR;
  784. } else {
  785. // We have to split the subrange into a matching and non-matching part.
  786. // Reduce lanemask of existing lane to non-matching part.
  787. SR.LaneMask = SRMask & ~Matching;
  788. // Create a new subrange for the matching part
  789. MatchingRange = createSubRangeFrom(Allocator, Matching, SR);
  790. }
  791. Apply(*MatchingRange);
  792. ToApply &= ~Matching;
  793. }
  794. // Create a new subrange if there are uncovered bits left.
  795. if (ToApply.any()) {
  796. SubRange *NewRange = createSubRange(Allocator, ToApply);
  797. Apply(*NewRange);
  798. }
  799. }
  800. unsigned LiveInterval::getSize() const {
  801. unsigned Sum = 0;
  802. for (const Segment &S : segments)
  803. Sum += S.start.distance(S.end);
  804. return Sum;
  805. }
  806. void LiveInterval::computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
  807. LaneBitmask LaneMask,
  808. const MachineRegisterInfo &MRI,
  809. const SlotIndexes &Indexes) const {
  810. assert(TargetRegisterInfo::isVirtualRegister(reg));
  811. LaneBitmask VRegMask = MRI.getMaxLaneMaskForVReg(reg);
  812. assert((VRegMask & LaneMask).any());
  813. const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  814. for (const MachineOperand &MO : MRI.def_operands(reg)) {
  815. if (!MO.isUndef())
  816. continue;
  817. unsigned SubReg = MO.getSubReg();
  818. assert(SubReg != 0 && "Undef should only be set on subreg defs");
  819. LaneBitmask DefMask = TRI.getSubRegIndexLaneMask(SubReg);
  820. LaneBitmask UndefMask = VRegMask & ~DefMask;
  821. if ((UndefMask & LaneMask).any()) {
  822. const MachineInstr &MI = *MO.getParent();
  823. bool EarlyClobber = MO.isEarlyClobber();
  824. SlotIndex Pos = Indexes.getInstructionIndex(MI).getRegSlot(EarlyClobber);
  825. Undefs.push_back(Pos);
  826. }
  827. }
  828. }
  829. raw_ostream& llvm::operator<<(raw_ostream& OS, const LiveRange::Segment &S) {
  830. return OS << '[' << S.start << ',' << S.end << ':' << S.valno->id << ')';
  831. }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Print this segment to the debug stream, newline-terminated (debug builds).
LLVM_DUMP_METHOD void LiveRange::Segment::dump() const {
  dbgs() << *this << '\n';
}
#endif
  837. void LiveRange::print(raw_ostream &OS) const {
  838. if (empty())
  839. OS << "EMPTY";
  840. else {
  841. for (const Segment &S : segments) {
  842. OS << S;
  843. assert(S.valno == getValNumInfo(S.valno->id) && "Bad VNInfo");
  844. }
  845. }
  846. // Print value number info.
  847. if (getNumValNums()) {
  848. OS << " ";
  849. unsigned vnum = 0;
  850. for (const_vni_iterator i = vni_begin(), e = vni_end(); i != e;
  851. ++i, ++vnum) {
  852. const VNInfo *vni = *i;
  853. if (vnum) OS << ' ';
  854. OS << vnum << '@';
  855. if (vni->isUnused()) {
  856. OS << 'x';
  857. } else {
  858. OS << vni->def;
  859. if (vni->isPHIDef())
  860. OS << "-phi";
  861. }
  862. }
  863. }
  864. }
  865. void LiveInterval::SubRange::print(raw_ostream &OS) const {
  866. OS << " L" << PrintLaneMask(LaneMask) << ' '
  867. << static_cast<const LiveRange&>(*this);
  868. }
  869. void LiveInterval::print(raw_ostream &OS) const {
  870. OS << printReg(reg) << ' ';
  871. super::print(OS);
  872. // Print subranges
  873. for (const SubRange &SR : subranges())
  874. OS << SR;
  875. OS << " weight:" << weight;
  876. }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debug helpers: print the object to the debug stream, newline-terminated.
LLVM_DUMP_METHOD void LiveRange::dump() const {
  dbgs() << *this << '\n';
}

LLVM_DUMP_METHOD void LiveInterval::SubRange::dump() const {
  dbgs() << *this << '\n';
}

LLVM_DUMP_METHOD void LiveInterval::dump() const {
  dbgs() << *this << '\n';
}
#endif
  888. #ifndef NDEBUG
  889. void LiveRange::verify() const {
  890. for (const_iterator I = begin(), E = end(); I != E; ++I) {
  891. assert(I->start.isValid());
  892. assert(I->end.isValid());
  893. assert(I->start < I->end);
  894. assert(I->valno != nullptr);
  895. assert(I->valno->id < valnos.size());
  896. assert(I->valno == valnos[I->valno->id]);
  897. if (std::next(I) != E) {
  898. assert(I->end <= std::next(I)->start);
  899. if (I->end == std::next(I)->start)
  900. assert(I->valno != std::next(I)->valno);
  901. }
  902. }
  903. }
/// Verify the main range plus all subranges. When \p MRI is given, subrange
/// lane masks are additionally checked against the register's maximum mask.
void LiveInterval::verify(const MachineRegisterInfo *MRI) const {
  super::verify();

  // Make sure SubRanges are fine and LaneMasks are disjunct.
  LaneBitmask Mask;
  LaneBitmask MaxMask = MRI != nullptr ? MRI->getMaxLaneMaskForVReg(reg)
                                       : LaneBitmask::getAll();
  for (const SubRange &SR : subranges()) {
    // Subrange lanemask should be disjunct to any previous subrange masks.
    assert((Mask & SR.LaneMask).none());
    Mask |= SR.LaneMask;

    // The accumulated subrange mask must be contained in the maximum lane
    // mask for the vreg.
    assert((Mask & ~MaxMask).none());

    // Empty subranges must have been removed.
    assert(!SR.empty());

    SR.verify();

    // The main live range must cover each subrange.
    assert(covers(SR));
  }
}
  923. #endif
  924. //===----------------------------------------------------------------------===//
  925. // LiveRangeUpdater class
  926. //===----------------------------------------------------------------------===//
  927. //
  928. // The LiveRangeUpdater class always maintains these invariants:
  929. //
  930. // - When LastStart is invalid, Spills is empty and the iterators are invalid.
  931. // This is the initial state, and the state created by flush().
  932. // In this state, isDirty() returns false.
  933. //
  934. // Otherwise, segments are kept in three separate areas:
  935. //
  936. // 1. [begin; WriteI) at the front of LR.
  937. // 2. [ReadI; end) at the back of LR.
  938. // 3. Spills.
  939. //
  940. // - LR.begin() <= WriteI <= ReadI <= LR.end().
  941. // - Segments in all three areas are fully ordered and coalesced.
  942. // - Segments in area 1 precede and can't coalesce with segments in area 2.
  943. // - Segments in Spills precede and can't coalesce with segments in area 2.
  944. // - No coalescing is possible between segments in Spills and segments in area
  945. // 1, and there are no overlapping segments.
  946. //
  947. // The segments in Spills are not ordered with respect to the segments in area
  948. // 1. They need to be merged.
  949. //
  950. // When they exist, Spills.back().start <= LastStart,
  951. // and WriteI[-1].start <= LastStart.
  952. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  953. void LiveRangeUpdater::print(raw_ostream &OS) const {
  954. if (!isDirty()) {
  955. if (LR)
  956. OS << "Clean updater: " << *LR << '\n';
  957. else
  958. OS << "Null updater.\n";
  959. return;
  960. }
  961. assert(LR && "Can't have null LR in dirty updater.");
  962. OS << " updater with gap = " << (ReadI - WriteI)
  963. << ", last start = " << LastStart
  964. << ":\n Area 1:";
  965. for (const auto &S : make_range(LR->begin(), WriteI))
  966. OS << ' ' << S;
  967. OS << "\n Spills:";
  968. for (unsigned I = 0, E = Spills.size(); I != E; ++I)
  969. OS << ' ' << Spills[I];
  970. OS << "\n Area 2:";
  971. for (const auto &S : make_range(ReadI, LR->end()))
  972. OS << ' ' << S;
  973. OS << '\n';
  974. }
// Print the updater state to stderr (debug builds only).
LLVM_DUMP_METHOD void LiveRangeUpdater::dump() const {
  print(errs());
}
  978. #endif
  979. // Determine if A and B should be coalesced.
  980. static inline bool coalescable(const LiveRange::Segment &A,
  981. const LiveRange::Segment &B) {
  982. assert(A.start <= B.start && "Unordered live segments.");
  983. if (A.end == B.start)
  984. return A.valno == B.valno;
  985. if (A.end < B.start)
  986. return false;
  987. assert(A.valno == B.valno && "Cannot overlap different values");
  988. return true;
  989. }
/// Add \p Seg to the live range while maintaining the three-area invariants
/// documented above (area 1 at the front of LR, Spills, area 2 at the back).
/// Segments are expected to be added in order of increasing start; a start
/// that moves backwards triggers a flush and reinitialization.
void LiveRangeUpdater::add(LiveRange::Segment Seg) {
  assert(LR && "Cannot add to a null destination");

  // Fall back to the regular add method if the live range
  // is using the segment set instead of the segment vector.
  if (LR->segmentSet != nullptr) {
    LR->addSegmentToSet(Seg);
    return;
  }

  // Flush the state if Start moves backwards.
  if (!LastStart.isValid() || LastStart > Seg.start) {
    if (isDirty())
      flush();
    // This brings us to an uninitialized state. Reinitialize.
    assert(Spills.empty() && "Leftover spilled segments");
    WriteI = ReadI = LR->begin();
  }

  // Remember start for next time.
  LastStart = Seg.start;

  // Advance ReadI until it ends after Seg.start.
  LiveRange::iterator E = LR->end();
  if (ReadI != E && ReadI->end <= Seg.start) {
    // First try to close the gap between WriteI and ReadI with spills.
    if (ReadI != WriteI)
      mergeSpills();
    // Then advance ReadI.
    if (ReadI == WriteI)
      // No gap: jump both iterators directly to the insertion point.
      ReadI = WriteI = LR->find(Seg.start);
    else
      // Gap present: compact segments forward into it while advancing.
      while (ReadI != E && ReadI->end <= Seg.start)
        *WriteI++ = *ReadI++;
  }

  assert(ReadI == E || ReadI->end > Seg.start);

  // Check if the ReadI segment begins early.
  if (ReadI != E && ReadI->start <= Seg.start) {
    assert(ReadI->valno == Seg.valno && "Cannot overlap different values");
    // Bail if Seg is completely contained in ReadI.
    if (ReadI->end >= Seg.end)
      return;
    // Coalesce into Seg.
    Seg.start = ReadI->start;
    ++ReadI;
  }

  // Coalesce as much as possible from ReadI into Seg.
  while (ReadI != E && coalescable(Seg, *ReadI)) {
    Seg.end = std::max(Seg.end, ReadI->end);
    ++ReadI;
  }

  // Try coalescing Spills.back() into Seg.
  if (!Spills.empty() && coalescable(Spills.back(), Seg)) {
    Seg.start = Spills.back().start;
    Seg.end = std::max(Spills.back().end, Seg.end);
    Spills.pop_back();
  }

  // Try coalescing Seg into WriteI[-1].
  if (WriteI != LR->begin() && coalescable(WriteI[-1], Seg)) {
    WriteI[-1].end = std::max(WriteI[-1].end, Seg.end);
    return;
  }

  // Seg doesn't coalesce with anything, and needs to be inserted somewhere.
  // If a gap exists, write into it.
  if (WriteI != ReadI) {
    *WriteI++ = Seg;
    return;
  }

  // Finally, append to LR or Spills.
  if (WriteI == E) {
    LR->segments.push_back(Seg);
    WriteI = ReadI = LR->end();
  } else
    Spills.push_back(Seg);
}
// Merge as many spilled segments as possible into the gap between WriteI
// and ReadI. Advance WriteI to reflect the inserted segments.
void LiveRangeUpdater::mergeSpills() {
  // Perform a backwards merge of the tail of Spills and the segments starting
  // at WriteI, writing the merged, sorted result into the gap. Only as many
  // spills as fit in the gap are moved; the rest stay in Spills.
  size_t GapSize = ReadI - WriteI;
  size_t NumMoved = std::min(Spills.size(), GapSize);
  LiveRange::iterator Src = WriteI;
  LiveRange::iterator Dst = Src + NumMoved;
  LiveRange::iterator SpillSrc = Spills.end();
  LiveRange::iterator B = LR->begin();

  // This is the new WriteI position after merging spills.
  WriteI = Dst;

  // Now merge Src and Spills backwards, always taking the segment with the
  // larger start so the destination ends up sorted.
  while (Src != Dst) {
    if (Src != B && Src[-1].start > SpillSrc[-1].start)
      *--Dst = *--Src;
    else
      *--Dst = *--SpillSrc;
  }
  // Exactly NumMoved spills were consumed; drop them from Spills.
  assert(NumMoved == size_t(Spills.end() - SpillSrc));
  Spills.erase(SpillSrc, Spills.end());
}
/// Flush the updater: merge any pending Spills into the live range, close
/// the WriteI/ReadI gap, and return to the clean (non-dirty) state.
void LiveRangeUpdater::flush() {
  if (!isDirty())
    return;
  // Clear the dirty state.
  LastStart = SlotIndex();

  assert(LR && "Cannot add to a null destination");

  // Nothing to merge?
  if (Spills.empty()) {
    // Just close the gap between the two areas.
    LR->segments.erase(WriteI, ReadI);
    LR->verify();
    return;
  }

  // Resize the WriteI - ReadI gap to match Spills.
  size_t GapSize = ReadI - WriteI;
  if (GapSize < Spills.size()) {
    // The gap is too small. Make some room.
    size_t WritePos = WriteI - LR->begin();
    LR->segments.insert(ReadI, Spills.size() - GapSize, LiveRange::Segment());
    // This also invalidated ReadI, but it is recomputed below.
    // insert() may reallocate, so WriteI must be re-derived from its offset.
    WriteI = LR->begin() + WritePos;
  } else {
    // Shrink the gap if necessary.
    LR->segments.erase(WriteI + Spills.size(), ReadI);
  }
  // The gap now has exactly Spills.size() slots; merging fills it entirely.
  ReadI = WriteI + Spills.size();
  mergeSpills();
  LR->verify();
}
  1110. unsigned ConnectedVNInfoEqClasses::Classify(const LiveRange &LR) {
  1111. // Create initial equivalence classes.
  1112. EqClass.clear();
  1113. EqClass.grow(LR.getNumValNums());
  1114. const VNInfo *used = nullptr, *unused = nullptr;
  1115. // Determine connections.
  1116. for (const VNInfo *VNI : LR.valnos) {
  1117. // Group all unused values into one class.
  1118. if (VNI->isUnused()) {
  1119. if (unused)
  1120. EqClass.join(unused->id, VNI->id);
  1121. unused = VNI;
  1122. continue;
  1123. }
  1124. used = VNI;
  1125. if (VNI->isPHIDef()) {
  1126. const MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
  1127. assert(MBB && "Phi-def has no defining MBB");
  1128. // Connect to values live out of predecessors.
  1129. for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
  1130. PE = MBB->pred_end(); PI != PE; ++PI)
  1131. if (const VNInfo *PVNI = LR.getVNInfoBefore(LIS.getMBBEndIdx(*PI)))
  1132. EqClass.join(VNI->id, PVNI->id);
  1133. } else {
  1134. // Normal value defined by an instruction. Check for two-addr redef.
  1135. // FIXME: This could be coincidental. Should we really check for a tied
  1136. // operand constraint?
  1137. // Note that VNI->def may be a use slot for an early clobber def.
  1138. if (const VNInfo *UVNI = LR.getVNInfoBefore(VNI->def))
  1139. EqClass.join(VNI->id, UVNI->id);
  1140. }
  1141. }
  1142. // Lump all the unused values in with the last used value.
  1143. if (used && unused)
  1144. EqClass.join(used->id, unused->id);
  1145. EqClass.compress();
  1146. return EqClass.getNumClasses();
  1147. }
/// Distribute the live segments of \p LI among the intervals in \p LIV[]
/// according to the equivalence classes computed by Classify(): operands are
/// rewritten to the register of their class's interval, subranges are split
/// per class, and finally the main range is distributed.
void ConnectedVNInfoEqClasses::Distribute(LiveInterval &LI, LiveInterval *LIV[],
                                          MachineRegisterInfo &MRI) {
  // Rewrite instructions.
  for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(LI.reg),
       RE = MRI.reg_end(); RI != RE;) {
    MachineOperand &MO = *RI;
    MachineInstr *MI = RI->getParent();
    // Advance before setReg() below can invalidate the current use iterator.
    ++RI;
    // DBG_VALUE instructions don't have slot indexes, so get the index of the
    // instruction before them.
    // Normally, DBG_VALUE instructions are removed before this function is
    // called, but it is not a requirement.
    SlotIndex Idx;
    if (MI->isDebugValue())
      Idx = LIS.getSlotIndexes()->getIndexBefore(*MI);
    else
      Idx = LIS.getInstructionIndex(*MI);
    LiveQueryResult LRQ = LI.Query(Idx);
    const VNInfo *VNI = MO.readsReg() ? LRQ.valueIn() : LRQ.valueDefined();
    // In the case of an <undef> use that isn't tied to any def, VNI will be
    // NULL. If the use is tied to a def, VNI will be the defined value.
    if (!VNI)
      continue;
    // Class 0 stays on LI itself; other classes move to LIV[EqClass-1].
    if (unsigned EqClass = getEqClass(VNI))
      MO.setReg(LIV[EqClass-1]->reg);
  }

  // Distribute subregister liveranges.
  if (LI.hasSubRanges()) {
    unsigned NumComponents = EqClass.getNumClasses();
    SmallVector<unsigned, 8> VNIMapping;
    SmallVector<LiveInterval::SubRange*, 8> SubRanges;
    BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
    for (LiveInterval::SubRange &SR : LI.subranges()) {
      // Create new subranges in the split intervals and construct a mapping
      // for the VNInfos in the subrange.
      unsigned NumValNos = SR.valnos.size();
      VNIMapping.clear();
      VNIMapping.reserve(NumValNos);
      SubRanges.clear();
      SubRanges.resize(NumComponents-1, nullptr);
      for (unsigned I = 0; I < NumValNos; ++I) {
        const VNInfo &VNI = *SR.valnos[I];
        unsigned ComponentNum;
        if (VNI.isUnused()) {
          // Unused values stay with component 0 (LI itself).
          ComponentNum = 0;
        } else {
          // The subrange value belongs to the same component as the main
          // range value live at its definition.
          const VNInfo *MainRangeVNI = LI.getVNInfoAt(VNI.def);
          assert(MainRangeVNI != nullptr
                 && "SubRange def must have corresponding main range def");
          ComponentNum = getEqClass(MainRangeVNI);
          // Lazily create the destination subrange on first use.
          if (ComponentNum > 0 && SubRanges[ComponentNum-1] == nullptr) {
            SubRanges[ComponentNum-1]
              = LIV[ComponentNum-1]->createSubRange(Allocator, SR.LaneMask);
          }
        }
        VNIMapping.push_back(ComponentNum);
      }
      DistributeRange(SR, SubRanges.data(), VNIMapping);
    }
    // Splitting may leave subranges with no segments; drop them.
    LI.removeEmptySubRanges();
  }

  // Distribute main liverange.
  DistributeRange(LI, LIV, EqClass);
}