LazyCallGraphTest.cpp 80 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172
  1. //===- LazyCallGraphTest.cpp - Unit tests for the lazy CG analysis --------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/AsmParser/Parser.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/SourceMgr.h"
#include "gtest/gtest.h"

#include <memory>
  18. using namespace llvm;
  19. namespace {
  20. std::unique_ptr<Module> parseAssembly(LLVMContext &Context,
  21. const char *Assembly) {
  22. SMDiagnostic Error;
  23. std::unique_ptr<Module> M = parseAssemblyString(Assembly, Error, Context);
  24. std::string ErrMsg;
  25. raw_string_ostream OS(ErrMsg);
  26. Error.print("", OS);
  27. // A failure here means that the test itself is buggy.
  28. if (!M)
  29. report_fatal_error(OS.str().c_str());
  30. return M;
  31. }
  32. /*
  33. IR forming a call graph with a diamond of triangle-shaped SCCs:
  34. d1
  35. / \
  36. d3--d2
  37. / \
  38. b1 c1
  39. / \ / \
  40. b3--b2 c3--c2
  41. \ /
  42. a1
  43. / \
  44. a3--a2
  45. All call edges go up between SCCs, and clockwise around the SCC.
  46. */
// LLVM IR realizing the "diamond of triangles" call graph drawn in the
// comment above: four three-node call cycles (a, b, c, d). a1 also calls into
// the b and c triangles (@b2, @c3), and b1/c1 each call into the d triangle
// (@d3, @d2), forming the diamond between the four SCCs.
static const char DiamondOfTriangles[] =
    "define void @a1() {\n"
    "entry:\n"
    " call void @a2()\n"
    " call void @b2()\n"
    " call void @c3()\n"
    " ret void\n"
    "}\n"
    "define void @a2() {\n"
    "entry:\n"
    " call void @a3()\n"
    " ret void\n"
    "}\n"
    "define void @a3() {\n"
    "entry:\n"
    " call void @a1()\n"
    " ret void\n"
    "}\n"
    "define void @b1() {\n"
    "entry:\n"
    " call void @b2()\n"
    " call void @d3()\n"
    " ret void\n"
    "}\n"
    "define void @b2() {\n"
    "entry:\n"
    " call void @b3()\n"
    " ret void\n"
    "}\n"
    "define void @b3() {\n"
    "entry:\n"
    " call void @b1()\n"
    " ret void\n"
    "}\n"
    "define void @c1() {\n"
    "entry:\n"
    " call void @c2()\n"
    " call void @d2()\n"
    " ret void\n"
    "}\n"
    "define void @c2() {\n"
    "entry:\n"
    " call void @c3()\n"
    " ret void\n"
    "}\n"
    "define void @c3() {\n"
    "entry:\n"
    " call void @c1()\n"
    " ret void\n"
    "}\n"
    "define void @d1() {\n"
    "entry:\n"
    " call void @d2()\n"
    " ret void\n"
    "}\n"
    "define void @d2() {\n"
    "entry:\n"
    " call void @d3()\n"
    " ret void\n"
    "}\n"
    "define void @d3() {\n"
    "entry:\n"
    " call void @d1()\n"
    " ret void\n"
    "}\n";
  112. /*
  113. IR forming a reference graph with a diamond of triangle-shaped RefSCCs
  114. d1
  115. / \
  116. d3--d2
  117. / \
  118. b1 c1
  119. / \ / \
  120. b3--b2 c3--c2
  121. \ /
  122. a1
  123. / \
  124. a3--a2
  125. All call edges go up between RefSCCs, and clockwise around the RefSCC.
  126. */
// Same diamond-of-triangles shape as DiamondOfTriangles, but every edge is a
// *reference* edge rather than a call edge: each function takes the address
// of its targets (via store of the function pointer) instead of calling them.
// This produces the analogous structure in the reference graph / RefSCCs.
static const char DiamondOfTrianglesRefGraph[] =
    "define void @a1() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @a2, void ()** %a\n"
    " store void ()* @b2, void ()** %a\n"
    " store void ()* @c3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @a2() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @a3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @a3() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @a1, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @b1() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @b2, void ()** %a\n"
    " store void ()* @d3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @b2() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @b3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @b3() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @b1, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @c1() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @c2, void ()** %a\n"
    " store void ()* @d2, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @c2() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @c3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @c3() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @c1, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @d1() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @d2, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @d2() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @d3, void ()** %a\n"
    " ret void\n"
    "}\n"
    "define void @d3() {\n"
    "entry:\n"
    " %a = alloca void ()*\n"
    " store void ()* @d1, void ()** %a\n"
    " ret void\n"
    "}\n";
  204. static LazyCallGraph buildCG(Module &M) {
  205. TargetLibraryInfoImpl TLII(Triple(M.getTargetTriple()));
  206. TargetLibraryInfo TLI(TLII);
  207. auto GetTLI = [&TLI](Function &F) -> TargetLibraryInfo & { return TLI; };
  208. LazyCallGraph CG(M, GetTLI);
  209. return CG;
  210. }
// Checks the basic shape of the graph built from DiamondOfTriangles: entry
// node order, per-node edge lists, and the post-order RefSCC walk with its
// parent/child/ancestor relations.
TEST(LazyCallGraphTest, BasicGraphFormation) {
  LLVMContext Context;
  std::unique_ptr<Module> M = parseAssembly(Context, DiamondOfTriangles);
  LazyCallGraph CG = buildCG(*M);

  // The order of the entry nodes should be stable w.r.t. the source order of
  // the IR, and everything in our module is an entry node, so just directly
  // build variables for each node.
  auto I = CG.begin();
  LazyCallGraph::Node &A1 = (I++)->getNode();
  EXPECT_EQ("a1", A1.getFunction().getName());
  LazyCallGraph::Node &A2 = (I++)->getNode();
  EXPECT_EQ("a2", A2.getFunction().getName());
  LazyCallGraph::Node &A3 = (I++)->getNode();
  EXPECT_EQ("a3", A3.getFunction().getName());
  LazyCallGraph::Node &B1 = (I++)->getNode();
  EXPECT_EQ("b1", B1.getFunction().getName());
  LazyCallGraph::Node &B2 = (I++)->getNode();
  EXPECT_EQ("b2", B2.getFunction().getName());
  LazyCallGraph::Node &B3 = (I++)->getNode();
  EXPECT_EQ("b3", B3.getFunction().getName());
  LazyCallGraph::Node &C1 = (I++)->getNode();
  EXPECT_EQ("c1", C1.getFunction().getName());
  LazyCallGraph::Node &C2 = (I++)->getNode();
  EXPECT_EQ("c2", C2.getFunction().getName());
  LazyCallGraph::Node &C3 = (I++)->getNode();
  EXPECT_EQ("c3", C3.getFunction().getName());
  LazyCallGraph::Node &D1 = (I++)->getNode();
  EXPECT_EQ("d1", D1.getFunction().getName());
  LazyCallGraph::Node &D2 = (I++)->getNode();
  EXPECT_EQ("d2", D2.getFunction().getName());
  LazyCallGraph::Node &D3 = (I++)->getNode();
  EXPECT_EQ("d3", D3.getFunction().getName());
  EXPECT_EQ(CG.end(), I);

  // Build vectors and sort them for the rest of the assertions to make them
  // independent of order.
  std::vector<std::string> Nodes;

  // a1 has three outgoing edges: into its own triangle plus into b and c.
  for (LazyCallGraph::Edge &E : A1.populate())
    Nodes.push_back(E.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ("a2", Nodes[0]);
  EXPECT_EQ("b2", Nodes[1]);
  EXPECT_EQ("c3", Nodes[2]);
  Nodes.clear();

  // The remaining triangle members each have exactly one edge, to the next
  // node clockwise around their triangle.
  A2.populate();
  EXPECT_EQ(A2->end(), std::next(A2->begin()));
  EXPECT_EQ("a3", A2->begin()->getFunction().getName());
  A3.populate();
  EXPECT_EQ(A3->end(), std::next(A3->begin()));
  EXPECT_EQ("a1", A3->begin()->getFunction().getName());

  for (LazyCallGraph::Edge &E : B1.populate())
    Nodes.push_back(E.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ("b2", Nodes[0]);
  EXPECT_EQ("d3", Nodes[1]);
  Nodes.clear();

  B2.populate();
  EXPECT_EQ(B2->end(), std::next(B2->begin()));
  EXPECT_EQ("b3", B2->begin()->getFunction().getName());
  B3.populate();
  EXPECT_EQ(B3->end(), std::next(B3->begin()));
  EXPECT_EQ("b1", B3->begin()->getFunction().getName());

  for (LazyCallGraph::Edge &E : C1.populate())
    Nodes.push_back(E.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ("c2", Nodes[0]);
  EXPECT_EQ("d2", Nodes[1]);
  Nodes.clear();

  C2.populate();
  EXPECT_EQ(C2->end(), std::next(C2->begin()));
  EXPECT_EQ("c3", C2->begin()->getFunction().getName());
  C3.populate();
  EXPECT_EQ(C3->end(), std::next(C3->begin()));
  EXPECT_EQ("c1", C3->begin()->getFunction().getName());

  D1.populate();
  EXPECT_EQ(D1->end(), std::next(D1->begin()));
  EXPECT_EQ("d2", D1->begin()->getFunction().getName());
  D2.populate();
  EXPECT_EQ(D2->end(), std::next(D2->begin()));
  EXPECT_EQ("d3", D2->begin()->getFunction().getName());
  D3.populate();
  EXPECT_EQ(D3->end(), std::next(D3->begin()));
  EXPECT_EQ("d1", D3->begin()->getFunction().getName());

  // Now lets look at the RefSCCs and SCCs. Post-order means the d triangle
  // (the sink of the diamond) comes first and the a triangle last.
  CG.buildRefSCCs();
  auto J = CG.postorder_ref_scc_begin();

  LazyCallGraph::RefSCC &D = *J++;
  ASSERT_EQ(1, D.size());
  for (LazyCallGraph::Node &N : *D.begin())
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("d1", Nodes[0]);
  EXPECT_EQ("d2", Nodes[1]);
  EXPECT_EQ("d3", Nodes[2]);
  Nodes.clear();
  // A RefSCC is never its own parent, child, ancestor, or descendant.
  EXPECT_FALSE(D.isParentOf(D));
  EXPECT_FALSE(D.isChildOf(D));
  EXPECT_FALSE(D.isAncestorOf(D));
  EXPECT_FALSE(D.isDescendantOf(D));
  EXPECT_EQ(&D, &*CG.postorder_ref_scc_begin());

  LazyCallGraph::RefSCC &C = *J++;
  ASSERT_EQ(1, C.size());
  for (LazyCallGraph::Node &N : *C.begin())
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("c1", Nodes[0]);
  EXPECT_EQ("c2", Nodes[1]);
  EXPECT_EQ("c3", Nodes[2]);
  Nodes.clear();
  EXPECT_TRUE(C.isParentOf(D));
  EXPECT_FALSE(C.isChildOf(D));
  EXPECT_TRUE(C.isAncestorOf(D));
  EXPECT_FALSE(C.isDescendantOf(D));
  EXPECT_EQ(&C, &*std::next(CG.postorder_ref_scc_begin()));

  LazyCallGraph::RefSCC &B = *J++;
  ASSERT_EQ(1, B.size());
  for (LazyCallGraph::Node &N : *B.begin())
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("b1", Nodes[0]);
  EXPECT_EQ("b2", Nodes[1]);
  EXPECT_EQ("b3", Nodes[2]);
  Nodes.clear();
  EXPECT_TRUE(B.isParentOf(D));
  EXPECT_FALSE(B.isChildOf(D));
  EXPECT_TRUE(B.isAncestorOf(D));
  EXPECT_FALSE(B.isDescendantOf(D));
  // b and c are siblings in the diamond: neither is an ancestor of the other.
  EXPECT_FALSE(B.isAncestorOf(C));
  EXPECT_FALSE(C.isAncestorOf(B));
  EXPECT_EQ(&B, &*std::next(CG.postorder_ref_scc_begin(), 2));

  LazyCallGraph::RefSCC &A = *J++;
  ASSERT_EQ(1, A.size());
  for (LazyCallGraph::Node &N : *A.begin())
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("a1", Nodes[0]);
  EXPECT_EQ("a2", Nodes[1]);
  EXPECT_EQ("a3", Nodes[2]);
  Nodes.clear();
  // a sits at the bottom of the diamond: a direct parent of b and c, and an
  // ancestor (but not direct parent) of d.
  EXPECT_TRUE(A.isParentOf(B));
  EXPECT_TRUE(A.isParentOf(C));
  EXPECT_FALSE(A.isParentOf(D));
  EXPECT_TRUE(A.isAncestorOf(B));
  EXPECT_TRUE(A.isAncestorOf(C));
  EXPECT_TRUE(A.isAncestorOf(D));
  EXPECT_EQ(&A, &*std::next(CG.postorder_ref_scc_begin(), 3));

  // Exactly four RefSCCs in total.
  EXPECT_EQ(CG.postorder_ref_scc_end(), J);
  EXPECT_EQ(J, std::next(CG.postorder_ref_scc_begin(), 4));
}
  363. static Function &lookupFunction(Module &M, StringRef Name) {
  364. for (Function &F : M)
  365. if (F.getName() == Name)
  366. return F;
  367. report_fatal_error("Couldn't find function!");
  368. }
// Exercises raw edge insertion and removal on an otherwise-unstructured graph
// (no SCC/RefSCC formation involved).
TEST(LazyCallGraphTest, BasicGraphMutation) {
  LLVMContext Context;
  std::unique_ptr<Module> M = parseAssembly(Context, "define void @a() {\n"
                                                     "entry:\n"
                                                     " call void @b()\n"
                                                     " call void @c()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @b() {\n"
                                                     "entry:\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @c() {\n"
                                                     "entry:\n"
                                                     " ret void\n"
                                                     "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Initial state: @a has two callees, @b has none.
  LazyCallGraph::Node &A = CG.get(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = CG.get(lookupFunction(*M, "b"));
  A.populate();
  EXPECT_EQ(2, std::distance(A->begin(), A->end()));
  B.populate();
  EXPECT_EQ(0, std::distance(B->begin(), B->end()));

  // Inserting b -> c grows only the source node's edge list.
  LazyCallGraph::Node &C = CG.get(lookupFunction(*M, "c"));
  C.populate();
  CG.insertEdge(B, C, LazyCallGraph::Edge::Call);
  EXPECT_EQ(1, std::distance(B->begin(), B->end()));
  EXPECT_EQ(0, std::distance(C->begin(), C->end()));

  // Insert c -> b, then a self-edge c -> c; edges appear in insertion order.
  CG.insertEdge(C, B, LazyCallGraph::Edge::Call);
  EXPECT_EQ(1, std::distance(C->begin(), C->end()));
  EXPECT_EQ(&B, &C->begin()->getNode());
  CG.insertEdge(C, C, LazyCallGraph::Edge::Call);
  EXPECT_EQ(2, std::distance(C->begin(), C->end()));
  EXPECT_EQ(&B, &C->begin()->getNode());
  EXPECT_EQ(&C, &std::next(C->begin())->getNode());

  // Remove the inserted edges one at a time, checking counts after each.
  CG.removeEdge(C, B);
  EXPECT_EQ(1, std::distance(C->begin(), C->end()));
  EXPECT_EQ(&C, &C->begin()->getNode());
  CG.removeEdge(C, C);
  EXPECT_EQ(0, std::distance(C->begin(), C->end()));
  CG.removeEdge(B, C);
  EXPECT_EQ(0, std::distance(B->begin(), B->end()));
}
// Collapses the diamond of triangles into a single RefSCC (by adding a ref
// edge from d1 back down to a1) and verifies that the four call-SCCs inside
// it are still formed and visited in post-order.
TEST(LazyCallGraphTest, InnerSCCFormation) {
  LLVMContext Context;
  std::unique_ptr<Module> M = parseAssembly(Context, DiamondOfTriangles);
  LazyCallGraph CG = buildCG(*M);

  // Now mutate the graph to connect every node into a single RefSCC to ensure
  // that our inner SCC formation handles the rest.
  LazyCallGraph::Node &D1 = CG.get(lookupFunction(*M, "d1"));
  LazyCallGraph::Node &A1 = CG.get(lookupFunction(*M, "a1"));
  A1.populate();
  D1.populate();
  CG.insertEdge(D1, A1, LazyCallGraph::Edge::Ref);

  // Build vectors and sort them for the rest of the assertions to make them
  // independent of order.
  std::vector<std::string> Nodes;

  // We should build a single RefSCC for the entire graph.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC = *I++;
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  // Now walk the four SCCs which should be in post-order.
  auto J = RC.begin();
  LazyCallGraph::SCC &D = *J++;
  for (LazyCallGraph::Node &N : D)
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("d1", Nodes[0]);
  EXPECT_EQ("d2", Nodes[1]);
  EXPECT_EQ("d3", Nodes[2]);
  Nodes.clear();

  LazyCallGraph::SCC &B = *J++;
  for (LazyCallGraph::Node &N : B)
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("b1", Nodes[0]);
  EXPECT_EQ("b2", Nodes[1]);
  EXPECT_EQ("b3", Nodes[2]);
  Nodes.clear();

  LazyCallGraph::SCC &C = *J++;
  for (LazyCallGraph::Node &N : C)
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("c1", Nodes[0]);
  EXPECT_EQ("c2", Nodes[1]);
  EXPECT_EQ("c3", Nodes[2]);
  Nodes.clear();

  LazyCallGraph::SCC &A = *J++;
  for (LazyCallGraph::Node &N : A)
    Nodes.push_back(N.getFunction().getName());
  llvm::sort(Nodes);
  EXPECT_EQ(3u, Nodes.size());
  EXPECT_EQ("a1", Nodes[0]);
  EXPECT_EQ("a2", Nodes[1]);
  EXPECT_EQ("a3", Nodes[2]);
  Nodes.clear();

  // Exactly four SCCs inside the single RefSCC.
  EXPECT_EQ(RC.end(), J);
}
  471. TEST(LazyCallGraphTest, MultiArmSCC) {
  472. LLVMContext Context;
  473. // Two interlocking cycles. The really useful thing about this SCC is that it
  474. // will require Tarjan's DFS to backtrack and finish processing all of the
  475. // children of each node in the SCC. Since this involves call edges, both
  476. // Tarjan implementations will have to successfully navigate the structure.
  477. std::unique_ptr<Module> M = parseAssembly(Context, "define void @f1() {\n"
  478. "entry:\n"
  479. " call void @f2()\n"
  480. " call void @f4()\n"
  481. " ret void\n"
  482. "}\n"
  483. "define void @f2() {\n"
  484. "entry:\n"
  485. " call void @f3()\n"
  486. " ret void\n"
  487. "}\n"
  488. "define void @f3() {\n"
  489. "entry:\n"
  490. " call void @f1()\n"
  491. " ret void\n"
  492. "}\n"
  493. "define void @f4() {\n"
  494. "entry:\n"
  495. " call void @f5()\n"
  496. " ret void\n"
  497. "}\n"
  498. "define void @f5() {\n"
  499. "entry:\n"
  500. " call void @f1()\n"
  501. " ret void\n"
  502. "}\n");
  503. LazyCallGraph CG = buildCG(*M);
  504. // Force the graph to be fully expanded.
  505. CG.buildRefSCCs();
  506. auto I = CG.postorder_ref_scc_begin();
  507. LazyCallGraph::RefSCC &RC = *I++;
  508. EXPECT_EQ(CG.postorder_ref_scc_end(), I);
  509. LazyCallGraph::Node &N1 = *CG.lookup(lookupFunction(*M, "f1"));
  510. LazyCallGraph::Node &N2 = *CG.lookup(lookupFunction(*M, "f2"));
  511. LazyCallGraph::Node &N3 = *CG.lookup(lookupFunction(*M, "f3"));
  512. LazyCallGraph::Node &N4 = *CG.lookup(lookupFunction(*M, "f4"));
  513. LazyCallGraph::Node &N5 = *CG.lookup(lookupFunction(*M, "f4"));
  514. EXPECT_EQ(&RC, CG.lookupRefSCC(N1));
  515. EXPECT_EQ(&RC, CG.lookupRefSCC(N2));
  516. EXPECT_EQ(&RC, CG.lookupRefSCC(N3));
  517. EXPECT_EQ(&RC, CG.lookupRefSCC(N4));
  518. EXPECT_EQ(&RC, CG.lookupRefSCC(N5));
  519. ASSERT_EQ(1, RC.size());
  520. LazyCallGraph::SCC &C = *RC.begin();
  521. EXPECT_EQ(&C, CG.lookupSCC(N1));
  522. EXPECT_EQ(&C, CG.lookupSCC(N2));
  523. EXPECT_EQ(&C, CG.lookupSCC(N3));
  524. EXPECT_EQ(&C, CG.lookupSCC(N4));
  525. EXPECT_EQ(&C, CG.lookupSCC(N5));
  526. }
// Exercises RefSCC-level outgoing-edge mutation on a simple diamond
// a -> {b, c} -> d: inserting a call edge a -> d, toggling it between call
// and ref, and removing it, checking the SCC/RefSCC relations at each step.
TEST(LazyCallGraphTest, OutgoingEdgeMutation) {
  LLVMContext Context;
  std::unique_ptr<Module> M = parseAssembly(Context, "define void @a() {\n"
                                                     "entry:\n"
                                                     " call void @b()\n"
                                                     " call void @c()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @b() {\n"
                                                     "entry:\n"
                                                     " call void @d()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @c() {\n"
                                                     "entry:\n"
                                                     " call void @d()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @d() {\n"
                                                     "entry:\n"
                                                     " ret void\n"
                                                     "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
    dbgs() << "Formed RefSCC: " << RC << "\n";

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  LazyCallGraph::SCC &BC = *CG.lookupSCC(B);
  LazyCallGraph::SCC &CC = *CG.lookupSCC(C);
  LazyCallGraph::SCC &DC = *CG.lookupSCC(D);
  LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A);
  LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B);
  LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C);
  LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D);

  // Baseline relations: a is a direct parent of b and c, an ancestor (but not
  // a direct parent) of d; d is a direct child of b and c.
  EXPECT_TRUE(ARC.isParentOf(BRC));
  EXPECT_TRUE(AC.isParentOf(BC));
  EXPECT_TRUE(ARC.isParentOf(CRC));
  EXPECT_TRUE(AC.isParentOf(CC));
  EXPECT_FALSE(ARC.isParentOf(DRC));
  EXPECT_FALSE(AC.isParentOf(DC));
  EXPECT_TRUE(ARC.isAncestorOf(DRC));
  EXPECT_TRUE(AC.isAncestorOf(DC));
  EXPECT_FALSE(DRC.isChildOf(ARC));
  EXPECT_FALSE(DC.isChildOf(AC));
  EXPECT_TRUE(DRC.isDescendantOf(ARC));
  EXPECT_TRUE(DC.isDescendantOf(AC));
  EXPECT_TRUE(DRC.isChildOf(BRC));
  EXPECT_TRUE(DC.isChildOf(BC));
  EXPECT_TRUE(DRC.isChildOf(CRC));
  EXPECT_TRUE(DC.isChildOf(CC));

  // Insert a new call edge a -> d and verify it shows up in a's edge list.
  EXPECT_EQ(2, std::distance(A->begin(), A->end()));
  ARC.insertOutgoingEdge(A, D, LazyCallGraph::Edge::Call);
  EXPECT_EQ(3, std::distance(A->begin(), A->end()));
  const LazyCallGraph::Edge &NewE = (*A)[D];
  EXPECT_TRUE(NewE);
  EXPECT_TRUE(NewE.isCall());
  EXPECT_EQ(&D, &NewE.getNode());

  // Only the parent and child tests should have changed. The rest of the graph
  // remains the same.
  EXPECT_TRUE(ARC.isParentOf(DRC));
  EXPECT_TRUE(AC.isParentOf(DC));
  EXPECT_TRUE(ARC.isAncestorOf(DRC));
  EXPECT_TRUE(AC.isAncestorOf(DC));
  EXPECT_TRUE(DRC.isChildOf(ARC));
  EXPECT_TRUE(DC.isChildOf(AC));
  EXPECT_TRUE(DRC.isDescendantOf(ARC));
  EXPECT_TRUE(DC.isDescendantOf(AC));
  EXPECT_EQ(&AC, CG.lookupSCC(A));
  EXPECT_EQ(&BC, CG.lookupSCC(B));
  EXPECT_EQ(&CC, CG.lookupSCC(C));
  EXPECT_EQ(&DC, CG.lookupSCC(D));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C));
  EXPECT_EQ(&DRC, CG.lookupRefSCC(D));

  // Demote the edge to a ref edge.
  ARC.switchOutgoingEdgeToRef(A, D);
  EXPECT_FALSE(NewE.isCall());

  // Verify the reference graph remains the same but the SCC graph is updated.
  EXPECT_TRUE(ARC.isParentOf(DRC));
  EXPECT_FALSE(AC.isParentOf(DC));
  EXPECT_TRUE(ARC.isAncestorOf(DRC));
  EXPECT_TRUE(AC.isAncestorOf(DC));
  EXPECT_TRUE(DRC.isChildOf(ARC));
  EXPECT_FALSE(DC.isChildOf(AC));
  EXPECT_TRUE(DRC.isDescendantOf(ARC));
  EXPECT_TRUE(DC.isDescendantOf(AC));
  EXPECT_EQ(&AC, CG.lookupSCC(A));
  EXPECT_EQ(&BC, CG.lookupSCC(B));
  EXPECT_EQ(&CC, CG.lookupSCC(C));
  EXPECT_EQ(&DC, CG.lookupSCC(D));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C));
  EXPECT_EQ(&DRC, CG.lookupRefSCC(D));

  // Promote it back to a call edge.
  ARC.switchOutgoingEdgeToCall(A, D);
  EXPECT_TRUE(NewE.isCall());

  // Verify the reference graph remains the same but the SCC graph is updated.
  EXPECT_TRUE(ARC.isParentOf(DRC));
  EXPECT_TRUE(AC.isParentOf(DC));
  EXPECT_TRUE(ARC.isAncestorOf(DRC));
  EXPECT_TRUE(AC.isAncestorOf(DC));
  EXPECT_TRUE(DRC.isChildOf(ARC));
  EXPECT_TRUE(DC.isChildOf(AC));
  EXPECT_TRUE(DRC.isDescendantOf(ARC));
  EXPECT_TRUE(DC.isDescendantOf(AC));
  EXPECT_EQ(&AC, CG.lookupSCC(A));
  EXPECT_EQ(&BC, CG.lookupSCC(B));
  EXPECT_EQ(&CC, CG.lookupSCC(C));
  EXPECT_EQ(&DC, CG.lookupSCC(D));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C));
  EXPECT_EQ(&DRC, CG.lookupRefSCC(D));

  // Remove the edge entirely.
  ARC.removeOutgoingEdge(A, D);
  EXPECT_EQ(2, std::distance(A->begin(), A->end()));

  // Now the parent and child tests fail again but the rest remains the same.
  EXPECT_FALSE(ARC.isParentOf(DRC));
  EXPECT_FALSE(AC.isParentOf(DC));
  EXPECT_TRUE(ARC.isAncestorOf(DRC));
  EXPECT_TRUE(AC.isAncestorOf(DC));
  EXPECT_FALSE(DRC.isChildOf(ARC));
  EXPECT_FALSE(DC.isChildOf(AC));
  EXPECT_TRUE(DRC.isDescendantOf(ARC));
  EXPECT_TRUE(DC.isDescendantOf(AC));
  EXPECT_EQ(&AC, CG.lookupSCC(A));
  EXPECT_EQ(&BC, CG.lookupSCC(B));
  EXPECT_EQ(&CC, CG.lookupSCC(C));
  EXPECT_EQ(&DC, CG.lookupSCC(D));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C));
  EXPECT_EQ(&DRC, CG.lookupRefSCC(D));
}
  665. TEST(LazyCallGraphTest, IncomingEdgeInsertion) {
  666. LLVMContext Context;
  667. // We want to ensure we can add edges even across complex diamond graphs, so
  668. // we use the diamond of triangles graph defined above. The ascii diagram is
  669. // repeated here for easy reference.
  670. //
  671. // d1 |
  672. // / \ |
  673. // d3--d2 |
  674. // / \ |
  675. // b1 c1 |
  676. // / \ / \ |
  677. // b3--b2 c3--c2 |
  678. // \ / |
  679. // a1 |
  680. // / \ |
  681. // a3--a2 |
  682. //
  683. std::unique_ptr<Module> M = parseAssembly(Context, DiamondOfTriangles);
  684. LazyCallGraph CG = buildCG(*M);
  685. // Force the graph to be fully expanded.
  686. CG.buildRefSCCs();
  687. for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
  688. dbgs() << "Formed RefSCC: " << RC << "\n";
  689. LazyCallGraph::Node &A1 = *CG.lookup(lookupFunction(*M, "a1"));
  690. LazyCallGraph::Node &A2 = *CG.lookup(lookupFunction(*M, "a2"));
  691. LazyCallGraph::Node &A3 = *CG.lookup(lookupFunction(*M, "a3"));
  692. LazyCallGraph::Node &B1 = *CG.lookup(lookupFunction(*M, "b1"));
  693. LazyCallGraph::Node &B2 = *CG.lookup(lookupFunction(*M, "b2"));
  694. LazyCallGraph::Node &B3 = *CG.lookup(lookupFunction(*M, "b3"));
  695. LazyCallGraph::Node &C1 = *CG.lookup(lookupFunction(*M, "c1"));
  696. LazyCallGraph::Node &C2 = *CG.lookup(lookupFunction(*M, "c2"));
  697. LazyCallGraph::Node &C3 = *CG.lookup(lookupFunction(*M, "c3"));
  698. LazyCallGraph::Node &D1 = *CG.lookup(lookupFunction(*M, "d1"));
  699. LazyCallGraph::Node &D2 = *CG.lookup(lookupFunction(*M, "d2"));
  700. LazyCallGraph::Node &D3 = *CG.lookup(lookupFunction(*M, "d3"));
  701. LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A1);
  702. LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B1);
  703. LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C1);
  704. LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D1);
  705. ASSERT_EQ(&ARC, CG.lookupRefSCC(A2));
  706. ASSERT_EQ(&ARC, CG.lookupRefSCC(A3));
  707. ASSERT_EQ(&BRC, CG.lookupRefSCC(B2));
  708. ASSERT_EQ(&BRC, CG.lookupRefSCC(B3));
  709. ASSERT_EQ(&CRC, CG.lookupRefSCC(C2));
  710. ASSERT_EQ(&CRC, CG.lookupRefSCC(C3));
  711. ASSERT_EQ(&DRC, CG.lookupRefSCC(D2));
  712. ASSERT_EQ(&DRC, CG.lookupRefSCC(D3));
  713. ASSERT_EQ(1, std::distance(D2->begin(), D2->end()));
  714. // Add an edge to make the graph:
  715. //
  716. // d1 |
  717. // / \ |
  718. // d3--d2---. |
  719. // / \ | |
  720. // b1 c1 | |
  721. // / \ / \ / |
  722. // b3--b2 c3--c2 |
  723. // \ / |
  724. // a1 |
  725. // / \ |
  726. // a3--a2 |
  727. auto MergedRCs = CRC.insertIncomingRefEdge(D2, C2);
  728. // Make sure we connected the nodes.
  729. for (LazyCallGraph::Edge E : *D2) {
  730. if (&E.getNode() == &D3)
  731. continue;
  732. EXPECT_EQ(&C2, &E.getNode());
  733. }
  734. // And marked the D ref-SCC as no longer valid.
  735. EXPECT_EQ(1u, MergedRCs.size());
  736. EXPECT_EQ(&DRC, MergedRCs[0]);
  737. // Make sure we have the correct nodes in the SCC sets.
  738. EXPECT_EQ(&ARC, CG.lookupRefSCC(A1));
  739. EXPECT_EQ(&ARC, CG.lookupRefSCC(A2));
  740. EXPECT_EQ(&ARC, CG.lookupRefSCC(A3));
  741. EXPECT_EQ(&BRC, CG.lookupRefSCC(B1));
  742. EXPECT_EQ(&BRC, CG.lookupRefSCC(B2));
  743. EXPECT_EQ(&BRC, CG.lookupRefSCC(B3));
  744. EXPECT_EQ(&CRC, CG.lookupRefSCC(C1));
  745. EXPECT_EQ(&CRC, CG.lookupRefSCC(C2));
  746. EXPECT_EQ(&CRC, CG.lookupRefSCC(C3));
  747. EXPECT_EQ(&CRC, CG.lookupRefSCC(D1));
  748. EXPECT_EQ(&CRC, CG.lookupRefSCC(D2));
  749. EXPECT_EQ(&CRC, CG.lookupRefSCC(D3));
  750. // And that ancestry tests have been updated.
  751. EXPECT_TRUE(ARC.isParentOf(CRC));
  752. EXPECT_TRUE(BRC.isParentOf(CRC));
  753. // And verify the post-order walk reflects the updated structure.
  754. auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  755. ASSERT_NE(I, E);
  756. EXPECT_EQ(&CRC, &*I) << "Actual RefSCC: " << *I;
  757. ASSERT_NE(++I, E);
  758. EXPECT_EQ(&BRC, &*I) << "Actual RefSCC: " << *I;
  759. ASSERT_NE(++I, E);
  760. EXPECT_EQ(&ARC, &*I) << "Actual RefSCC: " << *I;
  761. EXPECT_EQ(++I, E);
  762. }
  763. TEST(LazyCallGraphTest, IncomingEdgeInsertionRefGraph) {
  764. LLVMContext Context;
  765. // Another variation of the above test but with all the edges switched to
  766. // references rather than calls.
  767. std::unique_ptr<Module> M =
  768. parseAssembly(Context, DiamondOfTrianglesRefGraph);
  769. LazyCallGraph CG = buildCG(*M);
  770. // Force the graph to be fully expanded.
  771. CG.buildRefSCCs();
  772. for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
  773. dbgs() << "Formed RefSCC: " << RC << "\n";
  774. LazyCallGraph::Node &A1 = *CG.lookup(lookupFunction(*M, "a1"));
  775. LazyCallGraph::Node &A2 = *CG.lookup(lookupFunction(*M, "a2"));
  776. LazyCallGraph::Node &A3 = *CG.lookup(lookupFunction(*M, "a3"));
  777. LazyCallGraph::Node &B1 = *CG.lookup(lookupFunction(*M, "b1"));
  778. LazyCallGraph::Node &B2 = *CG.lookup(lookupFunction(*M, "b2"));
  779. LazyCallGraph::Node &B3 = *CG.lookup(lookupFunction(*M, "b3"));
  780. LazyCallGraph::Node &C1 = *CG.lookup(lookupFunction(*M, "c1"));
  781. LazyCallGraph::Node &C2 = *CG.lookup(lookupFunction(*M, "c2"));
  782. LazyCallGraph::Node &C3 = *CG.lookup(lookupFunction(*M, "c3"));
  783. LazyCallGraph::Node &D1 = *CG.lookup(lookupFunction(*M, "d1"));
  784. LazyCallGraph::Node &D2 = *CG.lookup(lookupFunction(*M, "d2"));
  785. LazyCallGraph::Node &D3 = *CG.lookup(lookupFunction(*M, "d3"));
  786. LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A1);
  787. LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B1);
  788. LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C1);
  789. LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D1);
  790. ASSERT_EQ(&ARC, CG.lookupRefSCC(A2));
  791. ASSERT_EQ(&ARC, CG.lookupRefSCC(A3));
  792. ASSERT_EQ(&BRC, CG.lookupRefSCC(B2));
  793. ASSERT_EQ(&BRC, CG.lookupRefSCC(B3));
  794. ASSERT_EQ(&CRC, CG.lookupRefSCC(C2));
  795. ASSERT_EQ(&CRC, CG.lookupRefSCC(C3));
  796. ASSERT_EQ(&DRC, CG.lookupRefSCC(D2));
  797. ASSERT_EQ(&DRC, CG.lookupRefSCC(D3));
  798. ASSERT_EQ(1, std::distance(D2->begin(), D2->end()));
  799. // Add an edge to make the graph:
  800. //
  801. // d1 |
  802. // / \ |
  803. // d3--d2---. |
  804. // / \ | |
  805. // b1 c1 | |
  806. // / \ / \ / |
  807. // b3--b2 c3--c2 |
  808. // \ / |
  809. // a1 |
  810. // / \ |
  811. // a3--a2 |
  812. auto MergedRCs = CRC.insertIncomingRefEdge(D2, C2);
  813. // Make sure we connected the nodes.
  814. for (LazyCallGraph::Edge E : *D2) {
  815. if (&E.getNode() == &D3)
  816. continue;
  817. EXPECT_EQ(&C2, &E.getNode());
  818. }
  819. // And marked the D ref-SCC as no longer valid.
  820. EXPECT_EQ(1u, MergedRCs.size());
  821. EXPECT_EQ(&DRC, MergedRCs[0]);
  822. // Make sure we have the correct nodes in the SCC sets.
  823. EXPECT_EQ(&ARC, CG.lookupRefSCC(A1));
  824. EXPECT_EQ(&ARC, CG.lookupRefSCC(A2));
  825. EXPECT_EQ(&ARC, CG.lookupRefSCC(A3));
  826. EXPECT_EQ(&BRC, CG.lookupRefSCC(B1));
  827. EXPECT_EQ(&BRC, CG.lookupRefSCC(B2));
  828. EXPECT_EQ(&BRC, CG.lookupRefSCC(B3));
  829. EXPECT_EQ(&CRC, CG.lookupRefSCC(C1));
  830. EXPECT_EQ(&CRC, CG.lookupRefSCC(C2));
  831. EXPECT_EQ(&CRC, CG.lookupRefSCC(C3));
  832. EXPECT_EQ(&CRC, CG.lookupRefSCC(D1));
  833. EXPECT_EQ(&CRC, CG.lookupRefSCC(D2));
  834. EXPECT_EQ(&CRC, CG.lookupRefSCC(D3));
  835. // And that ancestry tests have been updated.
  836. EXPECT_TRUE(ARC.isParentOf(CRC));
  837. EXPECT_TRUE(BRC.isParentOf(CRC));
  838. // And verify the post-order walk reflects the updated structure.
  839. auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  840. ASSERT_NE(I, E);
  841. EXPECT_EQ(&CRC, &*I) << "Actual RefSCC: " << *I;
  842. ASSERT_NE(++I, E);
  843. EXPECT_EQ(&BRC, &*I) << "Actual RefSCC: " << *I;
  844. ASSERT_NE(++I, E);
  845. EXPECT_EQ(&ARC, &*I) << "Actual RefSCC: " << *I;
  846. EXPECT_EQ(++I, E);
  847. }
  848. TEST(LazyCallGraphTest, IncomingEdgeInsertionLargeCallCycle) {
  849. LLVMContext Context;
  850. std::unique_ptr<Module> M = parseAssembly(Context, "define void @a() {\n"
  851. "entry:\n"
  852. " call void @b()\n"
  853. " ret void\n"
  854. "}\n"
  855. "define void @b() {\n"
  856. "entry:\n"
  857. " call void @c()\n"
  858. " ret void\n"
  859. "}\n"
  860. "define void @c() {\n"
  861. "entry:\n"
  862. " call void @d()\n"
  863. " ret void\n"
  864. "}\n"
  865. "define void @d() {\n"
  866. "entry:\n"
  867. " ret void\n"
  868. "}\n");
  869. LazyCallGraph CG = buildCG(*M);
  870. // Force the graph to be fully expanded.
  871. CG.buildRefSCCs();
  872. for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
  873. dbgs() << "Formed RefSCC: " << RC << "\n";
  874. LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  875. LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  876. LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  877. LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  878. LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  879. LazyCallGraph::SCC &BC = *CG.lookupSCC(B);
  880. LazyCallGraph::SCC &CC = *CG.lookupSCC(C);
  881. LazyCallGraph::SCC &DC = *CG.lookupSCC(D);
  882. LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A);
  883. LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B);
  884. LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C);
  885. LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D);
  886. // Connect the top to the bottom forming a large RefSCC made up mostly of calls.
  887. auto MergedRCs = ARC.insertIncomingRefEdge(D, A);
  888. // Make sure we connected the nodes.
  889. EXPECT_NE(D->begin(), D->end());
  890. EXPECT_EQ(&A, &D->begin()->getNode());
  891. // Check that we have the dead RCs, but ignore the order.
  892. EXPECT_EQ(3u, MergedRCs.size());
  893. EXPECT_NE(find(MergedRCs, &BRC), MergedRCs.end());
  894. EXPECT_NE(find(MergedRCs, &CRC), MergedRCs.end());
  895. EXPECT_NE(find(MergedRCs, &DRC), MergedRCs.end());
  896. // Make sure the nodes point to the right place now.
  897. EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  898. EXPECT_EQ(&ARC, CG.lookupRefSCC(B));
  899. EXPECT_EQ(&ARC, CG.lookupRefSCC(C));
  900. EXPECT_EQ(&ARC, CG.lookupRefSCC(D));
  901. // Check that the SCCs are in postorder.
  902. EXPECT_EQ(4, ARC.size());
  903. EXPECT_EQ(&DC, &ARC[0]);
  904. EXPECT_EQ(&CC, &ARC[1]);
  905. EXPECT_EQ(&BC, &ARC[2]);
  906. EXPECT_EQ(&AC, &ARC[3]);
  907. // And verify the post-order walk reflects the updated structure.
  908. auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  909. ASSERT_NE(I, E);
  910. EXPECT_EQ(&ARC, &*I) << "Actual RefSCC: " << *I;
  911. EXPECT_EQ(++I, E);
  912. }
  913. TEST(LazyCallGraphTest, IncomingEdgeInsertionLargeRefCycle) {
  914. LLVMContext Context;
  915. std::unique_ptr<Module> M =
  916. parseAssembly(Context, "define void @a() {\n"
  917. "entry:\n"
  918. " %p = alloca void ()*\n"
  919. " store void ()* @b, void ()** %p\n"
  920. " ret void\n"
  921. "}\n"
  922. "define void @b() {\n"
  923. "entry:\n"
  924. " %p = alloca void ()*\n"
  925. " store void ()* @c, void ()** %p\n"
  926. " ret void\n"
  927. "}\n"
  928. "define void @c() {\n"
  929. "entry:\n"
  930. " %p = alloca void ()*\n"
  931. " store void ()* @d, void ()** %p\n"
  932. " ret void\n"
  933. "}\n"
  934. "define void @d() {\n"
  935. "entry:\n"
  936. " ret void\n"
  937. "}\n");
  938. LazyCallGraph CG = buildCG(*M);
  939. // Force the graph to be fully expanded.
  940. CG.buildRefSCCs();
  941. for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
  942. dbgs() << "Formed RefSCC: " << RC << "\n";
  943. LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  944. LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  945. LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  946. LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  947. LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A);
  948. LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B);
  949. LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C);
  950. LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D);
  951. // Connect the top to the bottom forming a large RefSCC made up just of
  952. // references.
  953. auto MergedRCs = ARC.insertIncomingRefEdge(D, A);
  954. // Make sure we connected the nodes.
  955. EXPECT_NE(D->begin(), D->end());
  956. EXPECT_EQ(&A, &D->begin()->getNode());
  957. // Check that we have the dead RCs, but ignore the order.
  958. EXPECT_EQ(3u, MergedRCs.size());
  959. EXPECT_NE(find(MergedRCs, &BRC), MergedRCs.end());
  960. EXPECT_NE(find(MergedRCs, &CRC), MergedRCs.end());
  961. EXPECT_NE(find(MergedRCs, &DRC), MergedRCs.end());
  962. // Make sure the nodes point to the right place now.
  963. EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  964. EXPECT_EQ(&ARC, CG.lookupRefSCC(B));
  965. EXPECT_EQ(&ARC, CG.lookupRefSCC(C));
  966. EXPECT_EQ(&ARC, CG.lookupRefSCC(D));
  967. // And verify the post-order walk reflects the updated structure.
  968. auto I = CG.postorder_ref_scc_begin(), End = CG.postorder_ref_scc_end();
  969. ASSERT_NE(I, End);
  970. EXPECT_EQ(&ARC, &*I) << "Actual RefSCC: " << *I;
  971. EXPECT_EQ(++I, End);
  972. }
TEST(LazyCallGraphTest, InlineAndDeleteFunction) {
  LLVMContext Context;
  // We want to ensure we can delete nodes from relatively complex graphs and
  // so use the diamond of triangles graph defined above.
  //
  // The ascii diagram is repeated here for easy reference.
  //
  //         d1       |
  //        /  \      |
  //       d3--d2     |
  //      /     \     |
  //     b1     c1    |
  //   /  \    /  \   |
  //  b3--b2  c3--c2  |
  //       \  /       |
  //        a1        |
  //       /  \       |
  //      a3--a2      |
  //
  std::unique_ptr<Module> M = parseAssembly(Context, DiamondOfTriangles);
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  for (LazyCallGraph::RefSCC &RC : CG.postorder_ref_sccs())
    dbgs() << "Formed RefSCC: " << RC << "\n";

  LazyCallGraph::Node &A1 = *CG.lookup(lookupFunction(*M, "a1"));
  LazyCallGraph::Node &A2 = *CG.lookup(lookupFunction(*M, "a2"));
  LazyCallGraph::Node &A3 = *CG.lookup(lookupFunction(*M, "a3"));
  LazyCallGraph::Node &B1 = *CG.lookup(lookupFunction(*M, "b1"));
  LazyCallGraph::Node &B2 = *CG.lookup(lookupFunction(*M, "b2"));
  LazyCallGraph::Node &B3 = *CG.lookup(lookupFunction(*M, "b3"));
  LazyCallGraph::Node &C1 = *CG.lookup(lookupFunction(*M, "c1"));
  LazyCallGraph::Node &C2 = *CG.lookup(lookupFunction(*M, "c2"));
  LazyCallGraph::Node &C3 = *CG.lookup(lookupFunction(*M, "c3"));
  LazyCallGraph::Node &D1 = *CG.lookup(lookupFunction(*M, "d1"));
  LazyCallGraph::Node &D2 = *CG.lookup(lookupFunction(*M, "d2"));
  LazyCallGraph::Node &D3 = *CG.lookup(lookupFunction(*M, "d3"));

  // Each triangle forms its own RefSCC.
  LazyCallGraph::RefSCC &ARC = *CG.lookupRefSCC(A1);
  LazyCallGraph::RefSCC &BRC = *CG.lookupRefSCC(B1);
  LazyCallGraph::RefSCC &CRC = *CG.lookupRefSCC(C1);
  LazyCallGraph::RefSCC &DRC = *CG.lookupRefSCC(D1);
  ASSERT_EQ(&ARC, CG.lookupRefSCC(A2));
  ASSERT_EQ(&ARC, CG.lookupRefSCC(A3));
  ASSERT_EQ(&BRC, CG.lookupRefSCC(B2));
  ASSERT_EQ(&BRC, CG.lookupRefSCC(B3));
  ASSERT_EQ(&CRC, CG.lookupRefSCC(C2));
  ASSERT_EQ(&CRC, CG.lookupRefSCC(C3));
  ASSERT_EQ(&DRC, CG.lookupRefSCC(D2));
  ASSERT_EQ(&DRC, CG.lookupRefSCC(D3));
  ASSERT_EQ(1, std::distance(D2->begin(), D2->end()));

  // Delete d2 from the graph, as if it had been inlined.
  //
  //         d1         |
  //        /  /        |
  //       d3--.        |
  //      /     \       |
  //     b1     c1      |
  //   /  \    /  \     |
  //  b3--b2  c3--c2    |
  //       \  /         |
  //        a1          |
  //       /  \         |
  //      a3--a2        |
  Function &D2F = D2.getFunction();
  CallInst *C1Call = nullptr, *D1Call = nullptr;
  // Find the exactly-one call to d2 from c1 and the exactly-one call from d1
  // so we can redirect both to d3, mimicking the IR updates the inliner makes
  // before deleting an inlined function.
  for (User *U : D2F.users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    ASSERT_TRUE(CI) << "Expected a call: " << *U;
    if (CI->getParent()->getParent() == &C1.getFunction()) {
      ASSERT_EQ(nullptr, C1Call) << "Found too many C1 calls: " << *CI;
      C1Call = CI;
    } else if (CI->getParent()->getParent() == &D1.getFunction()) {
      ASSERT_EQ(nullptr, D1Call) << "Found too many D1 calls: " << *CI;
      D1Call = CI;
    } else {
      FAIL() << "Found an unexpected call instruction: " << *CI;
    }
  }
  ASSERT_NE(C1Call, nullptr);
  ASSERT_NE(D1Call, nullptr);
  ASSERT_EQ(&D2F, C1Call->getCalledFunction());
  ASSERT_EQ(&D2F, D1Call->getCalledFunction());
  C1Call->setCalledFunction(&D3.getFunction());
  D1Call->setCalledFunction(&D3.getFunction());
  // After the redirects, nothing in the IR references d2 any more.
  ASSERT_EQ(0u, D2F.getNumUses());

  // Insert new edges first.
  CRC.insertTrivialCallEdge(C1, D3);
  DRC.insertTrivialCallEdge(D1, D3);

  // Then remove the old ones.
  LazyCallGraph::SCC &DC = *CG.lookupSCC(D2);
  // Demote the d1 -> d2 call edge to a reference edge. d2 keeps its SCC while
  // d1 and d3 split off into exactly one new SCC.
  auto NewCs = DRC.switchInternalEdgeToRef(D1, D2);
  EXPECT_EQ(&DC, CG.lookupSCC(D2));
  EXPECT_EQ(NewCs.end(), std::next(NewCs.begin()));
  LazyCallGraph::SCC &NewDC = *NewCs.begin();
  EXPECT_EQ(&NewDC, CG.lookupSCC(D1));
  EXPECT_EQ(&NewDC, CG.lookupSCC(D3));
  // Removing the last internal ref edge into d2 splits the D RefSCC in two:
  // NewRCs[0] holds d1 and d3, NewRCs[1] holds just d2.
  auto NewRCs = DRC.removeInternalRefEdge(D1, {&D2});
  ASSERT_EQ(2u, NewRCs.size());
  LazyCallGraph::RefSCC &NewDRC = *NewRCs[0];
  EXPECT_EQ(&NewDRC, CG.lookupRefSCC(D1));
  EXPECT_EQ(&NewDRC, CG.lookupRefSCC(D3));
  LazyCallGraph::RefSCC &D2RC = *NewRCs[1];
  EXPECT_EQ(&D2RC, CG.lookupRefSCC(D2));
  EXPECT_FALSE(NewDRC.isParentOf(D2RC));
  EXPECT_TRUE(CRC.isParentOf(D2RC));
  EXPECT_TRUE(CRC.isParentOf(NewDRC));
  EXPECT_TRUE(D2RC.isParentOf(NewDRC));
  // Drop the remaining external edge into d2 (from c1).
  CRC.removeOutgoingEdge(C1, D2);
  EXPECT_FALSE(CRC.isParentOf(D2RC));
  EXPECT_TRUE(CRC.isParentOf(NewDRC));
  EXPECT_TRUE(D2RC.isParentOf(NewDRC));

  // Now that we've updated the call graph, D2 is dead, so remove it.
  CG.removeDeadFunction(D2F);

  // Check that the graph still looks the same.
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A1));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A2));
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A3));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B1));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B2));
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B3));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C1));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C2));
  EXPECT_EQ(&CRC, CG.lookupRefSCC(C3));
  EXPECT_EQ(&NewDRC, CG.lookupRefSCC(D1));
  EXPECT_EQ(&NewDRC, CG.lookupRefSCC(D3));
  EXPECT_TRUE(CRC.isParentOf(NewDRC));

  // Verify the post-order walk hasn't changed.
  auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  ASSERT_NE(I, E);
  EXPECT_EQ(&NewDRC, &*I) << "Actual RefSCC: " << *I;
  ASSERT_NE(++I, E);
  EXPECT_EQ(&CRC, &*I) << "Actual RefSCC: " << *I;
  ASSERT_NE(++I, E);
  EXPECT_EQ(&BRC, &*I) << "Actual RefSCC: " << *I;
  ASSERT_NE(++I, E);
  EXPECT_EQ(&ARC, &*I) << "Actual RefSCC: " << *I;
  EXPECT_EQ(++I, E);
}
TEST(LazyCallGraphTest, InternalEdgeMutation) {
  LLVMContext Context;
  // A single call cycle a -> b -> c -> a: one SCC inside one RefSCC.
  std::unique_ptr<Module> M = parseAssembly(Context, "define void @a() {\n"
                                                     "entry:\n"
                                                     " call void @b()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @b() {\n"
                                                     "entry:\n"
                                                     " call void @c()\n"
                                                     " ret void\n"
                                                     "}\n"
                                                     "define void @c() {\n"
                                                     "entry:\n"
                                                     " call void @a()\n"
                                                     " ret void\n"
                                                     "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded. There is exactly one RefSCC.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC = *I++;
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));

  // All three functions share the single SCC of that RefSCC.
  EXPECT_EQ(1, RC.size());
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(A));
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(B));
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(C));

  // Insert an edge from 'a' to 'c'. Nothing changes about the graph.
  RC.insertInternalRefEdge(A, C);
  EXPECT_EQ(2, std::distance(A->begin(), A->end()));
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));
  EXPECT_EQ(1, RC.size());
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(A));
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(B));
  EXPECT_EQ(&*RC.begin(), CG.lookupSCC(C));

  // Switch the call edge from 'b' to 'c' to a ref edge. This will break the
  // call cycle and cause us to form more SCCs. The RefSCC will remain the same
  // though.
  auto NewCs = RC.switchInternalEdgeToRef(B, C);
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));
  auto J = RC.begin();
  // The SCCs must be in *post-order* which means successors before
  // predecessors. At this point we have call edges from C to A and from A to
  // B. The only valid postorder is B, A, C.
  EXPECT_EQ(&*J++, CG.lookupSCC(B));
  EXPECT_EQ(&*J++, CG.lookupSCC(A));
  EXPECT_EQ(&*J++, CG.lookupSCC(C));
  EXPECT_EQ(RC.end(), J);
  // And the returned range must be the slice of this sequence containing new
  // SCCs: everything except the last (pre-existing) SCC.
  EXPECT_EQ(RC.begin(), NewCs.begin());
  EXPECT_EQ(std::prev(RC.end()), NewCs.end());

  // Test turning the ref edge from A to C into a call edge. This will form an
  // SCC out of A and C. Since we previously had a call edge from C to A, the
  // C SCC should be preserved and have A merged into it while the A SCC should
  // be invalidated.
  LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  LazyCallGraph::SCC &CC = *CG.lookupSCC(C);
  // The callback receives the SCCs that were merged away (here, exactly AC).
  EXPECT_TRUE(RC.switchInternalEdgeToCall(A, C, [&](ArrayRef<LazyCallGraph::SCC *> MergedCs) {
    ASSERT_EQ(1u, MergedCs.size());
    EXPECT_EQ(&AC, MergedCs[0]);
  }));
  EXPECT_EQ(2, CC.size());
  EXPECT_EQ(&CC, CG.lookupSCC(A));
  EXPECT_EQ(&CC, CG.lookupSCC(C));
  J = RC.begin();
  EXPECT_EQ(&*J++, CG.lookupSCC(B));
  EXPECT_EQ(&*J++, CG.lookupSCC(C));
  EXPECT_EQ(RC.end(), J);
}
TEST(LazyCallGraphTest, InternalEdgeRemoval) {
  LLVMContext Context;
  // A nice fully connected (including self-edges) RefSCC.
  std::unique_ptr<Module> M = parseAssembly(
      Context, "define void @a(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n"
               "define void @b(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n"
               "define void @c(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded. Everything lands in one RefSCC.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  LazyCallGraph::RefSCC &RC = *I;
  EXPECT_EQ(E, std::next(I));

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));

  // Remove the edge from b -> a, which should leave the 3 functions still in
  // a single connected component because of a -> b -> c -> a.
  SmallVector<LazyCallGraph::RefSCC *, 1> NewRCs =
      RC.removeInternalRefEdge(B, {&A});
  // No new RefSCCs means the RefSCC was not split.
  EXPECT_EQ(0u, NewRCs.size());
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));
  auto J = CG.postorder_ref_scc_begin();
  EXPECT_EQ(I, J);
  EXPECT_EQ(&RC, &*J);
  EXPECT_EQ(E, std::next(J));

  // Increment I before we actually mutate the structure so that it remains
  // a valid iterator.
  ++I;

  // Remove the edge from c -> a, which should leave 'a' in the original RefSCC
  // and form a new RefSCC for 'b' and 'c'.
  NewRCs = RC.removeInternalRefEdge(C, {&A});
  // Two RefSCCs result: NewRCs[0] holds 'b' and 'c', NewRCs[1] holds 'a'.
  ASSERT_EQ(2u, NewRCs.size());
  LazyCallGraph::RefSCC &BCRC = *NewRCs[0];
  LazyCallGraph::RefSCC &ARC = *NewRCs[1];
  EXPECT_EQ(&ARC, CG.lookupRefSCC(A));
  EXPECT_EQ(1, std::distance(ARC.begin(), ARC.end()));
  EXPECT_EQ(&BCRC, CG.lookupRefSCC(B));
  EXPECT_EQ(&BCRC, CG.lookupRefSCC(C));
  // The postorder walk now visits {b,c} first, then {a}, and I (already
  // advanced past the original RefSCC) still marks the end of the old walk.
  J = CG.postorder_ref_scc_begin();
  EXPECT_NE(I, J);
  EXPECT_EQ(&BCRC, &*J);
  ++J;
  EXPECT_NE(I, J);
  EXPECT_EQ(&ARC, &*J);
  ++J;
  EXPECT_EQ(I, J);
  EXPECT_EQ(E, J);
}
TEST(LazyCallGraphTest, InternalMultiEdgeRemoval) {
  LLVMContext Context;
  // A nice fully connected (including self-edges) RefSCC.
  std::unique_ptr<Module> M = parseAssembly(
      Context, "define void @a(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n"
               "define void @b(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n"
               "define void @c(i8** %ptr) {\n"
               "entry:\n"
               " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
               " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
               " ret void\n"
               "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded. Everything lands in one RefSCC.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  LazyCallGraph::RefSCC &RC = *I;
  EXPECT_EQ(E, std::next(I));

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  EXPECT_EQ(&RC, CG.lookupRefSCC(A));
  EXPECT_EQ(&RC, CG.lookupRefSCC(B));
  EXPECT_EQ(&RC, CG.lookupRefSCC(C));

  // Increment I before we actually mutate the structure so that it remains
  // a valid iterator.
  ++I;

  // Remove the edges from b -> a and b -> c, leaving b in its own RefSCC.
  // This exercises removing multiple targets in a single call.
  SmallVector<LazyCallGraph::RefSCC *, 1> NewRCs =
      RC.removeInternalRefEdge(B, {&A, &C});
  // Two RefSCCs result: NewRCs[0] holds 'b', NewRCs[1] holds 'a' and 'c'.
  ASSERT_EQ(2u, NewRCs.size());
  LazyCallGraph::RefSCC &BRC = *NewRCs[0];
  LazyCallGraph::RefSCC &ACRC = *NewRCs[1];
  EXPECT_EQ(&BRC, CG.lookupRefSCC(B));
  EXPECT_EQ(1, std::distance(BRC.begin(), BRC.end()));
  EXPECT_EQ(&ACRC, CG.lookupRefSCC(A));
  EXPECT_EQ(&ACRC, CG.lookupRefSCC(C));
  // The postorder walk now visits {b} first, then {a,c}, and ends where the
  // pre-advanced iterator I points.
  auto J = CG.postorder_ref_scc_begin();
  EXPECT_NE(I, J);
  EXPECT_EQ(&BRC, &*J);
  ++J;
  EXPECT_NE(I, J);
  EXPECT_EQ(&ACRC, &*J);
  ++J;
  EXPECT_EQ(I, J);
  EXPECT_EQ(E, J);
}
  1323. TEST(LazyCallGraphTest, InternalNoOpEdgeRemoval) {
  1324. LLVMContext Context;
  1325. // A graph with a single cycle formed both from call and reference edges
  1326. // which makes the reference edges trivial to delete. The graph looks like:
  1327. //
  1328. // Reference edges: a -> b -> c -> a
  1329. // Call edges: a -> c -> b -> a
  1330. std::unique_ptr<Module> M = parseAssembly(
  1331. Context, "define void @a(i8** %ptr) {\n"
  1332. "entry:\n"
  1333. " call void @b(i8** %ptr)\n"
  1334. " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
  1335. " ret void\n"
  1336. "}\n"
  1337. "define void @b(i8** %ptr) {\n"
  1338. "entry:\n"
  1339. " store i8* bitcast (void(i8**)* @a to i8*), i8** %ptr\n"
  1340. " call void @c(i8** %ptr)\n"
  1341. " ret void\n"
  1342. "}\n"
  1343. "define void @c(i8** %ptr) {\n"
  1344. "entry:\n"
  1345. " call void @a(i8** %ptr)\n"
  1346. " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
  1347. " ret void\n"
  1348. "}\n");
  1349. LazyCallGraph CG = buildCG(*M);
  1350. // Force the graph to be fully expanded.
  1351. CG.buildRefSCCs();
  1352. auto I = CG.postorder_ref_scc_begin(), E = CG.postorder_ref_scc_end();
  1353. LazyCallGraph::RefSCC &RC = *I;
  1354. EXPECT_EQ(E, std::next(I));
  1355. LazyCallGraph::SCC &C = *RC.begin();
  1356. EXPECT_EQ(RC.end(), std::next(RC.begin()));
  1357. LazyCallGraph::Node &AN = *CG.lookup(lookupFunction(*M, "a"));
  1358. LazyCallGraph::Node &BN = *CG.lookup(lookupFunction(*M, "b"));
  1359. LazyCallGraph::Node &CN = *CG.lookup(lookupFunction(*M, "c"));
  1360. EXPECT_EQ(&RC, CG.lookupRefSCC(AN));
  1361. EXPECT_EQ(&RC, CG.lookupRefSCC(BN));
  1362. EXPECT_EQ(&RC, CG.lookupRefSCC(CN));
  1363. EXPECT_EQ(&C, CG.lookupSCC(AN));
  1364. EXPECT_EQ(&C, CG.lookupSCC(BN));
  1365. EXPECT_EQ(&C, CG.lookupSCC(CN));
  1366. // Remove the edge from a -> c which doesn't change anything.
  1367. SmallVector<LazyCallGraph::RefSCC *, 1> NewRCs =
  1368. RC.removeInternalRefEdge(AN, {&CN});
  1369. EXPECT_EQ(0u, NewRCs.size());
  1370. EXPECT_EQ(&RC, CG.lookupRefSCC(AN));
  1371. EXPECT_EQ(&RC, CG.lookupRefSCC(BN));
  1372. EXPECT_EQ(&RC, CG.lookupRefSCC(CN));
  1373. EXPECT_EQ(&C, CG.lookupSCC(AN));
  1374. EXPECT_EQ(&C, CG.lookupSCC(BN));
  1375. EXPECT_EQ(&C, CG.lookupSCC(CN));
  1376. auto J = CG.postorder_ref_scc_begin();
  1377. EXPECT_EQ(I, J);
  1378. EXPECT_EQ(&RC, &*J);
  1379. EXPECT_EQ(E, std::next(J));
  1380. // Remove the edge from b -> a and c -> b; again this doesn't change
  1381. // anything.
  1382. NewRCs = RC.removeInternalRefEdge(BN, {&AN});
  1383. NewRCs = RC.removeInternalRefEdge(CN, {&BN});
  1384. EXPECT_EQ(0u, NewRCs.size());
  1385. EXPECT_EQ(&RC, CG.lookupRefSCC(AN));
  1386. EXPECT_EQ(&RC, CG.lookupRefSCC(BN));
  1387. EXPECT_EQ(&RC, CG.lookupRefSCC(CN));
  1388. EXPECT_EQ(&C, CG.lookupSCC(AN));
  1389. EXPECT_EQ(&C, CG.lookupSCC(BN));
  1390. EXPECT_EQ(&C, CG.lookupSCC(CN));
  1391. J = CG.postorder_ref_scc_begin();
  1392. EXPECT_EQ(I, J);
  1393. EXPECT_EQ(&RC, &*J);
  1394. EXPECT_EQ(E, std::next(J));
  1395. }
  1396. TEST(LazyCallGraphTest, InternalCallEdgeToRef) {
  1397. LLVMContext Context;
  1398. // A nice fully connected (including self-edges) SCC (and RefSCC)
  1399. std::unique_ptr<Module> M = parseAssembly(Context, "define void @a() {\n"
  1400. "entry:\n"
  1401. " call void @a()\n"
  1402. " call void @b()\n"
  1403. " call void @c()\n"
  1404. " ret void\n"
  1405. "}\n"
  1406. "define void @b() {\n"
  1407. "entry:\n"
  1408. " call void @a()\n"
  1409. " call void @b()\n"
  1410. " call void @c()\n"
  1411. " ret void\n"
  1412. "}\n"
  1413. "define void @c() {\n"
  1414. "entry:\n"
  1415. " call void @a()\n"
  1416. " call void @b()\n"
  1417. " call void @c()\n"
  1418. " ret void\n"
  1419. "}\n");
  1420. LazyCallGraph CG = buildCG(*M);
  1421. // Force the graph to be fully expanded.
  1422. CG.buildRefSCCs();
  1423. auto I = CG.postorder_ref_scc_begin();
  1424. LazyCallGraph::RefSCC &RC = *I++;
  1425. EXPECT_EQ(CG.postorder_ref_scc_end(), I);
  1426. EXPECT_EQ(1, RC.size());
  1427. LazyCallGraph::SCC &AC = *RC.begin();
  1428. LazyCallGraph::Node &AN = *CG.lookup(lookupFunction(*M, "a"));
  1429. LazyCallGraph::Node &BN = *CG.lookup(lookupFunction(*M, "b"));
  1430. LazyCallGraph::Node &CN = *CG.lookup(lookupFunction(*M, "c"));
  1431. EXPECT_EQ(&AC, CG.lookupSCC(AN));
  1432. EXPECT_EQ(&AC, CG.lookupSCC(BN));
  1433. EXPECT_EQ(&AC, CG.lookupSCC(CN));
  1434. // Remove the call edge from b -> a to a ref edge, which should leave the
  1435. // 3 functions still in a single connected component because of a -> b ->
  1436. // c -> a.
  1437. auto NewCs = RC.switchInternalEdgeToRef(BN, AN);
  1438. EXPECT_EQ(NewCs.begin(), NewCs.end());
  1439. EXPECT_EQ(1, RC.size());
  1440. EXPECT_EQ(&AC, CG.lookupSCC(AN));
  1441. EXPECT_EQ(&AC, CG.lookupSCC(BN));
  1442. EXPECT_EQ(&AC, CG.lookupSCC(CN));
  1443. // Remove the edge from c -> a, which should leave 'a' in the original SCC
  1444. // and form a new SCC for 'b' and 'c'.
  1445. NewCs = RC.switchInternalEdgeToRef(CN, AN);
  1446. EXPECT_EQ(1, std::distance(NewCs.begin(), NewCs.end()));
  1447. EXPECT_EQ(2, RC.size());
  1448. EXPECT_EQ(&AC, CG.lookupSCC(AN));
  1449. LazyCallGraph::SCC &BC = *CG.lookupSCC(BN);
  1450. EXPECT_NE(&BC, &AC);
  1451. EXPECT_EQ(&BC, CG.lookupSCC(CN));
  1452. auto J = RC.find(AC);
  1453. EXPECT_EQ(&AC, &*J);
  1454. --J;
  1455. EXPECT_EQ(&BC, &*J);
  1456. EXPECT_EQ(RC.begin(), J);
  1457. EXPECT_EQ(J, NewCs.begin());
  1458. // Remove the edge from c -> b, which should leave 'b' in the original SCC
  1459. // and form a new SCC for 'c'. It shouldn't change 'a's SCC.
  1460. NewCs = RC.switchInternalEdgeToRef(CN, BN);
  1461. EXPECT_EQ(1, std::distance(NewCs.begin(), NewCs.end()));
  1462. EXPECT_EQ(3, RC.size());
  1463. EXPECT_EQ(&AC, CG.lookupSCC(AN));
  1464. EXPECT_EQ(&BC, CG.lookupSCC(BN));
  1465. LazyCallGraph::SCC &CC = *CG.lookupSCC(CN);
  1466. EXPECT_NE(&CC, &AC);
  1467. EXPECT_NE(&CC, &BC);
  1468. J = RC.find(AC);
  1469. EXPECT_EQ(&AC, &*J);
  1470. --J;
  1471. EXPECT_EQ(&BC, &*J);
  1472. --J;
  1473. EXPECT_EQ(&CC, &*J);
  1474. EXPECT_EQ(RC.begin(), J);
  1475. EXPECT_EQ(J, NewCs.begin());
  1476. }
TEST(LazyCallGraphTest, InternalRefEdgeToCall) {
  LLVMContext Context;
  // Basic tests for making a ref edge a call. This hits the basics of the
  // process only.
  //
  // Layout: 'a' calls 'b' and 'c' and takes the address of 'd'; 'b' and 'c'
  // each take the address of the other and call 'd'; 'd' takes the address
  // of 'a'. Everything lands in one RefSCC holding four singleton SCCs.
  std::unique_ptr<Module> M =
      parseAssembly(Context, "define void @a() {\n"
                             "entry:\n"
                             " call void @b()\n"
                             " call void @c()\n"
                             " store void()* @d, void()** undef\n"
                             " ret void\n"
                             "}\n"
                             "define void @b() {\n"
                             "entry:\n"
                             " store void()* @c, void()** undef\n"
                             " call void @d()\n"
                             " ret void\n"
                             "}\n"
                             "define void @c() {\n"
                             "entry:\n"
                             " store void()* @b, void()** undef\n"
                             " call void @d()\n"
                             " ret void\n"
                             "}\n"
                             "define void @d() {\n"
                             "entry:\n"
                             " store void()* @a, void()** undef\n"
                             " ret void\n"
                             "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC = *I++;
  // There is exactly one RefSCC.
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  LazyCallGraph::SCC &BC = *CG.lookupSCC(B);
  LazyCallGraph::SCC &CC = *CG.lookupSCC(C);
  LazyCallGraph::SCC &DC = *CG.lookupSCC(D);

  // Check the initial post-order. Note that B and C could be flipped here (and
  // in our mutation) without changing the nature of this test.
  ASSERT_EQ(4, RC.size());
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&BC, &RC[1]);
  EXPECT_EQ(&CC, &RC[2]);
  EXPECT_EQ(&AC, &RC[3]);

  // Switch the ref edge from A -> D to a call edge. This should have no
  // effect as it is already in postorder and no new cycles are formed.
  // (switchInternalEdgeToCall returns true only when SCCs get merged.)
  EXPECT_FALSE(RC.switchInternalEdgeToCall(A, D));
  ASSERT_EQ(4, RC.size());
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&BC, &RC[1]);
  EXPECT_EQ(&CC, &RC[2]);
  EXPECT_EQ(&AC, &RC[3]);

  // Switch B -> C to a call edge. This doesn't form any new cycles but does
  // require reordering the SCCs: C must now precede B in post-order.
  EXPECT_FALSE(RC.switchInternalEdgeToCall(B, C));
  ASSERT_EQ(4, RC.size());
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&CC, &RC[1]);
  EXPECT_EQ(&BC, &RC[2]);
  EXPECT_EQ(&AC, &RC[3]);

  // Switch C -> B to a call edge. This forms a cycle and forces merging SCCs.
  // The callback observes exactly which SCCs are merged away (here just C's).
  EXPECT_TRUE(RC.switchInternalEdgeToCall(C, B, [&](ArrayRef<LazyCallGraph::SCC *> MergedCs) {
    ASSERT_EQ(1u, MergedCs.size());
    EXPECT_EQ(&CC, MergedCs[0]);
  }));
  // B's SCC absorbed C, so the post-order shrinks to three SCCs.
  ASSERT_EQ(3, RC.size());
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&BC, &RC[1]);
  EXPECT_EQ(&AC, &RC[2]);
  EXPECT_EQ(2, BC.size());
  EXPECT_EQ(&BC, CG.lookupSCC(B));
  EXPECT_EQ(&BC, CG.lookupSCC(C));
}
TEST(LazyCallGraphTest, InternalRefEdgeToCallNoCycleInterleaved) {
  LLVMContext Context;
  // Test for having a post-order prior to changing a ref edge to a call edge
  // with SCCs connecting to the source and connecting to the target, but not
  // connecting to both, interleaved between the source and target. This
  // ensures we correctly partition the range rather than simply moving one or
  // the other.
  std::unique_ptr<Module> M =
      parseAssembly(Context, "define void @a() {\n"
                             "entry:\n"
                             " call void @b1()\n"
                             " call void @c1()\n"
                             " ret void\n"
                             "}\n"
                             "define void @b1() {\n"
                             "entry:\n"
                             " call void @c1()\n"
                             " call void @b2()\n"
                             " ret void\n"
                             "}\n"
                             "define void @c1() {\n"
                             "entry:\n"
                             " call void @b2()\n"
                             " call void @c2()\n"
                             " ret void\n"
                             "}\n"
                             "define void @b2() {\n"
                             "entry:\n"
                             " call void @c2()\n"
                             " call void @b3()\n"
                             " ret void\n"
                             "}\n"
                             "define void @c2() {\n"
                             "entry:\n"
                             " call void @b3()\n"
                             " call void @c3()\n"
                             " ret void\n"
                             "}\n"
                             "define void @b3() {\n"
                             "entry:\n"
                             " call void @c3()\n"
                             " call void @d()\n"
                             " ret void\n"
                             "}\n"
                             "define void @c3() {\n"
                             "entry:\n"
                             " store void()* @b1, void()** undef\n"
                             " call void @d()\n"
                             " ret void\n"
                             "}\n"
                             "define void @d() {\n"
                             "entry:\n"
                             " store void()* @a, void()** undef\n"
                             " ret void\n"
                             "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC = *I++;
  // The ref edges c3 -> b1 and d -> a tie everything into a single RefSCC.
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B1 = *CG.lookup(lookupFunction(*M, "b1"));
  LazyCallGraph::Node &B2 = *CG.lookup(lookupFunction(*M, "b2"));
  LazyCallGraph::Node &B3 = *CG.lookup(lookupFunction(*M, "b3"));
  LazyCallGraph::Node &C1 = *CG.lookup(lookupFunction(*M, "c1"));
  LazyCallGraph::Node &C2 = *CG.lookup(lookupFunction(*M, "c2"));
  LazyCallGraph::Node &C3 = *CG.lookup(lookupFunction(*M, "c3"));
  LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  LazyCallGraph::SCC &B1C = *CG.lookupSCC(B1);
  LazyCallGraph::SCC &B2C = *CG.lookupSCC(B2);
  LazyCallGraph::SCC &B3C = *CG.lookupSCC(B3);
  LazyCallGraph::SCC &C1C = *CG.lookupSCC(C1);
  LazyCallGraph::SCC &C2C = *CG.lookupSCC(C2);
  LazyCallGraph::SCC &C3C = *CG.lookupSCC(C3);
  LazyCallGraph::SCC &DC = *CG.lookupSCC(D);

  // Several call edges are initially present to force a particular post-order.
  // Remove them now (demoting each to a ref edge), leaving an interleaved
  // post-order pattern of alternating 'b' and 'c' SCCs.
  RC.switchTrivialInternalEdgeToRef(B3, C3);
  RC.switchTrivialInternalEdgeToRef(C2, B3);
  RC.switchTrivialInternalEdgeToRef(B2, C2);
  RC.switchTrivialInternalEdgeToRef(C1, B2);
  RC.switchTrivialInternalEdgeToRef(B1, C1);

  // Check the initial post-order. We ensure this order with the extra edges
  // that are nuked above.
  ASSERT_EQ(8, RC.size());
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&C3C, &RC[1]);
  EXPECT_EQ(&B3C, &RC[2]);
  EXPECT_EQ(&C2C, &RC[3]);
  EXPECT_EQ(&B2C, &RC[4]);
  EXPECT_EQ(&C1C, &RC[5]);
  EXPECT_EQ(&B1C, &RC[6]);
  EXPECT_EQ(&AC, &RC[7]);

  // Switch C3 -> B1 to a call edge. This doesn't form any new cycles but does
  // require reordering the SCCs in the face of tricky internal node
  // structures.
  EXPECT_FALSE(RC.switchInternalEdgeToCall(C3, B1));
  ASSERT_EQ(8, RC.size());
  // The whole 'b' chain (which reaches the target B1) now sits below the
  // source C3's SCC, and the 'c' chain sits above it.
  EXPECT_EQ(&DC, &RC[0]);
  EXPECT_EQ(&B3C, &RC[1]);
  EXPECT_EQ(&B2C, &RC[2]);
  EXPECT_EQ(&B1C, &RC[3]);
  EXPECT_EQ(&C3C, &RC[4]);
  EXPECT_EQ(&C2C, &RC[5]);
  EXPECT_EQ(&C1C, &RC[6]);
  EXPECT_EQ(&AC, &RC[7]);
}
TEST(LazyCallGraphTest, InternalRefEdgeToCallBothPartitionAndMerge) {
  LLVMContext Context;
  // Test for having a postorder where between the source and target are all
  // three kinds of other SCCs:
  // 1) One connected to the target only that have to be shifted below the
  //    source.
  // 2) One connected to the source only that have to be shifted below the
  //    target.
  // 3) One connected to both source and target that has to remain and get
  //    merged away.
  //
  // To achieve this we construct a heavily connected graph to force
  // a particular post-order. Then we remove the forcing edges and connect
  // a cycle.
  //
  // Diagram for the graph we want on the left and the graph we use to force
  // the ordering on the right. Edges point down or right.
  //
  //   A    |    A     |
  //  / \   |   / \    |
  // B   E  |  B   \   |
  // |\  |  |  |\   |  |
  // | D |  |  C-D-E   |
  // |  \|  |  |  \|   |
  // C   F  |   \  F   |
  //  \ /   |    \ /   |
  //   G    |     G    |
  //
  // And we form a cycle by connecting F to B.
  std::unique_ptr<Module> M =
      parseAssembly(Context, "define void @a() {\n"
                             "entry:\n"
                             " call void @b()\n"
                             " call void @e()\n"
                             " ret void\n"
                             "}\n"
                             "define void @b() {\n"
                             "entry:\n"
                             " call void @c()\n"
                             " call void @d()\n"
                             " ret void\n"
                             "}\n"
                             "define void @c() {\n"
                             "entry:\n"
                             " call void @d()\n"
                             " call void @g()\n"
                             " ret void\n"
                             "}\n"
                             "define void @d() {\n"
                             "entry:\n"
                             " call void @e()\n"
                             " call void @f()\n"
                             " ret void\n"
                             "}\n"
                             "define void @e() {\n"
                             "entry:\n"
                             " call void @f()\n"
                             " ret void\n"
                             "}\n"
                             "define void @f() {\n"
                             "entry:\n"
                             " store void()* @b, void()** undef\n"
                             " call void @g()\n"
                             " ret void\n"
                             "}\n"
                             "define void @g() {\n"
                             "entry:\n"
                             " store void()* @a, void()** undef\n"
                             " ret void\n"
                             "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC = *I++;
  // The ref edges f -> b and g -> a tie everything into a single RefSCC.
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  LazyCallGraph::Node &A = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &B = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &C = *CG.lookup(lookupFunction(*M, "c"));
  LazyCallGraph::Node &D = *CG.lookup(lookupFunction(*M, "d"));
  LazyCallGraph::Node &E = *CG.lookup(lookupFunction(*M, "e"));
  LazyCallGraph::Node &F = *CG.lookup(lookupFunction(*M, "f"));
  LazyCallGraph::Node &G = *CG.lookup(lookupFunction(*M, "g"));
  LazyCallGraph::SCC &AC = *CG.lookupSCC(A);
  LazyCallGraph::SCC &BC = *CG.lookupSCC(B);
  LazyCallGraph::SCC &CC = *CG.lookupSCC(C);
  LazyCallGraph::SCC &DC = *CG.lookupSCC(D);
  LazyCallGraph::SCC &EC = *CG.lookupSCC(E);
  LazyCallGraph::SCC &FC = *CG.lookupSCC(F);
  LazyCallGraph::SCC &GC = *CG.lookupSCC(G);

  // Remove the extra edges that were used to force a particular post-order:
  // demote the call edges c -> d and d -> e to ref edges.
  RC.switchTrivialInternalEdgeToRef(C, D);
  RC.switchTrivialInternalEdgeToRef(D, E);

  // Check the initial post-order. We ensure this order with the extra edges
  // that are nuked above.
  ASSERT_EQ(7, RC.size());
  EXPECT_EQ(&GC, &RC[0]);
  EXPECT_EQ(&FC, &RC[1]);
  EXPECT_EQ(&EC, &RC[2]);
  EXPECT_EQ(&DC, &RC[3]);
  EXPECT_EQ(&CC, &RC[4]);
  EXPECT_EQ(&BC, &RC[5]);
  EXPECT_EQ(&AC, &RC[6]);

  // Switch F -> B to a call edge. This merges B, D, and F into a single SCC,
  // and has to place the C and E SCCs on either side of it:
  //   A         A    |
  //  / \       / \   |
  // B   E     |   E  |
  // |\  |      \ /   |
  // | D |  ->   B    |
  // |  \|      / \   |
  // C   F     C   |  |
  //  \ /       \ /   |
  //   G         G    |
  EXPECT_TRUE(RC.switchInternalEdgeToCall(
      F, B, [&](ArrayRef<LazyCallGraph::SCC *> MergedCs) {
        // The callback observes the two SCCs folded into B's: F's then D's.
        ASSERT_EQ(2u, MergedCs.size());
        EXPECT_EQ(&FC, MergedCs[0]);
        EXPECT_EQ(&DC, MergedCs[1]);
      }));
  EXPECT_EQ(3, BC.size());

  // And make sure the postorder was updated.
  ASSERT_EQ(5, RC.size());
  EXPECT_EQ(&GC, &RC[0]);
  EXPECT_EQ(&CC, &RC[1]);
  EXPECT_EQ(&BC, &RC[2]);
  EXPECT_EQ(&EC, &RC[3]);
  EXPECT_EQ(&AC, &RC[4]);
}
  1794. // Test for IR containing constants using blockaddress constant expressions.
  1795. // These are truly unique constructs: constant expressions with non-constant
  1796. // operands.
  1797. TEST(LazyCallGraphTest, HandleBlockAddress) {
  1798. LLVMContext Context;
  1799. std::unique_ptr<Module> M =
  1800. parseAssembly(Context, "define void @f() {\n"
  1801. "entry:\n"
  1802. " ret void\n"
  1803. "bb:\n"
  1804. " unreachable\n"
  1805. "}\n"
  1806. "define void @g(i8** %ptr) {\n"
  1807. "entry:\n"
  1808. " store i8* blockaddress(@f, %bb), i8** %ptr\n"
  1809. " ret void\n"
  1810. "}\n");
  1811. LazyCallGraph CG = buildCG(*M);
  1812. CG.buildRefSCCs();
  1813. auto I = CG.postorder_ref_scc_begin();
  1814. LazyCallGraph::RefSCC &FRC = *I++;
  1815. LazyCallGraph::RefSCC &GRC = *I++;
  1816. EXPECT_EQ(CG.postorder_ref_scc_end(), I);
  1817. LazyCallGraph::Node &F = *CG.lookup(lookupFunction(*M, "f"));
  1818. LazyCallGraph::Node &G = *CG.lookup(lookupFunction(*M, "g"));
  1819. EXPECT_EQ(&FRC, CG.lookupRefSCC(F));
  1820. EXPECT_EQ(&GRC, CG.lookupRefSCC(G));
  1821. EXPECT_TRUE(GRC.isParentOf(FRC));
  1822. }
  1823. // Test that a blockaddress that refers to itself creates no new RefSCC
  1824. // connections. https://bugs.llvm.org/show_bug.cgi?id=40722
  1825. TEST(LazyCallGraphTest, HandleBlockAddress2) {
  1826. LLVMContext Context;
  1827. std::unique_ptr<Module> M =
  1828. parseAssembly(Context, "define void @f() {\n"
  1829. " ret void\n"
  1830. "}\n"
  1831. "define void @g(i8** %ptr) {\n"
  1832. "bb:\n"
  1833. " store i8* blockaddress(@g, %bb), i8** %ptr\n"
  1834. " ret void\n"
  1835. "}\n");
  1836. LazyCallGraph CG = buildCG(*M);
  1837. CG.buildRefSCCs();
  1838. auto I = CG.postorder_ref_scc_begin();
  1839. LazyCallGraph::RefSCC &GRC = *I++;
  1840. LazyCallGraph::RefSCC &FRC = *I++;
  1841. EXPECT_EQ(CG.postorder_ref_scc_end(), I);
  1842. LazyCallGraph::Node &F = *CG.lookup(lookupFunction(*M, "f"));
  1843. LazyCallGraph::Node &G = *CG.lookup(lookupFunction(*M, "g"));
  1844. EXPECT_EQ(&FRC, CG.lookupRefSCC(F));
  1845. EXPECT_EQ(&GRC, CG.lookupRefSCC(G));
  1846. EXPECT_FALSE(GRC.isParentOf(FRC));
  1847. EXPECT_FALSE(FRC.isParentOf(GRC));
  1848. }
TEST(LazyCallGraphTest, ReplaceNodeFunction) {
  LLVMContext Context;
  // A graph with several different kinds of edges pointing at a particular
  // function: a pure ref from 'a', mixed refs and a call from 'b', repeated
  // calls plus a ref from 'c', and self-call/self-ref edges from 'd' itself.
  std::unique_ptr<Module> M =
      parseAssembly(Context,
                    "define void @a(i8** %ptr) {\n"
                    "entry:\n"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n"
                    "define void @b(i8** %ptr) {\n"
                    "entry:\n"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " call void @d(i8** %ptr)"
                    " ret void\n"
                    "}\n"
                    "define void @c(i8** %ptr) {\n"
                    "entry:\n"
                    " call void @d(i8** %ptr)"
                    " call void @d(i8** %ptr)"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n"
                    "define void @d(i8** %ptr) {\n"
                    "entry:\n"
                    " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
                    " call void @c(i8** %ptr)"
                    " call void @d(i8** %ptr)"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &RC1 = *I++;
  LazyCallGraph::RefSCC &RC2 = *I++;
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  // RC1 contains 'b', 'c', and 'd' in two SCCs; 'a' sits alone in RC2.
  ASSERT_EQ(2, RC1.size());
  LazyCallGraph::SCC &C1 = RC1[0];
  LazyCallGraph::SCC &C2 = RC1[1];

  LazyCallGraph::Node &AN = *CG.lookup(lookupFunction(*M, "a"));
  LazyCallGraph::Node &BN = *CG.lookup(lookupFunction(*M, "b"));
  LazyCallGraph::Node &CN = *CG.lookup(lookupFunction(*M, "c"));
  LazyCallGraph::Node &DN = *CG.lookup(lookupFunction(*M, "d"));
  EXPECT_EQ(&C1, CG.lookupSCC(DN));
  EXPECT_EQ(&C1, CG.lookupSCC(CN));
  EXPECT_EQ(&C2, CG.lookupSCC(BN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(DN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(CN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(BN));
  EXPECT_EQ(&RC2, CG.lookupRefSCC(AN));

  // Now we need to build a new function 'e' with the same signature as 'd',
  // inserted into the module directly before 'd'.
  Function &D = DN.getFunction();
  Function &E = *Function::Create(D.getFunctionType(), D.getLinkage(), "e");
  D.getParent()->getFunctionList().insert(D.getIterator(), &E);

  // Change each use of 'd' to use 'e'. This is particularly easy as they have
  // the same type.
  D.replaceAllUsesWith(&E);

  // Splice the body of the old function into the new one.
  E.getBasicBlockList().splice(E.begin(), D.getBasicBlockList());

  // And fix up the one argument: reroute uses of 'd's argument to 'e's and
  // transfer the name.
  D.arg_begin()->replaceAllUsesWith(&*E.arg_begin());
  E.arg_begin()->takeName(&*D.arg_begin());

  // Now replace the function in the graph.
  RC1.replaceNodeFunction(DN, E);

  // The node must now wrap 'e', and edges keyed on DN (from both 'c' and
  // 'b') must still resolve to the very same Node object.
  EXPECT_EQ(&E, &DN.getFunction());
  EXPECT_EQ(&DN, &(*CN)[DN].getNode());
  EXPECT_EQ(&DN, &(*BN)[DN].getNode());
}
TEST(LazyCallGraphTest, RemoveFunctionWithSpurriousRef) {
  LLVMContext Context;
  // A graph with a couple of RefSCCs, plus a 'dead' function that nothing in
  // the IR actually uses.
  std::unique_ptr<Module> M =
      parseAssembly(Context,
                    "define void @a(i8** %ptr) {\n"
                    "entry:\n"
                    " store i8* bitcast (void(i8**)* @d to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n"
                    "define void @b(i8** %ptr) {\n"
                    "entry:\n"
                    " store i8* bitcast (void(i8**)* @c to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n"
                    "define void @c(i8** %ptr) {\n"
                    "entry:\n"
                    " call void @d(i8** %ptr)"
                    " ret void\n"
                    "}\n"
                    "define void @d(i8** %ptr) {\n"
                    "entry:\n"
                    " call void @c(i8** %ptr)"
                    " store i8* bitcast (void(i8**)* @b to i8*), i8** %ptr\n"
                    " ret void\n"
                    "}\n"
                    "define void @dead() {\n"
                    "entry:\n"
                    " ret void\n"
                    "}\n");
  LazyCallGraph CG = buildCG(*M);

  // Insert spurious ref edges from every function to 'dead', even though the
  // IR contains no such references. Each node must be populated before edges
  // can be inserted on it.
  LazyCallGraph::Node &AN = CG.get(lookupFunction(*M, "a"));
  LazyCallGraph::Node &BN = CG.get(lookupFunction(*M, "b"));
  LazyCallGraph::Node &CN = CG.get(lookupFunction(*M, "c"));
  LazyCallGraph::Node &DN = CG.get(lookupFunction(*M, "d"));
  LazyCallGraph::Node &DeadN = CG.get(lookupFunction(*M, "dead"));
  AN.populate();
  BN.populate();
  CN.populate();
  DN.populate();
  DeadN.populate();
  CG.insertEdge(AN, DeadN, LazyCallGraph::Edge::Ref);
  CG.insertEdge(BN, DeadN, LazyCallGraph::Edge::Ref);
  CG.insertEdge(CN, DeadN, LazyCallGraph::Edge::Ref);
  CG.insertEdge(DN, DeadN, LazyCallGraph::Edge::Ref);

  // Force the graph to be fully expanded.
  CG.buildRefSCCs();
  auto I = CG.postorder_ref_scc_begin();
  LazyCallGraph::RefSCC &DeadRC = *I++;
  LazyCallGraph::RefSCC &RC1 = *I++;
  LazyCallGraph::RefSCC &RC2 = *I++;
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);

  // RC1 contains 'b', 'c', and 'd' in two SCCs; 'a' is alone in RC2 and
  // 'dead' is alone in DeadRC.
  ASSERT_EQ(2, RC1.size());
  LazyCallGraph::SCC &C1 = RC1[0];
  LazyCallGraph::SCC &C2 = RC1[1];

  EXPECT_EQ(&DeadRC, CG.lookupRefSCC(DeadN));
  EXPECT_EQ(&C1, CG.lookupSCC(DN));
  EXPECT_EQ(&C1, CG.lookupSCC(CN));
  EXPECT_EQ(&C2, CG.lookupSCC(BN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(DN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(CN));
  EXPECT_EQ(&RC1, CG.lookupRefSCC(BN));
  EXPECT_EQ(&RC2, CG.lookupRefSCC(AN));

  // Now delete 'dead'. There are no uses of this function but there are
  // spurious references.
  CG.removeDeadFunction(DeadN.getFunction());

  // The only observable change should be that the RefSCC is gone from the
  // postorder sequence.
  I = CG.postorder_ref_scc_begin();
  EXPECT_EQ(&RC1, &*I++);
  EXPECT_EQ(&RC2, &*I++);
  EXPECT_EQ(CG.postorder_ref_scc_end(), I);
}
  1995. }