//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

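/// Create the worklist implementation that corresponds to the exploration
/// strategy selected in the analyzer options (DFS, BFS, or one of the
/// unexplored-first variants).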
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts,
                                                  SubEngine &subengine) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts, subengine)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");
    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist();
  return WList->hasWork();
}

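/// Dispatch a dequeued worklist unit to the handler that matches the kind of
/// program point at which its node was created.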
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

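/// Run the worklist algorithm starting from the given initial state and
/// collect all end-of-path nodes that were produced into \p Dst.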
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

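/// Process a transition along a CFG edge: mark the destination block as
/// visited, finish the path if the destination is the EXIT block, and
/// otherwise let the subengine process the block entrance.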
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

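/// Process entry into a CFG block: bump the per-block visit counter and hand
/// the block's first CFG element to the subengine, or handle the block exit
/// if the block is empty.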
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

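/// Process the end of a CFG block by dispatching on its terminator statement
/// (branches, switches, gotos, and so on); blocks without a terminator simply
/// follow their single successor edge.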
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.
      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
            builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                    *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We
        // cannot just test to see if the element is nil because a container
        // can contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);
        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

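/// Process a two-way branch terminator: the subengine decides which of the
/// two successor edges remain feasible and produces the corresponding nodes.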
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

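/// Process the CFG element at index \p StmtIdx of block \p B, or the block's
/// exit once the index is past the last element.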
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

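/// Resolve the implicit branch that guards virtual base initializers: whether
/// they run depends on whether this constructor is called for the most
/// derived object (which initializes its virtual bases itself) or for a base
/// subobject (whose virtual bases are initialized by the most derived class's
/// constructor).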
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
      case CXXConstructExpr::CK_NonVirtualBase:
      case CXXConstructExpr::CK_VirtualBase: {
        BlockEdge Loc(B, *B->succ_begin(), LCtx);
        HandleBlockEdge(Loc, Pred);
        return;
      }
      default:
        break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

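/// Put a node produced while processing a CFG element back onto the worklist,
/// advancing to the next element where appropriate (some program points, such
/// as CallEnter and EpsilonPoint, keep the current element index).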
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

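/// Enqueue nodes that have reached the end of a function. Inside an inlined
/// call this produces a CallExitBegin node so the caller can resume; in the
/// top frame the node terminates its path and is recorded as an end-of-path
/// node.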
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

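/// Create (or find) the node for the given program point and state, link it
/// to \p FromN, and replace \p FromN with the new node in the frontier unless
/// it is marked as a sink. Returns nullptr if an equivalent node already
/// existed in the graph.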
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}