//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a meta-engine for path-sensitive dataflow analysis that
// is built on GREngine, but provides the boilerplate to execute transfer
// functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/ImmutableList.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext ensures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

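// A minimal sketch (assuming a CXXBindTemporaryExpr *BTE, an ExplodedNode
// *Node, and a ProgramStateRef State are in scope) of how this trait is
// queried and updated by the temporary-destructor logic later in this file
// (ProcessTemporaryDtor, processCleanupTemporaryBranch,
// VisitCXXBindTemporaryExpr):
//
//   CXXBindTemporaryContext Key = std::make_pair(BTE, Node->getStackFrame());
//   if (!State->contains<InitializedTemporariesSet>(Key))
//     State = State->add<InitializedTemporariesSet>(Key);    // mark as live
//   ...
//   State = State->remove<InitializedTemporariesSet>(Key);   // destroyed
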
//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
    : AMgr(mgr),
      AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
      Engine(*this, FS),
      G(Engine.getGraph()),
      StateMgr(getContext(), mgr.getStoreManagerCreator(),
               mgr.getConstraintManagerCreator(), G.getAllocator(),
               this),
      SymMgr(StateMgr.getSymbolManager()),
      svalBuilder(StateMgr.getSValBuilder()),
      currStmtIdx(0), currBldrCtx(nullptr),
      ObjCNoRet(mgr.getASTContext()),
      ObjCGCEnabled(gcEnabled), BR(mgr, *this),
      VisitedCallees(VisitedCalleesIn),
      HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

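// For example, when the analysis entry point is 'int main(int argc, char **argv)',
// the precondition added above constrains the first parameter (conventionally
// 'argc') to be greater than zero in the initial state, so paths that would
// only be reachable with a non-positive argument count are not explored.
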
ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer
  // region, start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());

  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

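// Illustration of the base-cast handling above, using a hypothetical
// initialization such as
//
//   const Base &b = Derived();
//
// Here the bound value describes a Derived temporary, so the temporary region
// is created for the inner (more derived) expression and the recorded
// DerivedToBase casts are re-applied afterwards, so 'Result' ends up bound to
// a Base-typed view of the same region. The CXXDefaultArgExpr /
// CXXDefaultInitExpr and CXXOperatorCallExpr cases in ExprEngine::Visit below
// rely on this helper.
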
//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {
  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);
  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
         I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary.
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    const Expr *Init = BMI->getInit()->IgnoreImplicit();
    if (!isa<CXXConstructExpr>(Init)) {
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

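// In the branch built above, the "true" successor (DstT) is feasible only
// when the corresponding CXXBindTemporaryExpr was recorded in
// InitializedTemporariesSet, i.e. when the temporary was actually initialized
// on this path; otherwise only the "false" successor (DstF) is feasible,
// which (under the usual CFG layout for temporary destructors) is the path
// that skips the destructor.
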
void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
    case Stmt::AtomicExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }

    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExpr as explicit control flow.
    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

  1164. bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
  1165. const LocationContext *CalleeLC) {
  1166. const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  1167. const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  1168. assert(CalleeSF && CallerSF);
  1169. ExplodedNode *BeforeProcessingCall = nullptr;
  1170. const Stmt *CE = CalleeSF->getCallSite();
  1171. // Find the first node before we started processing the call expression.
  1172. while (N) {
  1173. ProgramPoint L = N->getLocation();
  1174. BeforeProcessingCall = N;
  1175. N = N->pred_empty() ? nullptr : *(N->pred_begin());
  1176. // Skip the nodes corresponding to the inlined code.
  1177. if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
  1178. continue;
  1179. // We reached the caller. Find the node right before we started
  1180. // processing the call.
  1181. if (L.isPurgeKind())
  1182. continue;
  1183. if (L.getAs<PreImplicitCall>())
  1184. continue;
  1185. if (L.getAs<CallEnter>())
  1186. continue;
  1187. if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
  1188. if (SP->getStmt() == CE)
  1189. continue;
  1190. break;
  1191. }
  1192. if (!BeforeProcessingCall)
  1193. return false;
  1194. // TODO: Clean up the unneeded nodes.
  1195. // Build an Epsilon node from which we will restart the analyzes.
  1196. // Note that CE is permitted to be NULL!
  1197. ProgramPoint NewNodeLoc =
  1198. EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  1199. // Add the special flag to GDM to signal retrying with no inlining.
  1200. // Note, changing the state ensures that we are not going to cache out.
  1201. ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  1202. NewNodeState =
  1203. NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));
  1204. // Make the new node a successor of BeforeProcessingCall.
  1205. bool IsNew = false;
  1206. ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  1207. // We cached out at this point. Caching out is common due to us backtracking
  1208. // from the inlined function, which might spawn several paths.
  1209. if (!IsNew)
  1210. return true;
  1211. NewNode->addPredecessor(BeforeProcessingCall, G);
  1212. // Add the new node to the work list.
  1213. Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
  1214. CalleeSF->getIndex());
  1215. NumTimesRetriedWithoutInlining++;
  1216. return true;
  1217. }
  1218. /// Block entrance. (Update counters).
  1219. void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
  1220. NodeBuilderWithSinks &nodeBuilder,
  1221. ExplodedNode *Pred) {
  1222. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1223. // If this block is terminated by a loop and it has already been visited the
  1224. // maximum number of times, widen the loop.
  1225. unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  1226. if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
  1227. AMgr.options.shouldWidenLoops()) {
  1228. const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
  1229. if (!(Term &&
  1230. (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
  1231. return;
  1232. // Widen.
  1233. const LocationContext *LCtx = Pred->getLocationContext();
  1234. ProgramStateRef WidenedState =
  1235. getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
  1236. nodeBuilder.generateNode(WidenedState, Pred);
  1237. return;
  1238. }
  1239. // FIXME: Refactor this into a checker.
  1240. if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
  1241. static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
  1242. const ExplodedNode *Sink =
  1243. nodeBuilder.generateSink(Pred->getState(), Pred, &tag);
  1244. // Check if we stopped at the top level function or not.
  1245. // Root node should have the location context of the top most function.
  1246. const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
  1247. const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  1248. const LocationContext *RootLC =
  1249. (*G.roots_begin())->getLocation().getLocationContext();
  1250. if (RootLC->getCurrentStackFrame() != CalleeSF) {
  1251. Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());
  1252. // Re-run the call evaluation without inlining it, by storing the
  1253. // no-inlining policy in the state and enqueuing the new work item on
  1254. // the list. Replay should almost never fail. Use the stats to catch it
  1255. // if it does.
  1256. if ((!AMgr.options.NoRetryExhausted &&
  1257. replayWithoutInlining(Pred, CalleeLC)))
  1258. return;
  1259. NumMaxBlockCountReachedInInlined++;
  1260. } else
  1261. NumMaxBlockCountReached++;
  1262. // Make sink nodes as exhausted(for stats) only if retry failed.
  1263. Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  1264. }
  1265. }
  1266. //===----------------------------------------------------------------------===//
  1267. // Branch processing.
  1268. //===----------------------------------------------------------------------===//
  1269. /// RecoverCastedSymbol - A helper function for ProcessBranch that is used
  1270. /// to try to recover some path-sensitivity for casts of symbolic
  1271. /// integers that promote their values (which are currently not tracked well).
  1272. /// This function returns the SVal bound to Condition->IgnoreCasts if all the
  1273. // cast(s) did was sign-extend the original value.
  1274. static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
  1275. ProgramStateRef state,
  1276. const Stmt *Condition,
  1277. const LocationContext *LCtx,
  1278. ASTContext &Ctx) {
  1279. const Expr *Ex = dyn_cast<Expr>(Condition);
  1280. if (!Ex)
  1281. return UnknownVal();
  1282. uint64_t bits = 0;
  1283. bool bitsInit = false;
  1284. while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
  1285. QualType T = CE->getType();
  1286. if (!T->isIntegralOrEnumerationType())
  1287. return UnknownVal();
  1288. uint64_t newBits = Ctx.getTypeSize(T);
  1289. if (!bitsInit || newBits < bits) {
  1290. bitsInit = true;
  1291. bits = newBits;
  1292. }
  1293. Ex = CE->getSubExpr();
  1294. }
  1295. // We reached a non-cast. Is it a symbolic value?
  1296. QualType T = Ex->getType();
  1297. if (!bitsInit || !T->isIntegralOrEnumerationType() ||
  1298. Ctx.getTypeSize(T) > bits)
  1299. return UnknownVal();
  1300. return state->getSVal(Ex, LCtx);
  1301. }
  1302. #ifndef NDEBUG
  1303. static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  1304. while (Condition) {
  1305. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1306. if (!BO || !BO->isLogicalOp()) {
  1307. return Condition;
  1308. }
  1309. Condition = BO->getRHS()->IgnoreParens();
  1310. }
  1311. return nullptr;
  1312. }
  1313. #endif
  1314. // Returns the condition the branch at the end of 'B' depends on and whose value
  1315. // has been evaluated within 'B'.
  1316. // In most cases, the terminator condition of 'B' will be evaluated fully in
  1317. // the last statement of 'B'; in those cases, the resolved condition is the
  1318. // given 'Condition'.
  1319. // If the condition of the branch is a logical binary operator tree, the CFG is
  1320. // optimized: in that case, we know that the expression formed by all but the
  1321. // rightmost leaf of the logical binary operator tree must be true, and thus
  1322. // the branch condition is at this point equivalent to the truth value of that
  1323. // rightmost leaf; the CFG block thus only evaluates this rightmost leaf
  1324. // expression in its final statement. As the full condition in that case was
  1325. // not evaluated, and is thus not in the SVal cache, we need to use that leaf
  1326. // expression to evaluate the truth value of the condition in the current state
  1327. // space.
  1328. static const Stmt *ResolveCondition(const Stmt *Condition,
  1329. const CFGBlock *B) {
  1330. if (const Expr *Ex = dyn_cast<Expr>(Condition))
  1331. Condition = Ex->IgnoreParens();
  1332. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1333. if (!BO || !BO->isLogicalOp())
  1334. return Condition;
  1335. assert(!B->getTerminator().isTemporaryDtorsBranch() &&
  1336. "Temporary destructor branches handled by processBindTemporary.");
  1337. // For logical operations, we still have the case where some branches
  1338. // use the traditional "merge" approach and others sink the branch
  1339. // directly into the basic blocks representing the logical operation.
  1340. // We need to distinguish between those two cases here.
  1341. // The invariants are still shifting, but it is possible that the
  1342. // last element in a CFGBlock is not a CFGStmt. Look for the last
  1343. // CFGStmt as the value of the condition.
  1344. CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  1345. for (; I != E; ++I) {
  1346. CFGElement Elem = *I;
  1347. Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
  1348. if (!CS)
  1349. continue;
  1350. const Stmt *LastStmt = CS->getStmt();
  1351. assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
  1352. return LastStmt;
  1353. }
  1354. llvm_unreachable("could not resolve condition");
  1355. }
  1356. void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
  1357. NodeBuilderContext& BldCtx,
  1358. ExplodedNode *Pred,
  1359. ExplodedNodeSet &Dst,
  1360. const CFGBlock *DstT,
  1361. const CFGBlock *DstF) {
  1362. assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
  1363. "CXXBindTemporaryExprs are handled by processBindTemporary.");
  1364. const LocationContext *LCtx = Pred->getLocationContext();
  1365. PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  1366. currBldrCtx = &BldCtx;
  1367. // Check for NULL conditions; e.g. "for(;;)"
  1368. if (!Condition) {
  1369. BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
  1370. NullCondBldr.markInfeasible(false);
  1371. NullCondBldr.generateNode(Pred->getState(), true, Pred);
  1372. return;
  1373. }
  1374. if (const Expr *Ex = dyn_cast<Expr>(Condition))
  1375. Condition = Ex->IgnoreParens();
  1376. Condition = ResolveCondition(Condition, BldCtx.getBlock());
  1377. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  1378. Condition->getLocStart(),
  1379. "Error evaluating branch");
  1380. ExplodedNodeSet CheckersOutSet;
  1381. getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
  1382. Pred, *this);
  1383. // We generated only sinks.
  1384. if (CheckersOutSet.empty())
  1385. return;
  1386. BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  1387. for (NodeBuilder::iterator I = CheckersOutSet.begin(),
  1388. E = CheckersOutSet.end(); E != I; ++I) {
  1389. ExplodedNode *PredI = *I;
  1390. if (PredI->isSink())
  1391. continue;
  1392. ProgramStateRef PrevState = PredI->getState();
  1393. SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());
  1394. if (X.isUnknownOrUndef()) {
  1395. // Give it a chance to recover from unknown.
  1396. if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
  1397. if (Ex->getType()->isIntegralOrEnumerationType()) {
  1398. // Try to recover some path-sensitivity. Right now casts of symbolic
  1399. // integers that promote their values are currently not tracked well.
  1400. // If 'Condition' is such an expression, try and recover the
  1401. // underlying value and use that instead.
  1402. SVal recovered = RecoverCastedSymbol(getStateManager(),
  1403. PrevState, Condition,
  1404. PredI->getLocationContext(),
  1405. getContext());
  1406. if (!recovered.isUnknown()) {
  1407. X = recovered;
  1408. }
  1409. }
  1410. }
  1411. }
  1412. // If the condition is still unknown, give up.
  1413. if (X.isUnknownOrUndef()) {
  1414. builder.generateNode(PrevState, true, PredI);
  1415. builder.generateNode(PrevState, false, PredI);
  1416. continue;
  1417. }
  1418. DefinedSVal V = X.castAs<DefinedSVal>();
  1419. ProgramStateRef StTrue, StFalse;
  1420. std::tie(StTrue, StFalse) = PrevState->assume(V);
  1421. // Process the true branch.
  1422. if (builder.isFeasible(true)) {
  1423. if (StTrue)
  1424. builder.generateNode(StTrue, true, PredI);
  1425. else
  1426. builder.markInfeasible(true);
  1427. }
  1428. // Process the false branch.
  1429. if (builder.isFeasible(false)) {
  1430. if (StFalse)
  1431. builder.generateNode(StFalse, false, PredI);
  1432. else
  1433. builder.markInfeasible(false);
  1434. }
  1435. }
  1436. currBldrCtx = nullptr;
  1437. }
  1438. /// The GDM component containing the set of global variables which have been
  1439. /// previously initialized with explicit initializers.
  1440. REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
  1441. llvm::ImmutableSet<const VarDecl *>)
  1442. void ExprEngine::processStaticInitializer(const DeclStmt *DS,
  1443. NodeBuilderContext &BuilderCtx,
  1444. ExplodedNode *Pred,
  1445. clang::ento::ExplodedNodeSet &Dst,
  1446. const CFGBlock *DstT,
  1447. const CFGBlock *DstF) {
  1448. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1449. currBldrCtx = &BuilderCtx;
  1450. const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  1451. ProgramStateRef state = Pred->getState();
  1452. bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  1453. BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);
  1454. if (!initHasRun) {
  1455. state = state->add<InitializedGlobalsSet>(VD);
  1456. }
  1457. builder.generateNode(state, initHasRun, Pred);
  1458. builder.markInfeasible(!initHasRun);
  1459. currBldrCtx = nullptr;
  1460. }
  1461. /// processIndirectGoto - Called by CoreEngine. Used to generate successor
  1462. /// nodes by processing the 'effects' of a computed goto jump.
  1463. void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {
  1464. ProgramStateRef state = builder.getState();
  1465. SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());
  1466. // Three possibilities:
  1467. //
  1468. // (1) We know the computed label.
  1469. // (2) The label is NULL (or some other constant), or Undefined.
  1470. // (3) We have no clue about the label. Dispatch to all targets.
  1471. //
  1472. typedef IndirectGotoNodeBuilder::iterator iterator;
  1473. if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
  1474. const LabelDecl *L = LV->getLabel();
  1475. for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
  1476. if (I.getLabel() == L) {
  1477. builder.generateNode(I, state);
  1478. return;
  1479. }
  1480. }
  1481. llvm_unreachable("No block with label.");
  1482. }
  1483. if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
  1484. // Dispatch to the first target and mark it as a sink.
  1485. //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
  1486. // FIXME: add checker visit.
  1487. // UndefBranches.insert(N);
  1488. return;
  1489. }
  1490. // This is really a catch-all. We don't support symbolics yet.
  1491. // FIXME: Implement dispatch for symbolic pointers.
  1492. for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
  1493. builder.generateNode(I, state);
  1494. }
  1495. #if 0
  1496. static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
  1497. const StackFrameContext* Frame = Pred.getStackFrame();
  1498. const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
  1499. Pred.getState()->get<InitializedTemporariesSet>();
  1500. return std::find_if(Set.begin(), Set.end(),
  1501. [&](const CXXBindTemporaryContext &Ctx) {
  1502. if (Ctx.second == Frame) {
  1503. Ctx.first->dump();
  1504. llvm::errs() << "\n";
  1505. }
  1506. return Ctx.second == Frame;
  1507. }) == Set.end();
  1508. }
  1509. #endif
  1510. /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path
  1511. /// nodes when the control reaches the end of a function.
  1512. void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
  1513. ExplodedNode *Pred) {
  1514. // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)).
  1515. // We currently cannot enable this assert, as lifetime extended temporaries
  1516. // are not modelled correctly.
  1517. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1518. StateMgr.EndPath(Pred->getState());
  1519. ExplodedNodeSet Dst;
  1520. if (Pred->getLocationContext()->inTopFrame()) {
  1521. // Remove dead symbols.
  1522. ExplodedNodeSet AfterRemovedDead;
  1523. removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);
  1524. // Notify checkers.
  1525. for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
  1526. E = AfterRemovedDead.end(); I != E; ++I) {
  1527. getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
  1528. }
  1529. } else {
  1530. getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  1531. }
  1532. Engine.enqueueEndOfFunction(Dst);
  1533. }
  1534. /// ProcessSwitch - Called by CoreEngine. Used to generate successor
  1535. /// nodes by processing the 'effects' of a switch statement.
  1536. void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  1537. typedef SwitchNodeBuilder::iterator iterator;
  1538. ProgramStateRef state = builder.getState();
  1539. const Expr *CondE = builder.getCondition();
  1540. SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());
  1541. if (CondV_untested.isUndef()) {
  1542. //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
  1543. // FIXME: add checker
  1544. //UndefBranches.insert(N);
  1545. return;
  1546. }
  1547. DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();
  1548. ProgramStateRef DefaultSt = state;
  1549. iterator I = builder.begin(), EI = builder.end();
  1550. bool defaultIsFeasible = I == EI;
  1551. for ( ; I != EI; ++I) {
  1552. // Successor may be pruned out during CFG construction.
  1553. if (!I.getBlock())
  1554. continue;
  1555. const CaseStmt *Case = I.getCase();
  1556. // Evaluate the LHS of the case value.
  1557. llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
  1558. assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));
  1559. // Get the RHS of the case, if it exists.
  1560. llvm::APSInt V2;
  1561. if (const Expr *E = Case->getRHS())
  1562. V2 = E->EvaluateKnownConstInt(getContext());
  1563. else
  1564. V2 = V1;
  1565. ProgramStateRef StateCase;
  1566. if (Optional<NonLoc> NL = CondV.getAs<NonLoc>())
  1567. std::tie(StateCase, DefaultSt) =
  1568. DefaultSt->assumeWithinInclusiveRange(*NL, V1, V2);
  1569. else // UnknownVal
  1570. StateCase = DefaultSt;
  1571. if (StateCase)
  1572. builder.generateCaseStmtNode(I, StateCase);
  1573. // Now "assume" that the case doesn't match. Add this state
  1574. // to the default state (if it is feasible).
  1575. if (DefaultSt)
  1576. defaultIsFeasible = true;
  1577. else {
  1578. defaultIsFeasible = false;
  1579. break;
  1580. }
  1581. }
  1582. if (!defaultIsFeasible)
  1583. return;
  1584. // If we have switch(enum value), the default branch is not
  1585. // feasible if all of the enum constants not covered by 'case:' statements
  1586. // are not feasible values for the switch condition.
  1587. //
  1588. // Note that this isn't as accurate as it could be. Even if there isn't
  1589. // a case for a particular enum value as long as that enum value isn't
  1590. // feasible then it shouldn't be considered for making 'default:' reachable.
  1591. const SwitchStmt *SS = builder.getSwitch();
  1592. const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  1593. if (CondExpr->getType()->getAs<EnumType>()) {
  1594. if (SS->isAllEnumCasesCovered())
  1595. return;
  1596. }
  1597. builder.generateDefaultCaseNode(DefaultSt);
  1598. }
  1599. //===----------------------------------------------------------------------===//
  1600. // Transfer functions: Loads and stores.
  1601. //===----------------------------------------------------------------------===//
  1602. void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
  1603. ExplodedNode *Pred,
  1604. ExplodedNodeSet &Dst) {
  1605. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  1606. ProgramStateRef state = Pred->getState();
  1607. const LocationContext *LCtx = Pred->getLocationContext();
  1608. if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
  1609. // C permits "extern void v", and if you cast the address to a valid type,
  1610. // you can even do things with it. We simply pretend
  1611. assert(Ex->isGLValue() || VD->getType()->isVoidType());
  1612. const LocationContext *LocCtxt = Pred->getLocationContext();
  1613. const Decl *D = LocCtxt->getDecl();
  1614. const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr;
  1615. const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex);
  1616. SVal V;
  1617. bool IsReference;
  1618. if (AMgr.options.shouldInlineLambdas() && DeclRefEx &&
  1619. DeclRefEx->refersToEnclosingVariableOrCapture() && MD &&
  1620. MD->getParent()->isLambda()) {
  1621. // Lookup the field of the lambda.
  1622. const CXXRecordDecl *CXXRec = MD->getParent();
  1623. llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
  1624. FieldDecl *LambdaThisCaptureField;
  1625. CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);
  1626. const FieldDecl *FD = LambdaCaptureFields[VD];
  1627. if (!FD) {
  1628. // When a constant is captured, sometimes no corresponding field is
  1629. // created in the lambda object.
  1630. assert(VD->getType().isConstQualified());
  1631. V = state->getLValue(VD, LocCtxt);
  1632. IsReference = false;
  1633. } else {
  1634. Loc CXXThis =
  1635. svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame());
  1636. SVal CXXThisVal = state->getSVal(CXXThis);
  1637. V = state->getLValue(FD, CXXThisVal);
  1638. IsReference = FD->getType()->isReferenceType();
  1639. }
  1640. } else {
  1641. V = state->getLValue(VD, LocCtxt);
  1642. IsReference = VD->getType()->isReferenceType();
  1643. }
  1644. // For references, the 'lvalue' is the pointer address stored in the
  1645. // reference region.
  1646. if (IsReference) {
  1647. if (const MemRegion *R = V.getAsRegion())
  1648. V = state->getSVal(R);
  1649. else
  1650. V = UnknownVal();
  1651. }
  1652. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1653. ProgramPoint::PostLValueKind);
  1654. return;
  1655. }
  1656. if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
  1657. assert(!Ex->isGLValue());
  1658. SVal V = svalBuilder.makeIntVal(ED->getInitVal());
  1659. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
  1660. return;
  1661. }
  1662. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
  1663. SVal V = svalBuilder.getFunctionPointer(FD);
  1664. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1665. ProgramPoint::PostLValueKind);
  1666. return;
  1667. }
  1668. if (isa<FieldDecl>(D)) {
  1669. // FIXME: Compute lvalue of field pointers-to-member.
  1670. // Right now we just use a non-null void pointer, so that it gives proper
  1671. // results in boolean contexts.
  1672. SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
  1673. currBldrCtx->blockCount());
  1674. state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
  1675. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1676. ProgramPoint::PostLValueKind);
  1677. return;
  1678. }
  1679. llvm_unreachable("Support for this Decl not implemented.");
  1680. }
  1681. /// VisitArraySubscriptExpr - Transfer function for array accesses
  1682. void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
  1683. ExplodedNode *Pred,
  1684. ExplodedNodeSet &Dst){
  1685. const Expr *Base = A->getBase()->IgnoreParens();
  1686. const Expr *Idx = A->getIdx()->IgnoreParens();
  1687. ExplodedNodeSet checkerPreStmt;
  1688. getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this);
  1689. StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx);
  1690. assert(A->isGLValue() ||
  1691. (!AMgr.getLangOpts().CPlusPlus &&
  1692. A->getType().isCForbiddenLValueType()));
  1693. for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(),
  1694. ei = checkerPreStmt.end(); it != ei; ++it) {
  1695. const LocationContext *LCtx = (*it)->getLocationContext();
  1696. ProgramStateRef state = (*it)->getState();
  1697. SVal V = state->getLValue(A->getType(),
  1698. state->getSVal(Idx, LCtx),
  1699. state->getSVal(Base, LCtx));
  1700. Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr,
  1701. ProgramPoint::PostLValueKind);
  1702. }
  1703. }
  1704. /// VisitMemberExpr - Transfer function for member expressions.
  1705. void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
  1706. ExplodedNodeSet &Dst) {
  1707. // FIXME: Prechecks eventually go in ::Visit().
  1708. ExplodedNodeSet CheckedSet;
  1709. getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
  1710. ExplodedNodeSet EvalSet;
  1711. ValueDecl *Member = M->getMemberDecl();
  1712. // Handle static member variables and enum constants accessed via
  1713. // member syntax.
  1714. if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
  1715. ExplodedNodeSet Dst;
  1716. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1717. I != E; ++I) {
  1718. VisitCommonDeclRefExpr(M, Member, Pred, EvalSet);
  1719. }
  1720. } else {
  1721. StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
  1722. ExplodedNodeSet Tmp;
  1723. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1724. I != E; ++I) {
  1725. ProgramStateRef state = (*I)->getState();
  1726. const LocationContext *LCtx = (*I)->getLocationContext();
  1727. Expr *BaseExpr = M->getBase();
  1728. // Handle C++ method calls.
  1729. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
  1730. if (MD->isInstance())
  1731. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1732. SVal MDVal = svalBuilder.getFunctionPointer(MD);
  1733. state = state->BindExpr(M, LCtx, MDVal);
  1734. Bldr.generateNode(M, *I, state);
  1735. continue;
  1736. }
  1737. // Handle regular struct fields / member variables.
  1738. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1739. SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
  1740. FieldDecl *field = cast<FieldDecl>(Member);
  1741. SVal L = state->getLValue(field, baseExprVal);
  1742. if (M->isGLValue() || M->getType()->isArrayType()) {
  1743. // We special-case rvalues of array type because the analyzer cannot
  1744. // reason about them, since we expect all regions to be wrapped in Locs.
  1745. // We instead treat these as lvalues and assume that they will decay to
  1746. // pointers as soon as they are used.
  1747. if (!M->isGLValue()) {
  1748. assert(M->getType()->isArrayType());
  1749. const ImplicitCastExpr *PE =
  1750. dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
  1751. if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
  1752. llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
  1753. }
  1754. }
  1755. if (field->getType()->isReferenceType()) {
  1756. if (const MemRegion *R = L.getAsRegion())
  1757. L = state->getSVal(R);
  1758. else
  1759. L = UnknownVal();
  1760. }
  1761. Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
  1762. ProgramPoint::PostLValueKind);
  1763. } else {
  1764. Bldr.takeNodes(*I);
  1765. evalLoad(Tmp, M, M, *I, state, L);
  1766. Bldr.addNodes(Tmp);
  1767. }
  1768. }
  1769. }
  1770. getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
  1771. }
  1772. namespace {
  1773. class CollectReachableSymbolsCallback final : public SymbolVisitor {
  1774. InvalidatedSymbols Symbols;
  1775. public:
  1776. CollectReachableSymbolsCallback(ProgramStateRef State) {}
  1777. const InvalidatedSymbols &getSymbols() const { return Symbols; }
  1778. bool VisitSymbol(SymbolRef Sym) override {
  1779. Symbols.insert(Sym);
  1780. return true;
  1781. }
  1782. };
  1783. } // end anonymous namespace
  1784. // A value escapes in three possible cases:
  1785. // (1) We are binding to something that is not a memory region.
  1786. // (2) We are binding to a MemrRegion that does not have stack storage.
  1787. // (3) We are binding to a MemRegion with stack storage that the store
  1788. // does not understand.
  1789. ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
  1790. SVal Loc, SVal Val) {
  1791. // Are we storing to something that causes the value to "escape"?
  1792. bool escapes = true;
  1793. // TODO: Move to StoreManager.
  1794. if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
  1795. escapes = !regionLoc->getRegion()->hasStackStorage();
  1796. if (!escapes) {
  1797. // To test (3), generate a new state with the binding added. If it is
  1798. // the same state, then it escapes (since the store cannot represent
  1799. // the binding).
  1800. // Do this only if we know that the store is not supposed to generate the
  1801. // same state.
  1802. SVal StoredVal = State->getSVal(regionLoc->getRegion());
  1803. if (StoredVal != Val)
  1804. escapes = (State == (State->bindLoc(*regionLoc, Val)));
  1805. }
  1806. }
  1807. // If our store can represent the binding and we aren't storing to something
  1808. // that doesn't have local storage then just return and have the simulation
  1809. // state continue as is.
  1810. if (!escapes)
  1811. return State;
  1812. // Otherwise, find all symbols referenced by 'val' that we are tracking
  1813. // and stop tracking them.
  1814. CollectReachableSymbolsCallback Scanner =
  1815. State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  1816. const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  1817. State = getCheckerManager().runCheckersForPointerEscape(State,
  1818. EscapedSymbols,
  1819. /*CallEvent*/ nullptr,
  1820. PSK_EscapeOnBind,
  1821. nullptr);
  1822. return State;
  1823. }
  1824. ProgramStateRef
  1825. ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
  1826. const InvalidatedSymbols *Invalidated,
  1827. ArrayRef<const MemRegion *> ExplicitRegions,
  1828. ArrayRef<const MemRegion *> Regions,
  1829. const CallEvent *Call,
  1830. RegionAndSymbolInvalidationTraits &ITraits) {
  1831. if (!Invalidated || Invalidated->empty())
  1832. return State;
  1833. if (!Call)
  1834. return getCheckerManager().runCheckersForPointerEscape(State,
  1835. *Invalidated,
  1836. nullptr,
  1837. PSK_EscapeOther,
  1838. &ITraits);
  1839. // If the symbols were invalidated by a call, we want to find out which ones
  1840. // were invalidated directly due to being arguments to the call.
  1841. InvalidatedSymbols SymbolsDirectlyInvalidated;
  1842. for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
  1843. E = ExplicitRegions.end(); I != E; ++I) {
  1844. if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
  1845. SymbolsDirectlyInvalidated.insert(R->getSymbol());
  1846. }
  1847. InvalidatedSymbols SymbolsIndirectlyInvalidated;
  1848. for (InvalidatedSymbols::const_iterator I=Invalidated->begin(),
  1849. E = Invalidated->end(); I!=E; ++I) {
  1850. SymbolRef sym = *I;
  1851. if (SymbolsDirectlyInvalidated.count(sym))
  1852. continue;
  1853. SymbolsIndirectlyInvalidated.insert(sym);
  1854. }
  1855. if (!SymbolsDirectlyInvalidated.empty())
  1856. State = getCheckerManager().runCheckersForPointerEscape(State,
  1857. SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);
  1858. // Notify about the symbols that get indirectly invalidated by the call.
  1859. if (!SymbolsIndirectlyInvalidated.empty())
  1860. State = getCheckerManager().runCheckersForPointerEscape(State,
  1861. SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);
  1862. return State;
  1863. }
  1864. /// evalBind - Handle the semantics of binding a value to a specific location.
  1865. /// This method is used by evalStore and (soon) VisitDeclStmt, and others.
  1866. void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
  1867. ExplodedNode *Pred,
  1868. SVal location, SVal Val,
  1869. bool atDeclInit, const ProgramPoint *PP) {
  1870. const LocationContext *LC = Pred->getLocationContext();
  1871. PostStmt PS(StoreE, LC);
  1872. if (!PP)
  1873. PP = &PS;
  1874. // Do a previsit of the bind.
  1875. ExplodedNodeSet CheckedSet;
  1876. getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
  1877. StoreE, *this, *PP);
  1878. StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);
  1879. // If the location is not a 'Loc', it will already be handled by
  1880. // the checkers. There is nothing left to do.
  1881. if (!location.getAs<Loc>()) {
  1882. const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
  1883. /*tag*/nullptr);
  1884. ProgramStateRef state = Pred->getState();
  1885. state = processPointerEscapedOnBind(state, location, Val);
  1886. Bldr.generateNode(L, state, Pred);
  1887. return;
  1888. }
  1889. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1890. I!=E; ++I) {
  1891. ExplodedNode *PredI = *I;
  1892. ProgramStateRef state = PredI->getState();
  1893. state = processPointerEscapedOnBind(state, location, Val);
  1894. // When binding the value, pass on the hint that this is a initialization.
  1895. // For initializations, we do not need to inform clients of region
  1896. // changes.
  1897. state = state->bindLoc(location.castAs<Loc>(),
  1898. Val, /* notifyChanges = */ !atDeclInit);
  1899. const MemRegion *LocReg = nullptr;
  1900. if (Optional<loc::MemRegionVal> LocRegVal =
  1901. location.getAs<loc::MemRegionVal>()) {
  1902. LocReg = LocRegVal->getRegion();
  1903. }
  1904. const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
  1905. Bldr.generateNode(L, state, PredI);
  1906. }
  1907. }
  1908. /// evalStore - Handle the semantics of a store via an assignment.
  1909. /// @param Dst The node set to store generated state nodes
  1910. /// @param AssignE The assignment expression if the store happens in an
  1911. /// assignment.
  1912. /// @param LocationE The location expression that is stored to.
  1913. /// @param state The current simulation state
  1914. /// @param location The location to store the value
  1915. /// @param Val The value to be stored
  1916. void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
  1917. const Expr *LocationE,
  1918. ExplodedNode *Pred,
  1919. ProgramStateRef state, SVal location, SVal Val,
  1920. const ProgramPointTag *tag) {
  1921. // Proceed with the store. We use AssignE as the anchor for the PostStore
  1922. // ProgramPoint if it is non-NULL, and LocationE otherwise.
  1923. const Expr *StoreE = AssignE ? AssignE : LocationE;
  1924. // Evaluate the location (checks for bad dereferences).
  1925. ExplodedNodeSet Tmp;
  1926. evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);
  1927. if (Tmp.empty())
  1928. return;
  1929. if (location.isUndef())
  1930. return;
  1931. for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
  1932. evalBind(Dst, StoreE, *NI, location, Val, false);
  1933. }
  1934. void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
  1935. const Expr *NodeEx,
  1936. const Expr *BoundEx,
  1937. ExplodedNode *Pred,
  1938. ProgramStateRef state,
  1939. SVal location,
  1940. const ProgramPointTag *tag,
  1941. QualType LoadTy)
  1942. {
  1943. assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");
  1944. // Are we loading from a region? This actually results in two loads; one
  1945. // to fetch the address of the referenced value and one to fetch the
  1946. // referenced value.
  1947. if (const TypedValueRegion *TR =
  1948. dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {
  1949. QualType ValTy = TR->getValueType();
  1950. if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
  1951. static SimpleProgramPointTag
  1952. loadReferenceTag(TagProviderName, "Load Reference");
  1953. ExplodedNodeSet Tmp;
  1954. evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
  1955. location, &loadReferenceTag,
  1956. getContext().getPointerType(RT->getPointeeType()));
  1957. // Perform the load from the referenced value.
  1958. for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
  1959. state = (*I)->getState();
  1960. location = state->getSVal(BoundEx, (*I)->getLocationContext());
  1961. evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
  1962. }
  1963. return;
  1964. }
  1965. }
  1966. evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
  1967. }
  1968. void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
  1969. const Expr *NodeEx,
  1970. const Expr *BoundEx,
  1971. ExplodedNode *Pred,
  1972. ProgramStateRef state,
  1973. SVal location,
  1974. const ProgramPointTag *tag,
  1975. QualType LoadTy) {
  1976. assert(NodeEx);
  1977. assert(BoundEx);
  1978. // Evaluate the location (checks for bad dereferences).
  1979. ExplodedNodeSet Tmp;
  1980. evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  1981. if (Tmp.empty())
  1982. return;
  1983. StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  1984. if (location.isUndef())
  1985. return;
  1986. // Proceed with the load.
  1987. for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
  1988. state = (*NI)->getState();
  1989. const LocationContext *LCtx = (*NI)->getLocationContext();
  1990. SVal V = UnknownVal();
  1991. if (location.isValid()) {
  1992. if (LoadTy.isNull())
  1993. LoadTy = BoundEx->getType();
  1994. V = state->getSVal(location.castAs<Loc>(), LoadTy);
  1995. }
  1996. Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
  1997. ProgramPoint::PostLoadKind);
  1998. }
  1999. }
  2000. void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
  2001. const Stmt *NodeEx,
  2002. const Stmt *BoundEx,
  2003. ExplodedNode *Pred,
  2004. ProgramStateRef state,
  2005. SVal location,
  2006. const ProgramPointTag *tag,
  2007. bool isLoad) {
  2008. StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
  2009. // Early checks for performance reason.
  2010. if (location.isUnknown()) {
  2011. return;
  2012. }
  2013. ExplodedNodeSet Src;
  2014. BldrTop.takeNodes(Pred);
  2015. StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  2016. if (Pred->getState() != state) {
  2017. // Associate this new state with an ExplodedNode.
  2018. // FIXME: If I pass null tag, the graph is incorrect, e.g for
  2019. // int *p;
  2020. // p = 0;
  2021. // *p = 0xDEADBEEF;
  2022. // "p = 0" is not noted as "Null pointer value stored to 'p'" but
  2023. // instead "int *p" is noted as
  2024. // "Variable 'p' initialized to a null pointer value"
  2025. static SimpleProgramPointTag tag(TagProviderName, "Location");
  2026. Bldr.generateNode(NodeEx, Pred, state, &tag);
  2027. }
  2028. ExplodedNodeSet Tmp;
  2029. getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
  2030. NodeEx, BoundEx, *this);
  2031. BldrTop.addNodes(Tmp);
  2032. }
  2033. std::pair<const ProgramPointTag *, const ProgramPointTag*>
  2034. ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  2035. static SimpleProgramPointTag
  2036. eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
  2037. "Eagerly Assume True"),
  2038. eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
  2039. "Eagerly Assume False");
  2040. return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
  2041. &eagerlyAssumeBinOpBifurcationFalse);
  2042. }
  2043. void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
  2044. ExplodedNodeSet &Src,
  2045. const Expr *Ex) {
  2046. StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
  2047. for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
  2048. ExplodedNode *Pred = *I;
  2049. // Test if the previous node was as the same expression. This can happen
  2050. // when the expression fails to evaluate to anything meaningful and
  2051. // (as an optimization) we don't generate a node.
  2052. ProgramPoint P = Pred->getLocation();
  2053. if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
  2054. continue;
  2055. }
  2056. ProgramStateRef state = Pred->getState();
  2057. SVal V = state->getSVal(Ex, Pred->getLocationContext());
  2058. Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
  2059. if (SEV && SEV->isExpression()) {
  2060. const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
  2061. geteagerlyAssumeBinOpBifurcationTags();
  2062. ProgramStateRef StateTrue, StateFalse;
  2063. std::tie(StateTrue, StateFalse) = state->assume(*SEV);
  2064. // First assume that the condition is true.
  2065. if (StateTrue) {
  2066. SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
  2067. StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
  2068. Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
  2069. }
  2070. // Next, assume that the condition is false.
  2071. if (StateFalse) {
  2072. SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
  2073. StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
  2074. Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
  2075. }
  2076. }
  2077. }
  2078. }
  2079. void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
  2080. ExplodedNodeSet &Dst) {
  2081. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2082. // We have processed both the inputs and the outputs. All of the outputs
  2083. // should evaluate to Locs. Nuke all of their values.
  2084. // FIXME: Some day in the future it would be nice to allow a "plug-in"
  2085. // which interprets the inline asm and stores proper results in the
  2086. // outputs.
  2087. ProgramStateRef state = Pred->getState();
  2088. for (const Expr *O : A->outputs()) {
  2089. SVal X = state->getSVal(O, Pred->getLocationContext());
  2090. assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef.
  2091. if (Optional<Loc> LV = X.getAs<Loc>())
  2092. state = state->bindLoc(*LV, UnknownVal());
  2093. }
  2094. Bldr.generateNode(A, Pred, state);
  2095. }
  2096. void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
  2097. ExplodedNodeSet &Dst) {
  2098. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2099. Bldr.generateNode(A, Pred, Pred->getState());
  2100. }
  2101. //===----------------------------------------------------------------------===//
  2102. // Visualization.
  2103. //===----------------------------------------------------------------------===//
  2104. #ifndef NDEBUG
  2105. static ExprEngine* GraphPrintCheckerState;
  2106. static SourceManager* GraphPrintSourceManager;
  2107. namespace llvm {
  2108. template<>
  2109. struct DOTGraphTraits<ExplodedNode*> :
  2110. public DefaultDOTGraphTraits {
  2111. DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}
  2112. // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  2113. // work.
  2114. static std::string getNodeAttributes(const ExplodedNode *N, void*) {
  2115. #if 0
  2116. // FIXME: Replace with a general scheme to tell if the node is
  2117. // an error node.
  2118. if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
  2119. GraphPrintCheckerState->isExplicitNullDeref(N) ||
  2120. GraphPrintCheckerState->isUndefDeref(N) ||
  2121. GraphPrintCheckerState->isUndefStore(N) ||
  2122. GraphPrintCheckerState->isUndefControlFlow(N) ||
  2123. GraphPrintCheckerState->isUndefResult(N) ||
  2124. GraphPrintCheckerState->isBadCall(N) ||
  2125. GraphPrintCheckerState->isUndefArg(N))
  2126. return "color=\"red\",style=\"filled\"";
  2127. if (GraphPrintCheckerState->isNoReturnCall(N))
  2128. return "color=\"blue\",style=\"filled\"";
  2129. #endif
  2130. return "";
  2131. }
  2132. static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
  2133. if (SLoc.isFileID()) {
  2134. Out << "\\lline="
  2135. << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
  2136. << " col="
  2137. << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
  2138. << "\\l";
  2139. }
  2140. }
  2141. static std::string getNodeLabel(const ExplodedNode *N, void*){
  2142. std::string sbuf;
  2143. llvm::raw_string_ostream Out(sbuf);
  2144. // Program Location.
  2145. ProgramPoint Loc = N->getLocation();
  2146. switch (Loc.getKind()) {
  2147. case ProgramPoint::BlockEntranceKind: {
  2148. Out << "Block Entrance: B"
  2149. << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
  2150. if (const NamedDecl *ND =
  2151. dyn_cast<NamedDecl>(Loc.getLocationContext()->getDecl())) {
  2152. Out << " (";
  2153. ND->printName(Out);
  2154. Out << ")";
  2155. }
  2156. break;
  2157. }
  2158. case ProgramPoint::BlockExitKind:
  2159. assert (false);
  2160. break;
  2161. case ProgramPoint::CallEnterKind:
  2162. Out << "CallEnter";
  2163. break;
  2164. case ProgramPoint::CallExitBeginKind:
  2165. Out << "CallExitBegin";
  2166. break;
  2167. case ProgramPoint::CallExitEndKind:
  2168. Out << "CallExitEnd";
  2169. break;
  2170. case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
  2171. Out << "PostStmtPurgeDeadSymbols";
  2172. break;
  2173. case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
  2174. Out << "PreStmtPurgeDeadSymbols";
  2175. break;
  2176. case ProgramPoint::EpsilonKind:
  2177. Out << "Epsilon Point";
  2178. break;
  2179. case ProgramPoint::PreImplicitCallKind: {
  2180. ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
  2181. Out << "PreCall: ";
  2182. // FIXME: Get proper printing options.
  2183. PC.getDecl()->print(Out, LangOptions());
  2184. printLocation(Out, PC.getLocation());
  2185. break;
  2186. }
  2187. case ProgramPoint::PostImplicitCallKind: {
  2188. ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
  2189. Out << "PostCall: ";
  2190. // FIXME: Get proper printing options.
  2191. PC.getDecl()->print(Out, LangOptions());
  2192. printLocation(Out, PC.getLocation());
  2193. break;
  2194. }
  2195. case ProgramPoint::PostInitializerKind: {
  2196. Out << "PostInitializer: ";
  2197. const CXXCtorInitializer *Init =
  2198. Loc.castAs<PostInitializer>().getInitializer();
  2199. if (const FieldDecl *FD = Init->getAnyMember())
  2200. Out << *FD;
  2201. else {
  2202. QualType Ty = Init->getTypeSourceInfo()->getType();
  2203. Ty = Ty.getLocalUnqualifiedType();
  2204. LangOptions LO; // FIXME.
  2205. Ty.print(Out, LO);
  2206. }
  2207. break;
  2208. }
  2209. case ProgramPoint::BlockEdgeKind: {
  2210. const BlockEdge &E = Loc.castAs<BlockEdge>();
  2211. Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
  2212. << E.getDst()->getBlockID() << ')';
  2213. if (const Stmt *T = E.getSrc()->getTerminator()) {
  2214. SourceLocation SLoc = T->getLocStart();
  2215. Out << "\\|Terminator: ";
  2216. LangOptions LO; // FIXME.
  2217. E.getSrc()->printTerminator(Out, LO);
  2218. if (SLoc.isFileID()) {
  2219. Out << "\\lline="
  2220. << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
  2221. << " col="
  2222. << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
  2223. }
  2224. if (isa<SwitchStmt>(T)) {
  2225. const Stmt *Label = E.getDst()->getLabel();
  2226. if (Label) {
  2227. if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
  2228. Out << "\\lcase ";
  2229. LangOptions LO; // FIXME.
  2230. if (C->getLHS())
  2231. C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));
  2232. if (const Stmt *RHS = C->getRHS()) {
  2233. Out << " .. ";
  2234. RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
  2235. }
  2236. Out << ":";
  2237. }
  2238. else {
  2239. assert (isa<DefaultStmt>(Label));
  2240. Out << "\\ldefault:";
  2241. }
  2242. }
  2243. else
  2244. Out << "\\l(implicit) default:";
  2245. }
  2246. else if (isa<IndirectGotoStmt>(T)) {
  2247. // FIXME
  2248. }
  2249. else {
  2250. Out << "\\lCondition: ";
  2251. if (*E.getSrc()->succ_begin() == E.getDst())
  2252. Out << "true";
  2253. else
  2254. Out << "false";
  2255. }
  2256. Out << "\\l";
  2257. }
  2258. #if 0
  2259. // FIXME: Replace with a general scheme to determine
  2260. // the name of the check.
  2261. if (GraphPrintCheckerState->isUndefControlFlow(N)) {
  2262. Out << "\\|Control-flow based on\\lUndefined value.\\l";
  2263. }
  2264. #endif
  2265. break;
  2266. }
  2267. default: {
  2268. const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
  2269. assert(S != nullptr && "Expecting non-null Stmt");
  2270. Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
  2271. LangOptions LO; // FIXME.
  2272. S->printPretty(Out, nullptr, PrintingPolicy(LO));
  2273. printLocation(Out, S->getLocStart());
  2274. if (Loc.getAs<PreStmt>())
  2275. Out << "\\lPreStmt\\l;";
  2276. else if (Loc.getAs<PostLoad>())
  2277. Out << "\\lPostLoad\\l;";
  2278. else if (Loc.getAs<PostStore>())
  2279. Out << "\\lPostStore\\l";
  2280. else if (Loc.getAs<PostLValue>())
  2281. Out << "\\lPostLValue\\l";
  2282. #if 0
  2283. // FIXME: Replace with a general scheme to determine
  2284. // the name of the check.
  2285. if (GraphPrintCheckerState->isImplicitNullDeref(N))
  2286. Out << "\\|Implicit-Null Dereference.\\l";
  2287. else if (GraphPrintCheckerState->isExplicitNullDeref(N))
  2288. Out << "\\|Explicit-Null Dereference.\\l";
  2289. else if (GraphPrintCheckerState->isUndefDeref(N))
  2290. Out << "\\|Dereference of undefialied value.\\l";
  2291. else if (GraphPrintCheckerState->isUndefStore(N))
  2292. Out << "\\|Store to Undefined Loc.";
  2293. else if (GraphPrintCheckerState->isUndefResult(N))
  2294. Out << "\\|Result of operation is undefined.";
  2295. else if (GraphPrintCheckerState->isNoReturnCall(N))
  2296. Out << "\\|Call to function marked \"noreturn\".";
  2297. else if (GraphPrintCheckerState->isBadCall(N))
  2298. Out << "\\|Call to NULL/Undefined.";
  2299. else if (GraphPrintCheckerState->isUndefArg(N))
  2300. Out << "\\|Argument in call is undefined";
  2301. #endif
  2302. break;
  2303. }
  2304. }
  2305. ProgramStateRef state = N->getState();
  2306. Out << "\\|StateID: " << (const void*) state.get()
  2307. << " NodeID: " << (const void*) N << "\\|";
  2308. state->printDOT(Out);
  2309. Out << "\\l";
  2310. if (const ProgramPointTag *tag = Loc.getTag()) {
  2311. Out << "\\|Tag: " << tag->getTagDescription();
  2312. Out << "\\l";
  2313. }
  2314. return Out.str();
  2315. }
  2316. };
  2317. } // end llvm namespace
  2318. #endif
  2319. void ExprEngine::ViewGraph(bool trim) {
  2320. #ifndef NDEBUG
  2321. if (trim) {
  2322. std::vector<const ExplodedNode*> Src;
  2323. // Flush any outstanding reports to make sure we cover all the nodes.
  2324. // This does not cause them to get displayed.
  2325. for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I)
  2326. const_cast<BugType*>(*I)->FlushReports(BR);
  2327. // Iterate through the reports and get their nodes.
  2328. for (BugReporter::EQClasses_iterator
  2329. EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
  2330. ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
  2331. if (N) Src.push_back(N);
  2332. }
  2333. ViewGraph(Src);
  2334. }
  2335. else {
  2336. GraphPrintCheckerState = this;
  2337. GraphPrintSourceManager = &getContext().getSourceManager();
  2338. llvm::ViewGraph(*G.roots_begin(), "ExprEngine");
  2339. GraphPrintCheckerState = nullptr;
  2340. GraphPrintSourceManager = nullptr;
  2341. }
  2342. #endif
  2343. }
  2344. void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
  2345. #ifndef NDEBUG
  2346. GraphPrintCheckerState = this;
  2347. GraphPrintSourceManager = &getContext().getSourceManager();
  2348. std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));
  2349. if (!TrimmedG.get())
  2350. llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  2351. else
  2352. llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");
  2353. GraphPrintCheckerState = nullptr;
  2354. GraphPrintSourceManager = nullptr;
  2355. #endif
  2356. }