ExprEngine.cpp

//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)
//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}
//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}
ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer
  // region, start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}
//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {
  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}
void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);

      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}
void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}
void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}
void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}
void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}
void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}
void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }

    StmtBldr.generateNode(BTE, Node, State);
  }
}
void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }
    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
            dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }
    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }
    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
      // FIXME: ChooseExpr is really a constant. We need to fix the CFG so
      // that it does not model ChooseExprs as explicit control-flow.
    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }
  1085. case Stmt::InitListExprClass:
  1086. Bldr.takeNodes(Pred);
  1087. VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
  1088. Bldr.addNodes(Dst);
  1089. break;
  1090. case Stmt::MemberExprClass:
  1091. Bldr.takeNodes(Pred);
  1092. VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
  1093. Bldr.addNodes(Dst);
  1094. break;
  1095. case Stmt::AtomicExprClass:
  1096. Bldr.takeNodes(Pred);
  1097. VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
  1098. Bldr.addNodes(Dst);
  1099. break;
  1100. case Stmt::ObjCIvarRefExprClass:
  1101. Bldr.takeNodes(Pred);
  1102. VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
  1103. Bldr.addNodes(Dst);
  1104. break;
  1105. case Stmt::ObjCForCollectionStmtClass:
  1106. Bldr.takeNodes(Pred);
  1107. VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
  1108. Bldr.addNodes(Dst);
  1109. break;
  1110. case Stmt::ObjCMessageExprClass:
  1111. Bldr.takeNodes(Pred);
  1112. VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
  1113. Bldr.addNodes(Dst);
  1114. break;
  1115. case Stmt::ObjCAtThrowStmtClass:
  1116. case Stmt::CXXThrowExprClass:
  1117. // FIXME: This is not complete. We basically treat @throw as
  1118. // an abort.
  1119. Bldr.generateSink(S, Pred, Pred->getState());
  1120. break;
  1121. case Stmt::ReturnStmtClass:
  1122. Bldr.takeNodes(Pred);
  1123. VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
  1124. Bldr.addNodes(Dst);
  1125. break;
  1126. case Stmt::OffsetOfExprClass:
  1127. Bldr.takeNodes(Pred);
  1128. VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
  1129. Bldr.addNodes(Dst);
  1130. break;
  1131. case Stmt::UnaryExprOrTypeTraitExprClass:
  1132. Bldr.takeNodes(Pred);
  1133. VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
  1134. Pred, Dst);
  1135. Bldr.addNodes(Dst);
  1136. break;
  1137. case Stmt::StmtExprClass: {
  1138. const StmtExpr *SE = cast<StmtExpr>(S);
  1139. if (SE->getSubStmt()->body_empty()) {
  1140. // Empty statement expression.
  1141. assert(SE->getType() == getContext().VoidTy
  1142. && "Empty statement expression must have void type.");
  1143. break;
  1144. }
  1145. if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
  1146. ProgramStateRef state = Pred->getState();
  1147. Bldr.generateNode(SE, Pred,
  1148. state->BindExpr(SE, Pred->getLocationContext(),
  1149. state->getSVal(LastExpr,
  1150. Pred->getLocationContext())));
  1151. }
  1152. break;
  1153. }
  1154. case Stmt::UnaryOperatorClass: {
  1155. Bldr.takeNodes(Pred);
  1156. const UnaryOperator *U = cast<UnaryOperator>(S);
  1157. if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
  1158. ExplodedNodeSet Tmp;
  1159. VisitUnaryOperator(U, Pred, Tmp);
  1160. evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
  1161. }
  1162. else
  1163. VisitUnaryOperator(U, Pred, Dst);
  1164. Bldr.addNodes(Dst);
  1165. break;
  1166. }
  1167. case Stmt::PseudoObjectExprClass: {
  1168. Bldr.takeNodes(Pred);
  1169. ProgramStateRef state = Pred->getState();
  1170. const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
  1171. if (const Expr *Result = PE->getResultExpr()) {
  1172. SVal V = state->getSVal(Result, Pred->getLocationContext());
  1173. Bldr.generateNode(S, Pred,
  1174. state->BindExpr(S, Pred->getLocationContext(), V));
  1175. }
  1176. else
  1177. Bldr.generateNode(S, Pred,
  1178. state->BindExpr(S, Pred->getLocationContext(),
  1179. UnknownVal()));
  1180. Bldr.addNodes(Dst);
  1181. break;
  1182. }
  1183. }
  1184. }
  1185. bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
  1186. const LocationContext *CalleeLC) {
  1187. const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  1188. const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  1189. assert(CalleeSF && CallerSF);
  1190. ExplodedNode *BeforeProcessingCall = nullptr;
  1191. const Stmt *CE = CalleeSF->getCallSite();
  1192. // Find the first node before we started processing the call expression.
  1193. while (N) {
  1194. ProgramPoint L = N->getLocation();
  1195. BeforeProcessingCall = N;
  1196. N = N->pred_empty() ? nullptr : *(N->pred_begin());
  1197. // Skip the nodes corresponding to the inlined code.
  1198. if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
  1199. continue;
  1200. // We reached the caller. Find the node right before we started
  1201. // processing the call.
  1202. if (L.isPurgeKind())
  1203. continue;
  1204. if (L.getAs<PreImplicitCall>())
  1205. continue;
  1206. if (L.getAs<CallEnter>())
  1207. continue;
  1208. if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
  1209. if (SP->getStmt() == CE)
  1210. continue;
  1211. break;
  1212. }
  1213. if (!BeforeProcessingCall)
  1214. return false;
  1215. // TODO: Clean up the unneeded nodes.
1216. // Build an Epsilon node from which we will restart the analysis.
  1217. // Note that CE is permitted to be NULL!
  1218. ProgramPoint NewNodeLoc =
  1219. EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  1220. // Add the special flag to GDM to signal retrying with no inlining.
  1221. // Note, changing the state ensures that we are not going to cache out.
  1222. ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  1223. NewNodeState =
  1224. NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));
  1225. // Make the new node a successor of BeforeProcessingCall.
  1226. bool IsNew = false;
  1227. ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  1228. // We cached out at this point. Caching out is common due to us backtracking
  1229. // from the inlined function, which might spawn several paths.
  1230. if (!IsNew)
  1231. return true;
  1232. NewNode->addPredecessor(BeforeProcessingCall, G);
  1233. // Add the new node to the work list.
  1234. Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
  1235. CalleeSF->getIndex());
  1236. NumTimesRetriedWithoutInlining++;
  1237. return true;
  1238. }
  1239. /// Block entrance. (Update counters).
  1240. void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
  1241. NodeBuilderWithSinks &nodeBuilder,
  1242. ExplodedNode *Pred) {
  1243. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1244. // If this block is terminated by a loop and it has already been visited the
  1245. // maximum number of times, widen the loop.
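// Widening conservatively invalidates the parts of the state the loop body
// may modify (see getWidenedLoopState below), letting the analysis continue
// past the loop instead of sinking the path once the visit limit is reached.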
  1246. unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  1247. if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
  1248. AMgr.options.shouldWidenLoops()) {
  1249. const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
  1250. if (!(Term &&
  1251. (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
  1252. return;
  1253. // Widen.
  1254. const LocationContext *LCtx = Pred->getLocationContext();
  1255. ProgramStateRef WidenedState =
  1256. getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
  1257. nodeBuilder.generateNode(WidenedState, Pred);
  1258. return;
  1259. }
  1260. // FIXME: Refactor this into a checker.
  1261. if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
  1262. static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
  1263. const ExplodedNode *Sink =
  1264. nodeBuilder.generateSink(Pred->getState(), Pred, &tag);
  1265. // Check if we stopped at the top level function or not.
1266. // The root node should have the location context of the topmost function.
  1267. const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
  1268. const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  1269. const LocationContext *RootLC =
  1270. (*G.roots_begin())->getLocation().getLocationContext();
  1271. if (RootLC->getCurrentStackFrame() != CalleeSF) {
  1272. Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());
  1273. // Re-run the call evaluation without inlining it, by storing the
  1274. // no-inlining policy in the state and enqueuing the new work item on
  1275. // the list. Replay should almost never fail. Use the stats to catch it
  1276. // if it does.
  1277. if ((!AMgr.options.NoRetryExhausted &&
  1278. replayWithoutInlining(Pred, CalleeLC)))
  1279. return;
  1280. NumMaxBlockCountReachedInInlined++;
  1281. } else
  1282. NumMaxBlockCountReached++;
1283. // Mark sink nodes as exhausted (for stats) only if the retry failed.
  1284. Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  1285. }
  1286. }
  1287. //===----------------------------------------------------------------------===//
  1288. // Branch processing.
  1289. //===----------------------------------------------------------------------===//
  1290. /// RecoverCastedSymbol - A helper function for ProcessBranch that is used
  1291. /// to try to recover some path-sensitivity for casts of symbolic
  1292. /// integers that promote their values (which are currently not tracked well).
  1293. /// This function returns the SVal bound to Condition->IgnoreCasts if all the
1294. /// cast(s) did was sign-extend the original value.
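/// For example, given 'char c = ...; if ((int)c) { ... }', the cast only
/// promotes the value of 'c', so the symbol bound to 'c' itself can still be
/// used to constrain the branch.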
  1295. static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
  1296. ProgramStateRef state,
  1297. const Stmt *Condition,
  1298. const LocationContext *LCtx,
  1299. ASTContext &Ctx) {
  1300. const Expr *Ex = dyn_cast<Expr>(Condition);
  1301. if (!Ex)
  1302. return UnknownVal();
  1303. uint64_t bits = 0;
  1304. bool bitsInit = false;
  1305. while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
  1306. QualType T = CE->getType();
  1307. if (!T->isIntegralOrEnumerationType())
  1308. return UnknownVal();
  1309. uint64_t newBits = Ctx.getTypeSize(T);
  1310. if (!bitsInit || newBits < bits) {
  1311. bitsInit = true;
  1312. bits = newBits;
  1313. }
  1314. Ex = CE->getSubExpr();
  1315. }
  1316. // We reached a non-cast. Is it a symbolic value?
  1317. QualType T = Ex->getType();
  1318. if (!bitsInit || !T->isIntegralOrEnumerationType() ||
  1319. Ctx.getTypeSize(T) > bits)
  1320. return UnknownVal();
  1321. return state->getSVal(Ex, LCtx);
  1322. }
  1323. #ifndef NDEBUG
  1324. static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  1325. while (Condition) {
  1326. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1327. if (!BO || !BO->isLogicalOp()) {
  1328. return Condition;
  1329. }
  1330. Condition = BO->getRHS()->IgnoreParens();
  1331. }
  1332. return nullptr;
  1333. }
  1334. #endif
  1335. // Returns the condition the branch at the end of 'B' depends on and whose value
  1336. // has been evaluated within 'B'.
  1337. // In most cases, the terminator condition of 'B' will be evaluated fully in
  1338. // the last statement of 'B'; in those cases, the resolved condition is the
  1339. // given 'Condition'.
  1340. // If the condition of the branch is a logical binary operator tree, the CFG is
  1341. // optimized: in that case, we know that the expression formed by all but the
  1342. // rightmost leaf of the logical binary operator tree must be true, and thus
  1343. // the branch condition is at this point equivalent to the truth value of that
  1344. // rightmost leaf; the CFG block thus only evaluates this rightmost leaf
  1345. // expression in its final statement. As the full condition in that case was
  1346. // not evaluated, and is thus not in the SVal cache, we need to use that leaf
  1347. // expression to evaluate the truth value of the condition in the current state
  1348. // space.
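// For example, for 'if (a && b)' the CFG block ending in the branch evaluates
// only the rightmost leaf 'b'; ResolveCondition returns that leaf, so the
// branch is decided from a value that was actually computed in this block.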
  1349. static const Stmt *ResolveCondition(const Stmt *Condition,
  1350. const CFGBlock *B) {
  1351. if (const Expr *Ex = dyn_cast<Expr>(Condition))
  1352. Condition = Ex->IgnoreParens();
  1353. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1354. if (!BO || !BO->isLogicalOp())
  1355. return Condition;
  1356. assert(!B->getTerminator().isTemporaryDtorsBranch() &&
  1357. "Temporary destructor branches handled by processBindTemporary.");
  1358. // For logical operations, we still have the case where some branches
  1359. // use the traditional "merge" approach and others sink the branch
  1360. // directly into the basic blocks representing the logical operation.
  1361. // We need to distinguish between those two cases here.
  1362. // The invariants are still shifting, but it is possible that the
  1363. // last element in a CFGBlock is not a CFGStmt. Look for the last
  1364. // CFGStmt as the value of the condition.
  1365. CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  1366. for (; I != E; ++I) {
  1367. CFGElement Elem = *I;
  1368. Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
  1369. if (!CS)
  1370. continue;
  1371. const Stmt *LastStmt = CS->getStmt();
  1372. assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
  1373. return LastStmt;
  1374. }
  1375. llvm_unreachable("could not resolve condition");
  1376. }
  1377. void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
  1378. NodeBuilderContext& BldCtx,
  1379. ExplodedNode *Pred,
  1380. ExplodedNodeSet &Dst,
  1381. const CFGBlock *DstT,
  1382. const CFGBlock *DstF) {
  1383. assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
  1384. "CXXBindTemporaryExprs are handled by processBindTemporary.");
  1385. const LocationContext *LCtx = Pred->getLocationContext();
  1386. PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  1387. currBldrCtx = &BldCtx;
  1388. // Check for NULL conditions; e.g. "for(;;)"
  1389. if (!Condition) {
  1390. BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
  1391. NullCondBldr.markInfeasible(false);
  1392. NullCondBldr.generateNode(Pred->getState(), true, Pred);
  1393. return;
  1394. }
  1395. if (const Expr *Ex = dyn_cast<Expr>(Condition))
  1396. Condition = Ex->IgnoreParens();
  1397. Condition = ResolveCondition(Condition, BldCtx.getBlock());
  1398. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  1399. Condition->getLocStart(),
  1400. "Error evaluating branch");
  1401. ExplodedNodeSet CheckersOutSet;
  1402. getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
  1403. Pred, *this);
  1404. // We generated only sinks.
  1405. if (CheckersOutSet.empty())
  1406. return;
  1407. BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  1408. for (NodeBuilder::iterator I = CheckersOutSet.begin(),
  1409. E = CheckersOutSet.end(); E != I; ++I) {
  1410. ExplodedNode *PredI = *I;
  1411. if (PredI->isSink())
  1412. continue;
  1413. ProgramStateRef PrevState = PredI->getState();
  1414. SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());
  1415. if (X.isUnknownOrUndef()) {
  1416. // Give it a chance to recover from unknown.
  1417. if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
  1418. if (Ex->getType()->isIntegralOrEnumerationType()) {
1419. // Try to recover some path-sensitivity. Casts of symbolic integers
1420. // that promote their values are currently not tracked well.
  1421. // If 'Condition' is such an expression, try and recover the
  1422. // underlying value and use that instead.
  1423. SVal recovered = RecoverCastedSymbol(getStateManager(),
  1424. PrevState, Condition,
  1425. PredI->getLocationContext(),
  1426. getContext());
  1427. if (!recovered.isUnknown()) {
  1428. X = recovered;
  1429. }
  1430. }
  1431. }
  1432. }
  1433. // If the condition is still unknown, give up.
  1434. if (X.isUnknownOrUndef()) {
  1435. builder.generateNode(PrevState, true, PredI);
  1436. builder.generateNode(PrevState, false, PredI);
  1437. continue;
  1438. }
  1439. DefinedSVal V = X.castAs<DefinedSVal>();
  1440. ProgramStateRef StTrue, StFalse;
  1441. std::tie(StTrue, StFalse) = PrevState->assume(V);
  1442. // Process the true branch.
  1443. if (builder.isFeasible(true)) {
  1444. if (StTrue)
  1445. builder.generateNode(StTrue, true, PredI);
  1446. else
  1447. builder.markInfeasible(true);
  1448. }
  1449. // Process the false branch.
  1450. if (builder.isFeasible(false)) {
  1451. if (StFalse)
  1452. builder.generateNode(StFalse, false, PredI);
  1453. else
  1454. builder.markInfeasible(false);
  1455. }
  1456. }
  1457. currBldrCtx = nullptr;
  1458. }
  1459. /// The GDM component containing the set of global variables which have been
  1460. /// previously initialized with explicit initializers.
  1461. REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
  1462. llvm::ImmutableSet<const VarDecl *>)
  1463. void ExprEngine::processStaticInitializer(const DeclStmt *DS,
  1464. NodeBuilderContext &BuilderCtx,
  1465. ExplodedNode *Pred,
  1466. clang::ento::ExplodedNodeSet &Dst,
  1467. const CFGBlock *DstT,
  1468. const CFGBlock *DstF) {
  1469. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1470. currBldrCtx = &BuilderCtx;
  1471. const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  1472. ProgramStateRef state = Pred->getState();
  1473. bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  1474. BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);
  1475. if (!initHasRun) {
  1476. state = state->add<InitializedGlobalsSet>(VD);
  1477. }
  1478. builder.generateNode(state, initHasRun, Pred);
  1479. builder.markInfeasible(!initHasRun);
  1480. currBldrCtx = nullptr;
  1481. }
  1482. /// processIndirectGoto - Called by CoreEngine. Used to generate successor
  1483. /// nodes by processing the 'effects' of a computed goto jump.
  1484. void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {
  1485. ProgramStateRef state = builder.getState();
  1486. SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());
  1487. // Three possibilities:
  1488. //
  1489. // (1) We know the computed label.
  1490. // (2) The label is NULL (or some other constant), or Undefined.
  1491. // (3) We have no clue about the label. Dispatch to all targets.
  1492. //
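// For example, with 'void *target = &&some_label; goto *target;' the value is
// a loc::GotoLabel and we dispatch directly to that label's block (case 1).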
  1493. typedef IndirectGotoNodeBuilder::iterator iterator;
  1494. if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
  1495. const LabelDecl *L = LV->getLabel();
  1496. for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
  1497. if (I.getLabel() == L) {
  1498. builder.generateNode(I, state);
  1499. return;
  1500. }
  1501. }
  1502. llvm_unreachable("No block with label.");
  1503. }
  1504. if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
  1505. // Dispatch to the first target and mark it as a sink.
  1506. //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
  1507. // FIXME: add checker visit.
  1508. // UndefBranches.insert(N);
  1509. return;
  1510. }
  1511. // This is really a catch-all. We don't support symbolics yet.
  1512. // FIXME: Implement dispatch for symbolic pointers.
  1513. for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
  1514. builder.generateNode(I, state);
  1515. }
  1516. #if 0
  1517. static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
  1518. const StackFrameContext* Frame = Pred.getStackFrame();
  1519. const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
  1520. Pred.getState()->get<InitializedTemporariesSet>();
  1521. return std::find_if(Set.begin(), Set.end(),
  1522. [&](const CXXBindTemporaryContext &Ctx) {
  1523. if (Ctx.second == Frame) {
  1524. Ctx.first->dump();
  1525. llvm::errs() << "\n";
  1526. }
  1527. return Ctx.second == Frame;
  1528. }) == Set.end();
  1529. }
  1530. #endif
  1531. void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC,
  1532. ExplodedNode *Pred,
  1533. ExplodedNodeSet &Dst,
  1534. const BlockEdge &L) {
  1535. SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  1536. getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this);
  1537. }
1538. /// processEndOfFunction - Called by CoreEngine. Used to generate end-of-path
1539. /// nodes when control reaches the end of a function.
  1540. void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
  1541. ExplodedNode *Pred) {
  1542. // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)).
  1543. // We currently cannot enable this assert, as lifetime extended temporaries
  1544. // are not modelled correctly.
  1545. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1546. StateMgr.EndPath(Pred->getState());
  1547. ExplodedNodeSet Dst;
  1548. if (Pred->getLocationContext()->inTopFrame()) {
  1549. // Remove dead symbols.
  1550. ExplodedNodeSet AfterRemovedDead;
  1551. removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);
  1552. // Notify checkers.
  1553. for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
  1554. E = AfterRemovedDead.end(); I != E; ++I) {
  1555. getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
  1556. }
  1557. } else {
  1558. getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  1559. }
  1560. Engine.enqueueEndOfFunction(Dst);
  1561. }
1562. /// processSwitch - Called by CoreEngine. Used to generate successor
  1563. /// nodes by processing the 'effects' of a switch statement.
  1564. void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  1565. typedef SwitchNodeBuilder::iterator iterator;
  1566. ProgramStateRef state = builder.getState();
  1567. const Expr *CondE = builder.getCondition();
  1568. SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());
  1569. if (CondV_untested.isUndef()) {
  1570. //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
  1571. // FIXME: add checker
  1572. //UndefBranches.insert(N);
  1573. return;
  1574. }
  1575. DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();
  1576. ProgramStateRef DefaultSt = state;
  1577. iterator I = builder.begin(), EI = builder.end();
  1578. bool defaultIsFeasible = I == EI;
  1579. for ( ; I != EI; ++I) {
  1580. // Successor may be pruned out during CFG construction.
  1581. if (!I.getBlock())
  1582. continue;
  1583. const CaseStmt *Case = I.getCase();
  1584. // Evaluate the LHS of the case value.
  1585. llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
  1586. assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));
  1587. // Get the RHS of the case, if it exists.
  1588. llvm::APSInt V2;
  1589. if (const Expr *E = Case->getRHS())
  1590. V2 = E->EvaluateKnownConstInt(getContext());
  1591. else
  1592. V2 = V1;
  1593. ProgramStateRef StateCase;
  1594. if (Optional<NonLoc> NL = CondV.getAs<NonLoc>())
  1595. std::tie(StateCase, DefaultSt) =
  1596. DefaultSt->assumeWithinInclusiveRange(*NL, V1, V2);
  1597. else // UnknownVal
  1598. StateCase = DefaultSt;
  1599. if (StateCase)
  1600. builder.generateCaseStmtNode(I, StateCase);
  1601. // Now "assume" that the case doesn't match. Add this state
  1602. // to the default state (if it is feasible).
  1603. if (DefaultSt)
  1604. defaultIsFeasible = true;
  1605. else {
  1606. defaultIsFeasible = false;
  1607. break;
  1608. }
  1609. }
  1610. if (!defaultIsFeasible)
  1611. return;
  1612. // If we have switch(enum value), the default branch is not
  1613. // feasible if all of the enum constants not covered by 'case:' statements
  1614. // are not feasible values for the switch condition.
  1615. //
1616. // Note that this isn't as accurate as it could be. Even if there is no
1617. // case for a particular enum value, that value should only make 'default:'
1618. // reachable if it is a feasible value for the switch condition.
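// For example, in 'enum E { A, B }; switch (e) { case A: ...; case B: ...; }'
// every enumerator is covered, so no node is generated for the default branch.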
  1619. const SwitchStmt *SS = builder.getSwitch();
  1620. const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  1621. if (CondExpr->getType()->getAs<EnumType>()) {
  1622. if (SS->isAllEnumCasesCovered())
  1623. return;
  1624. }
  1625. builder.generateDefaultCaseNode(DefaultSt);
  1626. }
  1627. //===----------------------------------------------------------------------===//
  1628. // Transfer functions: Loads and stores.
  1629. //===----------------------------------------------------------------------===//
  1630. void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
  1631. ExplodedNode *Pred,
  1632. ExplodedNodeSet &Dst) {
  1633. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  1634. ProgramStateRef state = Pred->getState();
  1635. const LocationContext *LCtx = Pred->getLocationContext();
  1636. if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
  1637. // C permits "extern void v", and if you cast the address to a valid type,
  1638. // you can even do things with it. We simply pretend
  1639. assert(Ex->isGLValue() || VD->getType()->isVoidType());
  1640. const LocationContext *LocCtxt = Pred->getLocationContext();
  1641. const Decl *D = LocCtxt->getDecl();
  1642. const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr;
  1643. const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex);
  1644. SVal V;
  1645. bool IsReference;
  1646. if (AMgr.options.shouldInlineLambdas() && DeclRefEx &&
  1647. DeclRefEx->refersToEnclosingVariableOrCapture() && MD &&
  1648. MD->getParent()->isLambda()) {
  1649. // Lookup the field of the lambda.
  1650. const CXXRecordDecl *CXXRec = MD->getParent();
  1651. llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
  1652. FieldDecl *LambdaThisCaptureField;
  1653. CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);
  1654. const FieldDecl *FD = LambdaCaptureFields[VD];
  1655. if (!FD) {
  1656. // When a constant is captured, sometimes no corresponding field is
  1657. // created in the lambda object.
  1658. assert(VD->getType().isConstQualified());
  1659. V = state->getLValue(VD, LocCtxt);
  1660. IsReference = false;
  1661. } else {
  1662. Loc CXXThis =
  1663. svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame());
  1664. SVal CXXThisVal = state->getSVal(CXXThis);
  1665. V = state->getLValue(FD, CXXThisVal);
  1666. IsReference = FD->getType()->isReferenceType();
  1667. }
  1668. } else {
  1669. V = state->getLValue(VD, LocCtxt);
  1670. IsReference = VD->getType()->isReferenceType();
  1671. }
  1672. // For references, the 'lvalue' is the pointer address stored in the
  1673. // reference region.
  1674. if (IsReference) {
  1675. if (const MemRegion *R = V.getAsRegion())
  1676. V = state->getSVal(R);
  1677. else
  1678. V = UnknownVal();
  1679. }
  1680. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1681. ProgramPoint::PostLValueKind);
  1682. return;
  1683. }
  1684. if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
  1685. assert(!Ex->isGLValue());
  1686. SVal V = svalBuilder.makeIntVal(ED->getInitVal());
  1687. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
  1688. return;
  1689. }
  1690. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
  1691. SVal V = svalBuilder.getFunctionPointer(FD);
  1692. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1693. ProgramPoint::PostLValueKind);
  1694. return;
  1695. }
  1696. if (isa<FieldDecl>(D)) {
  1697. // FIXME: Compute lvalue of field pointers-to-member.
  1698. // Right now we just use a non-null void pointer, so that it gives proper
  1699. // results in boolean contexts.
  1700. SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
  1701. currBldrCtx->blockCount());
  1702. state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
  1703. Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
  1704. ProgramPoint::PostLValueKind);
  1705. return;
  1706. }
  1707. llvm_unreachable("Support for this Decl not implemented.");
  1708. }
1709. /// VisitLvalArraySubscriptExpr - Transfer function for array accesses.
  1710. void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
  1711. ExplodedNode *Pred,
  1712. ExplodedNodeSet &Dst){
  1713. const Expr *Base = A->getBase()->IgnoreParens();
  1714. const Expr *Idx = A->getIdx()->IgnoreParens();
  1715. ExplodedNodeSet checkerPreStmt;
  1716. getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this);
  1717. StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx);
  1718. assert(A->isGLValue() ||
  1719. (!AMgr.getLangOpts().CPlusPlus &&
  1720. A->getType().isCForbiddenLValueType()));
  1721. for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(),
  1722. ei = checkerPreStmt.end(); it != ei; ++it) {
  1723. const LocationContext *LCtx = (*it)->getLocationContext();
  1724. ProgramStateRef state = (*it)->getState();
  1725. SVal V = state->getLValue(A->getType(),
  1726. state->getSVal(Idx, LCtx),
  1727. state->getSVal(Base, LCtx));
  1728. Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr,
  1729. ProgramPoint::PostLValueKind);
  1730. }
  1731. }
  1732. /// VisitMemberExpr - Transfer function for member expressions.
  1733. void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
  1734. ExplodedNodeSet &Dst) {
  1735. // FIXME: Prechecks eventually go in ::Visit().
  1736. ExplodedNodeSet CheckedSet;
  1737. getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
  1738. ExplodedNodeSet EvalSet;
  1739. ValueDecl *Member = M->getMemberDecl();
  1740. // Handle static member variables and enum constants accessed via
  1741. // member syntax.
  1742. if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
  1743. ExplodedNodeSet Dst;
  1744. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1745. I != E; ++I) {
1746. VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
  1747. }
  1748. } else {
  1749. StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
  1750. ExplodedNodeSet Tmp;
  1751. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1752. I != E; ++I) {
  1753. ProgramStateRef state = (*I)->getState();
  1754. const LocationContext *LCtx = (*I)->getLocationContext();
  1755. Expr *BaseExpr = M->getBase();
  1756. // Handle C++ method calls.
  1757. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
  1758. if (MD->isInstance())
  1759. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1760. SVal MDVal = svalBuilder.getFunctionPointer(MD);
  1761. state = state->BindExpr(M, LCtx, MDVal);
  1762. Bldr.generateNode(M, *I, state);
  1763. continue;
  1764. }
  1765. // Handle regular struct fields / member variables.
  1766. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1767. SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
  1768. FieldDecl *field = cast<FieldDecl>(Member);
  1769. SVal L = state->getLValue(field, baseExprVal);
  1770. if (M->isGLValue() || M->getType()->isArrayType()) {
  1771. // We special-case rvalues of array type because the analyzer cannot
  1772. // reason about them, since we expect all regions to be wrapped in Locs.
  1773. // We instead treat these as lvalues and assume that they will decay to
  1774. // pointers as soon as they are used.
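// e.g. an array member of a struct returned by value, as in 'f().arr[0]': the
// member expression is a non-lvalue of array type whose only sensible use is
// the immediate decay to a pointer.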
  1775. if (!M->isGLValue()) {
  1776. assert(M->getType()->isArrayType());
  1777. const ImplicitCastExpr *PE =
  1778. dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
  1779. if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
  1780. llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
  1781. }
  1782. }
  1783. if (field->getType()->isReferenceType()) {
  1784. if (const MemRegion *R = L.getAsRegion())
  1785. L = state->getSVal(R);
  1786. else
  1787. L = UnknownVal();
  1788. }
  1789. Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
  1790. ProgramPoint::PostLValueKind);
  1791. } else {
  1792. Bldr.takeNodes(*I);
  1793. evalLoad(Tmp, M, M, *I, state, L);
  1794. Bldr.addNodes(Tmp);
  1795. }
  1796. }
  1797. }
  1798. getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
  1799. }
  1800. void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
  1801. ExplodedNodeSet &Dst) {
  1802. ExplodedNodeSet AfterPreSet;
  1803. getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
  1804. // For now, treat all the arguments to C11 atomics as escaping.
  1805. // FIXME: Ideally we should model the behavior of the atomics precisely here.
  1806. ExplodedNodeSet AfterInvalidateSet;
  1807. StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
  1808. for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
  1809. I != E; ++I) {
  1810. ProgramStateRef State = (*I)->getState();
  1811. const LocationContext *LCtx = (*I)->getLocationContext();
  1812. SmallVector<SVal, 8> ValuesToInvalidate;
  1813. for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
  1814. const Expr *SubExpr = AE->getSubExprs()[SI];
  1815. SVal SubExprVal = State->getSVal(SubExpr, LCtx);
  1816. ValuesToInvalidate.push_back(SubExprVal);
  1817. }
  1818. State = State->invalidateRegions(ValuesToInvalidate, AE,
  1819. currBldrCtx->blockCount(),
  1820. LCtx,
  1821. /*CausedByPointerEscape*/true,
  1822. /*Symbols=*/nullptr);
  1823. SVal ResultVal = UnknownVal();
  1824. State = State->BindExpr(AE, LCtx, ResultVal);
  1825. Bldr.generateNode(AE, *I, State, nullptr,
  1826. ProgramPoint::PostStmtKind);
  1827. }
  1828. getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
  1829. }
  1830. namespace {
  1831. class CollectReachableSymbolsCallback final : public SymbolVisitor {
  1832. InvalidatedSymbols Symbols;
  1833. public:
  1834. CollectReachableSymbolsCallback(ProgramStateRef State) {}
  1835. const InvalidatedSymbols &getSymbols() const { return Symbols; }
  1836. bool VisitSymbol(SymbolRef Sym) override {
  1837. Symbols.insert(Sym);
  1838. return true;
  1839. }
  1840. };
  1841. } // end anonymous namespace
  1842. // A value escapes in three possible cases:
  1843. // (1) We are binding to something that is not a memory region.
1844. // (2) We are binding to a MemRegion that does not have stack storage.
  1845. // (3) We are binding to a MemRegion with stack storage that the store
  1846. // does not understand.
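// For example, storing a pointer into a global variable or a heap-allocated
// node falls under case (2): the destination region has no stack storage.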
  1847. ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
  1848. SVal Loc, SVal Val) {
  1849. // Are we storing to something that causes the value to "escape"?
  1850. bool escapes = true;
  1851. // TODO: Move to StoreManager.
  1852. if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
  1853. escapes = !regionLoc->getRegion()->hasStackStorage();
  1854. if (!escapes) {
  1855. // To test (3), generate a new state with the binding added. If it is
  1856. // the same state, then it escapes (since the store cannot represent
  1857. // the binding).
  1858. // Do this only if we know that the store is not supposed to generate the
  1859. // same state.
  1860. SVal StoredVal = State->getSVal(regionLoc->getRegion());
  1861. if (StoredVal != Val)
  1862. escapes = (State == (State->bindLoc(*regionLoc, Val)));
  1863. }
  1864. }
  1865. // If our store can represent the binding and we aren't storing to something
  1866. // that doesn't have local storage then just return and have the simulation
  1867. // state continue as is.
  1868. if (!escapes)
  1869. return State;
  1870. // Otherwise, find all symbols referenced by 'val' that we are tracking
  1871. // and stop tracking them.
  1872. CollectReachableSymbolsCallback Scanner =
  1873. State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  1874. const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  1875. State = getCheckerManager().runCheckersForPointerEscape(State,
  1876. EscapedSymbols,
  1877. /*CallEvent*/ nullptr,
  1878. PSK_EscapeOnBind,
  1879. nullptr);
  1880. return State;
  1881. }
  1882. ProgramStateRef
  1883. ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
  1884. const InvalidatedSymbols *Invalidated,
  1885. ArrayRef<const MemRegion *> ExplicitRegions,
  1886. ArrayRef<const MemRegion *> Regions,
  1887. const CallEvent *Call,
  1888. RegionAndSymbolInvalidationTraits &ITraits) {
  1889. if (!Invalidated || Invalidated->empty())
  1890. return State;
  1891. if (!Call)
  1892. return getCheckerManager().runCheckersForPointerEscape(State,
  1893. *Invalidated,
  1894. nullptr,
  1895. PSK_EscapeOther,
  1896. &ITraits);
  1897. // If the symbols were invalidated by a call, we want to find out which ones
  1898. // were invalidated directly due to being arguments to the call.
  1899. InvalidatedSymbols SymbolsDirectlyInvalidated;
  1900. for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
  1901. E = ExplicitRegions.end(); I != E; ++I) {
  1902. if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
  1903. SymbolsDirectlyInvalidated.insert(R->getSymbol());
  1904. }
  1905. InvalidatedSymbols SymbolsIndirectlyInvalidated;
  1906. for (InvalidatedSymbols::const_iterator I=Invalidated->begin(),
  1907. E = Invalidated->end(); I!=E; ++I) {
  1908. SymbolRef sym = *I;
  1909. if (SymbolsDirectlyInvalidated.count(sym))
  1910. continue;
  1911. SymbolsIndirectlyInvalidated.insert(sym);
  1912. }
  1913. if (!SymbolsDirectlyInvalidated.empty())
  1914. State = getCheckerManager().runCheckersForPointerEscape(State,
  1915. SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);
  1916. // Notify about the symbols that get indirectly invalidated by the call.
  1917. if (!SymbolsIndirectlyInvalidated.empty())
  1918. State = getCheckerManager().runCheckersForPointerEscape(State,
  1919. SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);
  1920. return State;
  1921. }
  1922. /// evalBind - Handle the semantics of binding a value to a specific location.
  1923. /// This method is used by evalStore and (soon) VisitDeclStmt, and others.
  1924. void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
  1925. ExplodedNode *Pred,
  1926. SVal location, SVal Val,
  1927. bool atDeclInit, const ProgramPoint *PP) {
  1928. const LocationContext *LC = Pred->getLocationContext();
  1929. PostStmt PS(StoreE, LC);
  1930. if (!PP)
  1931. PP = &PS;
  1932. // Do a previsit of the bind.
  1933. ExplodedNodeSet CheckedSet;
  1934. getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
  1935. StoreE, *this, *PP);
  1936. StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);
  1937. // If the location is not a 'Loc', it will already be handled by
  1938. // the checkers. There is nothing left to do.
  1939. if (!location.getAs<Loc>()) {
  1940. const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
  1941. /*tag*/nullptr);
  1942. ProgramStateRef state = Pred->getState();
  1943. state = processPointerEscapedOnBind(state, location, Val);
  1944. Bldr.generateNode(L, state, Pred);
  1945. return;
  1946. }
  1947. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1948. I!=E; ++I) {
  1949. ExplodedNode *PredI = *I;
  1950. ProgramStateRef state = PredI->getState();
  1951. state = processPointerEscapedOnBind(state, location, Val);
1952. // When binding the value, pass on the hint that this is an initialization.
  1953. // For initializations, we do not need to inform clients of region
  1954. // changes.
  1955. state = state->bindLoc(location.castAs<Loc>(),
  1956. Val, /* notifyChanges = */ !atDeclInit);
  1957. const MemRegion *LocReg = nullptr;
  1958. if (Optional<loc::MemRegionVal> LocRegVal =
  1959. location.getAs<loc::MemRegionVal>()) {
  1960. LocReg = LocRegVal->getRegion();
  1961. }
  1962. const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
  1963. Bldr.generateNode(L, state, PredI);
  1964. }
  1965. }
  1966. /// evalStore - Handle the semantics of a store via an assignment.
  1967. /// @param Dst The node set to store generated state nodes
  1968. /// @param AssignE The assignment expression if the store happens in an
  1969. /// assignment.
  1970. /// @param LocationE The location expression that is stored to.
  1971. /// @param state The current simulation state
  1972. /// @param location The location to store the value
  1973. /// @param Val The value to be stored
  1974. void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
  1975. const Expr *LocationE,
  1976. ExplodedNode *Pred,
  1977. ProgramStateRef state, SVal location, SVal Val,
  1978. const ProgramPointTag *tag) {
  1979. // Proceed with the store. We use AssignE as the anchor for the PostStore
  1980. // ProgramPoint if it is non-NULL, and LocationE otherwise.
  1981. const Expr *StoreE = AssignE ? AssignE : LocationE;
  1982. // Evaluate the location (checks for bad dereferences).
  1983. ExplodedNodeSet Tmp;
  1984. evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);
  1985. if (Tmp.empty())
  1986. return;
  1987. if (location.isUndef())
  1988. return;
  1989. for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
  1990. evalBind(Dst, StoreE, *NI, location, Val, false);
  1991. }
  1992. void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
  1993. const Expr *NodeEx,
  1994. const Expr *BoundEx,
  1995. ExplodedNode *Pred,
  1996. ProgramStateRef state,
  1997. SVal location,
  1998. const ProgramPointTag *tag,
  1999. QualType LoadTy)
  2000. {
  2001. assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");
  2002. // Are we loading from a region? This actually results in two loads; one
  2003. // to fetch the address of the referenced value and one to fetch the
  2004. // referenced value.
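// For example, loading the value of a reference-typed field ('int &m;'): the
// first load yields the address the reference is bound to, and the second
// load reads the referenced 'int' value itself.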
  2005. if (const TypedValueRegion *TR =
  2006. dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {
  2007. QualType ValTy = TR->getValueType();
  2008. if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
  2009. static SimpleProgramPointTag
  2010. loadReferenceTag(TagProviderName, "Load Reference");
  2011. ExplodedNodeSet Tmp;
  2012. evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
  2013. location, &loadReferenceTag,
  2014. getContext().getPointerType(RT->getPointeeType()));
  2015. // Perform the load from the referenced value.
  2016. for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
  2017. state = (*I)->getState();
  2018. location = state->getSVal(BoundEx, (*I)->getLocationContext());
  2019. evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
  2020. }
  2021. return;
  2022. }
  2023. }
  2024. evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
  2025. }
  2026. void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
  2027. const Expr *NodeEx,
  2028. const Expr *BoundEx,
  2029. ExplodedNode *Pred,
  2030. ProgramStateRef state,
  2031. SVal location,
  2032. const ProgramPointTag *tag,
  2033. QualType LoadTy) {
  2034. assert(NodeEx);
  2035. assert(BoundEx);
  2036. // Evaluate the location (checks for bad dereferences).
  2037. ExplodedNodeSet Tmp;
  2038. evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  2039. if (Tmp.empty())
  2040. return;
  2041. StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  2042. if (location.isUndef())
  2043. return;
  2044. // Proceed with the load.
  2045. for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
  2046. state = (*NI)->getState();
  2047. const LocationContext *LCtx = (*NI)->getLocationContext();
  2048. SVal V = UnknownVal();
  2049. if (location.isValid()) {
  2050. if (LoadTy.isNull())
  2051. LoadTy = BoundEx->getType();
  2052. V = state->getSVal(location.castAs<Loc>(), LoadTy);
  2053. }
  2054. Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
  2055. ProgramPoint::PostLoadKind);
  2056. }
  2057. }
  2058. void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
  2059. const Stmt *NodeEx,
  2060. const Stmt *BoundEx,
  2061. ExplodedNode *Pred,
  2062. ProgramStateRef state,
  2063. SVal location,
  2064. const ProgramPointTag *tag,
  2065. bool isLoad) {
  2066. StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
2067. // Early checks for performance reasons.
  2068. if (location.isUnknown()) {
  2069. return;
  2070. }
  2071. ExplodedNodeSet Src;
  2072. BldrTop.takeNodes(Pred);
  2073. StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  2074. if (Pred->getState() != state) {
  2075. // Associate this new state with an ExplodedNode.
  2076. // FIXME: If I pass null tag, the graph is incorrect, e.g for
  2077. // int *p;
  2078. // p = 0;
  2079. // *p = 0xDEADBEEF;
  2080. // "p = 0" is not noted as "Null pointer value stored to 'p'" but
  2081. // instead "int *p" is noted as
  2082. // "Variable 'p' initialized to a null pointer value"
  2083. static SimpleProgramPointTag tag(TagProviderName, "Location");
  2084. Bldr.generateNode(NodeEx, Pred, state, &tag);
  2085. }
  2086. ExplodedNodeSet Tmp;
  2087. getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
  2088. NodeEx, BoundEx, *this);
  2089. BldrTop.addNodes(Tmp);
  2090. }
  2091. std::pair<const ProgramPointTag *, const ProgramPointTag*>
  2092. ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  2093. static SimpleProgramPointTag
  2094. eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
  2095. "Eagerly Assume True"),
  2096. eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
  2097. "Eagerly Assume False");
  2098. return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
  2099. &eagerlyAssumeBinOpBifurcationFalse);
  2100. }
  2101. void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
  2102. ExplodedNodeSet &Src,
  2103. const Expr *Ex) {
  2104. StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
  2105. for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
  2106. ExplodedNode *Pred = *I;
2107. // Test if the previous node was at the same expression. This can happen
  2108. // when the expression fails to evaluate to anything meaningful and
  2109. // (as an optimization) we don't generate a node.
  2110. ProgramPoint P = Pred->getLocation();
  2111. if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
  2112. continue;
  2113. }
  2114. ProgramStateRef state = Pred->getState();
  2115. SVal V = state->getSVal(Ex, Pred->getLocationContext());
  2116. Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
  2117. if (SEV && SEV->isExpression()) {
  2118. const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
  2119. geteagerlyAssumeBinOpBifurcationTags();
  2120. ProgramStateRef StateTrue, StateFalse;
  2121. std::tie(StateTrue, StateFalse) = state->assume(*SEV);
  2122. // First assume that the condition is true.
  2123. if (StateTrue) {
  2124. SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
  2125. StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
  2126. Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
  2127. }
  2128. // Next, assume that the condition is false.
  2129. if (StateFalse) {
  2130. SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
  2131. StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
  2132. Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
  2133. }
  2134. }
  2135. }
  2136. }
  2137. void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
  2138. ExplodedNodeSet &Dst) {
  2139. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2140. // We have processed both the inputs and the outputs. All of the outputs
  2141. // should evaluate to Locs. Nuke all of their values.
  2142. // FIXME: Some day in the future it would be nice to allow a "plug-in"
  2143. // which interprets the inline asm and stores proper results in the
  2144. // outputs.
  2145. ProgramStateRef state = Pred->getState();
  2146. for (const Expr *O : A->outputs()) {
  2147. SVal X = state->getSVal(O, Pred->getLocationContext());
  2148. assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef.
  2149. if (Optional<Loc> LV = X.getAs<Loc>())
  2150. state = state->bindLoc(*LV, UnknownVal());
  2151. }
  2152. Bldr.generateNode(A, Pred, state);
  2153. }
  2154. void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
  2155. ExplodedNodeSet &Dst) {
  2156. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2157. Bldr.generateNode(A, Pred, Pred->getState());
  2158. }
  2159. //===----------------------------------------------------------------------===//
  2160. // Visualization.
  2161. //===----------------------------------------------------------------------===//
  2162. #ifndef NDEBUG
  2163. static ExprEngine* GraphPrintCheckerState;
  2164. static SourceManager* GraphPrintSourceManager;
  2165. namespace llvm {
  2166. template<>
  2167. struct DOTGraphTraits<ExplodedNode*> :
  2168. public DefaultDOTGraphTraits {
  2169. DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}
  2170. // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  2171. // work.
  2172. static std::string getNodeAttributes(const ExplodedNode *N, void*) {
  2173. return "";
  2174. }
  2175. // De-duplicate some source location pretty-printing.
  2176. static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
  2177. if (SLoc.isFileID()) {
  2178. Out << "\\lline="
  2179. << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
  2180. << " col="
  2181. << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
  2182. << "\\l";
  2183. }
  2184. }
  2185. static void printLocation2(raw_ostream &Out, SourceLocation SLoc) {
  2186. if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc))
  2187. Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc);
  2188. else
  2189. SLoc.print(Out, *GraphPrintSourceManager);
  2190. }
  2191. static std::string getNodeLabel(const ExplodedNode *N, void*){
  2192. std::string sbuf;
  2193. llvm::raw_string_ostream Out(sbuf);
  2194. // Program Location.
  2195. ProgramPoint Loc = N->getLocation();
  2196. switch (Loc.getKind()) {
  2197. case ProgramPoint::BlockEntranceKind: {
  2198. Out << "Block Entrance: B"
  2199. << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
  2200. break;
  2201. }
  2202. case ProgramPoint::BlockExitKind:
  2203. assert (false);
  2204. break;
  2205. case ProgramPoint::CallEnterKind:
  2206. Out << "CallEnter";
  2207. break;
  2208. case ProgramPoint::CallExitBeginKind:
  2209. Out << "CallExitBegin";
  2210. break;
  2211. case ProgramPoint::CallExitEndKind:
  2212. Out << "CallExitEnd";
  2213. break;
  2214. case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
  2215. Out << "PostStmtPurgeDeadSymbols";
  2216. break;
  2217. case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
  2218. Out << "PreStmtPurgeDeadSymbols";
  2219. break;
  2220. case ProgramPoint::EpsilonKind:
  2221. Out << "Epsilon Point";
  2222. break;
  2223. case ProgramPoint::PreImplicitCallKind: {
  2224. ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
  2225. Out << "PreCall: ";
  2226. // FIXME: Get proper printing options.
  2227. PC.getDecl()->print(Out, LangOptions());
  2228. printLocation(Out, PC.getLocation());
  2229. break;
  2230. }
  2231. case ProgramPoint::PostImplicitCallKind: {
  2232. ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
  2233. Out << "PostCall: ";
  2234. // FIXME: Get proper printing options.
  2235. PC.getDecl()->print(Out, LangOptions());
  2236. printLocation(Out, PC.getLocation());
  2237. break;
  2238. }
  2239. case ProgramPoint::PostInitializerKind: {
  2240. Out << "PostInitializer: ";
  2241. const CXXCtorInitializer *Init =
  2242. Loc.castAs<PostInitializer>().getInitializer();
  2243. if (const FieldDecl *FD = Init->getAnyMember())
  2244. Out << *FD;
  2245. else {
  2246. QualType Ty = Init->getTypeSourceInfo()->getType();
  2247. Ty = Ty.getLocalUnqualifiedType();
  2248. LangOptions LO; // FIXME.
  2249. Ty.print(Out, LO);
  2250. }
  2251. break;
  2252. }
  2253. case ProgramPoint::BlockEdgeKind: {
  2254. const BlockEdge &E = Loc.castAs<BlockEdge>();
  2255. Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
  2256. << E.getDst()->getBlockID() << ')';
  2257. if (const Stmt *T = E.getSrc()->getTerminator()) {
  2258. SourceLocation SLoc = T->getLocStart();
  2259. Out << "\\|Terminator: ";
  2260. LangOptions LO; // FIXME.
  2261. E.getSrc()->printTerminator(Out, LO);
  2262. if (SLoc.isFileID()) {
  2263. Out << "\\lline="
  2264. << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
  2265. << " col="
  2266. << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
  2267. }
  2268. if (isa<SwitchStmt>(T)) {
  2269. const Stmt *Label = E.getDst()->getLabel();
  2270. if (Label) {
  2271. if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
  2272. Out << "\\lcase ";
  2273. LangOptions LO; // FIXME.
  2274. if (C->getLHS())
  2275. C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));
  2276. if (const Stmt *RHS = C->getRHS()) {
  2277. Out << " .. ";
  2278. RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
  2279. }
  2280. Out << ":";
  2281. }
  2282. else {
  2283. assert (isa<DefaultStmt>(Label));
  2284. Out << "\\ldefault:";
  2285. }
  2286. }
  2287. else
  2288. Out << "\\l(implicit) default:";
  2289. }
  2290. else if (isa<IndirectGotoStmt>(T)) {
  2291. // FIXME
  2292. }
  2293. else {
  2294. Out << "\\lCondition: ";
  2295. if (*E.getSrc()->succ_begin() == E.getDst())
  2296. Out << "true";
  2297. else
  2298. Out << "false";
  2299. }
  2300. Out << "\\l";
  2301. }
  2302. break;
  2303. }
  2304. default: {
  2305. const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
  2306. assert(S != nullptr && "Expecting non-null Stmt");
  2307. Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
  2308. LangOptions LO; // FIXME.
  2309. S->printPretty(Out, nullptr, PrintingPolicy(LO));
  2310. printLocation(Out, S->getLocStart());
  2311. if (Loc.getAs<PreStmt>())
  2312. Out << "\\lPreStmt\\l;";
  2313. else if (Loc.getAs<PostLoad>())
  2314. Out << "\\lPostLoad\\l;";
  2315. else if (Loc.getAs<PostStore>())
  2316. Out << "\\lPostStore\\l";
  2317. else if (Loc.getAs<PostLValue>())
  2318. Out << "\\lPostLValue\\l";
  2319. break;
  2320. }
  2321. }
  2322. ProgramStateRef state = N->getState();
  2323. Out << "\\|StateID: " << (const void*) state.get()
  2324. << " NodeID: " << (const void*) N << "\\|";
  2325. // Analysis stack backtrace.
  2326. Out << "Location context stack (from current to outer):\\l";
  2327. const LocationContext *LC = Loc.getLocationContext();
  2328. unsigned Idx = 0;
  2329. for (; LC; LC = LC->getParent(), ++Idx) {
  2330. Out << Idx << ". (" << (const void *)LC << ") ";
  2331. switch (LC->getKind()) {
  2332. case LocationContext::StackFrame:
  2333. if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl()))
  2334. Out << "Calling " << D->getQualifiedNameAsString();
  2335. else
  2336. Out << "Calling anonymous code";
  2337. if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) {
  2338. Out << " at ";
  2339. printLocation2(Out, S->getLocStart());
  2340. }
  2341. break;
  2342. case LocationContext::Block:
  2343. Out << "Invoking block";
  2344. if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) {
  2345. Out << " defined at ";
  2346. printLocation2(Out, D->getLocStart());
  2347. }
  2348. break;
  2349. case LocationContext::Scope:
  2350. Out << "Entering scope";
  2351. // FIXME: Add more info once ScopeContext is activated.
  2352. break;
  2353. }
  2354. Out << "\\l";
  2355. }
  2356. Out << "\\l";
  2357. state->printDOT(Out);
  2358. Out << "\\l";
  2359. if (const ProgramPointTag *tag = Loc.getTag()) {
  2360. Out << "\\|Tag: " << tag->getTagDescription();
  2361. Out << "\\l";
  2362. }
  2363. return Out.str();
  2364. }
  2365. };
  2366. } // end llvm namespace
  2367. #endif
  2368. void ExprEngine::ViewGraph(bool trim) {
  2369. #ifndef NDEBUG
  2370. if (trim) {
  2371. std::vector<const ExplodedNode*> Src;
  2372. // Flush any outstanding reports to make sure we cover all the nodes.
  2373. // This does not cause them to get displayed.
  2374. for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I)
  2375. const_cast<BugType*>(*I)->FlushReports(BR);
  2376. // Iterate through the reports and get their nodes.
  2377. for (BugReporter::EQClasses_iterator
  2378. EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
  2379. ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
  2380. if (N) Src.push_back(N);
  2381. }
  2382. ViewGraph(Src);
  2383. }
  2384. else {
  2385. GraphPrintCheckerState = this;
  2386. GraphPrintSourceManager = &getContext().getSourceManager();
  2387. llvm::ViewGraph(*G.roots_begin(), "ExprEngine");
  2388. GraphPrintCheckerState = nullptr;
  2389. GraphPrintSourceManager = nullptr;
  2390. }
  2391. #endif
  2392. }
  2393. void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
  2394. #ifndef NDEBUG
  2395. GraphPrintCheckerState = this;
  2396. GraphPrintSourceManager = &getContext().getSourceManager();
  2397. std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));
  2398. if (!TrimmedG.get())
  2399. llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  2400. else
  2401. llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");
  2402. GraphPrintCheckerState = nullptr;
  2403. GraphPrintSourceManager = nullptr;
  2404. #endif
  2405. }