ExprEngine.cpp

  1. //=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines a meta-engine for path-sensitive dataflow analysis that
  11. // is built on GREngine, but provides the boilerplate to execute transfer
  12. // functions and build the ExplodedGraph at the expression level.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
  16. #include "PrettyStackTraceLocationContext.h"
  17. #include "clang/AST/CharUnits.h"
  18. #include "clang/AST/ParentMap.h"
  19. #include "clang/AST/StmtCXX.h"
  20. #include "clang/AST/StmtObjC.h"
  21. #include "clang/Basic/Builtins.h"
  22. #include "clang/Basic/PrettyStackTrace.h"
  23. #include "clang/Basic/SourceManager.h"
  24. #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
  25. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  26. #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
  27. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  28. #include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
  29. #include "llvm/ADT/Statistic.h"
  30. #include "llvm/Support/SaveAndRestore.h"
  31. #include "llvm/Support/raw_ostream.h"
  32. #ifndef NDEBUG
  33. #include "llvm/Support/GraphWriter.h"
  34. #endif
  35. using namespace clang;
  36. using namespace ento;
  37. using llvm::APSInt;
  38. #define DEBUG_TYPE "ExprEngine"
  39. STATISTIC(NumRemoveDeadBindings,
  40. "The # of times RemoveDeadBindings is called");
  41. STATISTIC(NumMaxBlockCountReached,
  42. "The # of aborted paths due to reaching the maximum block count in "
  43. "a top level function");
  44. STATISTIC(NumMaxBlockCountReachedInInlined,
  45. "The # of aborted paths due to reaching the maximum block count in "
  46. "an inlined function");
  47. STATISTIC(NumTimesRetriedWithoutInlining,
  48. "The # of times we re-evaluated a call without inlining");
  49. typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
  50. CXXBindTemporaryContext;
  51. // Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
  52. // The StackFrameContext ensures that nested calls due to inlined recursive
  53. // functions do not interfere.
  54. REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
  55. llvm::ImmutableSet<CXXBindTemporaryContext>)
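// (These markers are added in VisitCXXBindTemporaryExpr, consulted in
// processCleanupTemporaryBranch, and erased again in ProcessTemporaryDtor
// below.)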
  56. //===----------------------------------------------------------------------===//
  57. // Engine construction and deletion.
  58. //===----------------------------------------------------------------------===//
  59. static const char* TagProviderName = "ExprEngine";
  60. ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
  61. SetOfConstDecls *VisitedCalleesIn,
  62. FunctionSummariesTy *FS,
  63. InliningModes HowToInlineIn)
  64. : AMgr(mgr),
  65. AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
  66. Engine(*this, FS),
  67. G(Engine.getGraph()),
  68. StateMgr(getContext(), mgr.getStoreManagerCreator(),
  69. mgr.getConstraintManagerCreator(), G.getAllocator(),
  70. this),
  71. SymMgr(StateMgr.getSymbolManager()),
  72. svalBuilder(StateMgr.getSValBuilder()),
  73. currStmtIdx(0), currBldrCtx(nullptr),
  74. ObjCNoRet(mgr.getASTContext()),
  75. ObjCGCEnabled(gcEnabled), BR(mgr, *this),
  76. VisitedCallees(VisitedCalleesIn),
  77. HowToInline(HowToInlineIn)
  78. {
  79. unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  80. if (TrimInterval != 0) {
  81. // Enable eager node reclamation when constructing the ExplodedGraph.
  82. G.enableNodeReclamation(TrimInterval);
  83. }
  84. }
  85. ExprEngine::~ExprEngine() {
  86. BR.FlushReports();
  87. }
  88. //===----------------------------------------------------------------------===//
  89. // Utility methods.
  90. //===----------------------------------------------------------------------===//
  91. ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  92. ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  93. const Decl *D = InitLoc->getDecl();
  94. // Preconditions.
  95. // FIXME: It would be nice if we had a more general mechanism to add
  96. // such preconditions. Some day.
  97. do {
  98. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
  99. // Precondition: the first argument of 'main' is an integer guaranteed
  100. // to be > 0.
  101. const IdentifierInfo *II = FD->getIdentifier();
  102. if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
  103. break;
  104. const ParmVarDecl *PD = FD->getParamDecl(0);
  105. QualType T = PD->getType();
  106. const BuiltinType *BT = dyn_cast<BuiltinType>(T);
  107. if (!BT || !BT->isInteger())
  108. break;
  109. const MemRegion *R = state->getRegion(PD, InitLoc);
  110. if (!R)
  111. break;
  112. SVal V = state->getSVal(loc::MemRegionVal(R));
  113. SVal Constraint_untested = evalBinOp(state, BO_GT, V,
  114. svalBuilder.makeZeroVal(T),
  115. svalBuilder.getConditionType());
  116. Optional<DefinedOrUnknownSVal> Constraint =
  117. Constraint_untested.getAs<DefinedOrUnknownSVal>();
  118. if (!Constraint)
  119. break;
  120. if (ProgramStateRef newState = state->assume(*Constraint, true))
  121. state = newState;
  122. }
  123. break;
  124. }
  125. while (0);
  126. if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
  127. // Precondition: 'self' is always non-null upon entry to an Objective-C
  128. // method.
  129. const ImplicitParamDecl *SelfD = MD->getSelfDecl();
  130. const MemRegion *R = state->getRegion(SelfD, InitLoc);
  131. SVal V = state->getSVal(loc::MemRegionVal(R));
  132. if (Optional<Loc> LV = V.getAs<Loc>()) {
  133. // Assume that the pointer value in 'self' is non-null.
  134. state = state->assume(*LV, true);
  135. assert(state && "'self' cannot be null");
  136. }
  137. }
  138. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
  139. if (!MD->isStatic()) {
  140. // Precondition: 'this' is always non-null upon entry to the
  141. // top-level function. This is our starting assumption for
  142. // analyzing an "open" program.
  143. const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
  144. if (SFC->getParent() == nullptr) {
  145. loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
  146. SVal V = state->getSVal(L);
  147. if (Optional<Loc> LV = V.getAs<Loc>()) {
  148. state = state->assume(*LV, true);
  149. assert(state && "'this' cannot be null");
  150. }
  151. }
  152. }
  153. }
  154. return state;
  155. }
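// Illustrative example of the preconditions above: for a top-level entry
// point such as
//
//   int main(int argc, char **argv) {
//     if (argc == 0)   // infeasible: the initial state already assumes
//       return 1;      // argc > 0
//     ...
//   }
//
// the 'argc > 0' constraint is baked into the initial state, and 'self' /
// 'this' likewise start out assumed non-null.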
  156. ProgramStateRef
  157. ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
  158. const LocationContext *LC,
  159. const Expr *Ex,
  160. const Expr *Result) {
  161. SVal V = State->getSVal(Ex, LC);
  162. if (!Result) {
  163. // If we don't have an explicit result expression, we're in "if needed"
  164. // mode. Only create a region if the current value is a NonLoc.
  165. if (!V.getAs<NonLoc>())
  166. return State;
  167. Result = Ex;
  168. } else {
  169. // We need to create a region no matter what. For sanity, make sure we don't
  170. // try to stuff a Loc into a non-pointer temporary region.
  171. assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
  172. Result->getType()->isMemberPointerType());
  173. }
  174. ProgramStateManager &StateMgr = State->getStateManager();
  175. MemRegionManager &MRMgr = StateMgr.getRegionManager();
  176. StoreManager &StoreMgr = StateMgr.getStoreManager();
  177. // MaterializeTemporaryExpr may appear out of place, after a few field and
  178. // base-class accesses have been made to the object, even though semantically
  179. // it is the whole object that gets materialized and lifetime-extended.
  180. //
  181. // For example:
  182. //
  183. // `-MaterializeTemporaryExpr
  184. // `-MemberExpr
  185. // `-CXXTemporaryObjectExpr
  186. //
  187. // instead of the more natural
  188. //
  189. // `-MemberExpr
  190. // `-MaterializeTemporaryExpr
  191. // `-CXXTemporaryObjectExpr
  192. //
  193. // Use the usual methods for obtaining the expression of the base object,
  194. // and record the adjustments that we need to make to obtain the sub-object
  195. // that the whole expression 'Ex' refers to. This is the usual approach;
  196. // CodeGen takes a similar route.
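//
// An illustrative source pattern that produces the first AST shape:
//
//   struct S { int x; };
//   const int &r = S().x;  // the whole temporary S() is lifetime-extended,
//                          // but 'Ex' refers only to the sub-object .x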
  197. SmallVector<const Expr *, 2> CommaLHSs;
  198. SmallVector<SubobjectAdjustment, 2> Adjustments;
  199. const Expr *Init = Ex->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);
  200. const TypedValueRegion *TR = nullptr;
  201. if (const MaterializeTemporaryExpr *MT =
  202. dyn_cast<MaterializeTemporaryExpr>(Result)) {
  203. StorageDuration SD = MT->getStorageDuration();
  204. // If this object is bound to a reference with static storage duration, we
  205. // put it in a different region to prevent "address leakage" warnings.
  206. if (SD == SD_Static || SD == SD_Thread)
  207. TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  208. }
  209. if (!TR)
  210. TR = MRMgr.getCXXTempObjectRegion(Init, LC);
  211. SVal Reg = loc::MemRegionVal(TR);
  212. // Make the necessary adjustments to obtain the sub-object.
  213. for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
  214. const SubobjectAdjustment &Adj = *I;
  215. switch (Adj.Kind) {
  216. case SubobjectAdjustment::DerivedToBaseAdjustment:
  217. Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
  218. break;
  219. case SubobjectAdjustment::FieldAdjustment:
  220. Reg = StoreMgr.getLValueField(Adj.Field, Reg);
  221. break;
  222. case SubobjectAdjustment::MemberPointerAdjustment:
  223. // FIXME: Unimplemented.
  224. State->bindDefault(Reg, UnknownVal());
  225. return State;
  226. }
  227. }
  228. // Try to recover some path sensitivity in case we couldn't compute the value.
  229. if (V.isUnknown())
  230. V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
  231. currBldrCtx->blockCount());
  232. // Bind the value of the expression to the sub-object region, and then bind
  233. // the sub-object region to our expression.
  234. State = State->bindLoc(Reg, V);
  235. State = State->BindExpr(Result, LC, Reg);
  236. return State;
  237. }
  238. //===----------------------------------------------------------------------===//
  239. // Top-level transfer function logic (Dispatcher).
  240. //===----------------------------------------------------------------------===//
  241. /// evalAssume - Called by ConstraintManager. Used to call checker-specific
  242. /// logic for handling assumptions on symbolic values.
  243. ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
  244. SVal cond, bool assumption) {
  245. return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
  246. }
  247. ProgramStateRef
  248. ExprEngine::processRegionChanges(ProgramStateRef state,
  249. const InvalidatedSymbols *invalidated,
  250. ArrayRef<const MemRegion *> Explicits,
  251. ArrayRef<const MemRegion *> Regions,
  252. const CallEvent *Call) {
  253. return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
  254. Explicits, Regions, Call);
  255. }
  256. void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
  257. const char *NL, const char *Sep) {
  258. getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
  259. }
  260. void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  261. getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
  262. }
  263. void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
  264. unsigned StmtIdx, NodeBuilderContext *Ctx) {
  265. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  266. currStmtIdx = StmtIdx;
  267. currBldrCtx = Ctx;
  268. switch (E.getKind()) {
  269. case CFGElement::Statement:
  270. ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
  271. return;
  272. case CFGElement::Initializer:
  273. ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
  274. return;
  275. case CFGElement::NewAllocator:
  276. ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
  277. Pred);
  278. return;
  279. case CFGElement::AutomaticObjectDtor:
  280. case CFGElement::DeleteDtor:
  281. case CFGElement::BaseDtor:
  282. case CFGElement::MemberDtor:
  283. case CFGElement::TemporaryDtor:
  284. ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
  285. return;
  286. }
  287. }
  288. static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
  289. const CFGStmt S,
  290. const ExplodedNode *Pred,
  291. const LocationContext *LC) {
  292. // Are we never purging state values?
  293. if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
  294. return false;
  295. // Is this the beginning of a basic block?
  296. if (Pred->getLocation().getAs<BlockEntrance>())
  297. return true;
  298. // Is this a non-expression statement?
  299. if (!isa<Expr>(S.getStmt()))
  300. return true;
  301. // Run before processing a call.
  302. if (CallEvent::isCallStmt(S.getStmt()))
  303. return true;
  304. // Is this an expression that is consumed by another expression? If so,
  305. // postpone cleaning out the state.
  306. ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  307. return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
  308. }
  309. void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
  310. const Stmt *ReferenceStmt,
  311. const LocationContext *LC,
  312. const Stmt *DiagnosticStmt,
  313. ProgramPoint::Kind K) {
  314. assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
  315. ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
  316. && "PostStmt is not generally supported by the SymbolReaper yet");
  317. assert(LC && "Must pass the current (or expiring) LocationContext");
  318. if (!DiagnosticStmt) {
  319. DiagnosticStmt = ReferenceStmt;
  320. assert(DiagnosticStmt && "Required for clearing a LocationContext");
  321. }
  322. NumRemoveDeadBindings++;
  323. ProgramStateRef CleanedState = Pred->getState();
  324. // LC is the location context being destroyed, but SymbolReaper wants a
  325. // location context that is still live. (If this is the top-level stack
  326. // frame, this will be null.)
  327. if (!ReferenceStmt) {
  328. assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
  329. "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
  330. LC = LC->getParent();
  331. }
  332. const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  333. SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());
  334. getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);
  335. // Create a state in which dead bindings are removed from the environment
  336. // and the store. TODO: The function should just return new env and store,
  337. // not a new state.
  338. CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);
  339. // Process any special transfer function for dead symbols.
  340. // A tag to track convenience transitions, which can be removed at cleanup.
  341. static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  342. if (!SymReaper.hasDeadSymbols()) {
  343. // Generate a CleanedNode that has the environment and store cleaned
  344. // up. Since no symbols are dead, we can optimize and not clean out
  345. // the constraint manager.
  346. StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
  347. Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);
  348. } else {
  349. // Call checkers with the non-cleaned state so that they could query the
  350. // values of the soon to be dead symbols.
  351. ExplodedNodeSet CheckedSet;
  352. getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
  353. DiagnosticStmt, *this, K);
  354. // For each node in CheckedSet, generate CleanedNodes that have the
  355. // environment, the store, and the constraints cleaned up but have the
  356. // user-supplied states as the predecessors.
  357. StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
  358. for (ExplodedNodeSet::const_iterator
  359. I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
  360. ProgramStateRef CheckerState = (*I)->getState();
  361. // The constraint manager has not been cleaned up yet, so clean up now.
  362. CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
  363. SymReaper);
  364. assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
  365. "Checkers are not allowed to modify the Environment as a part of "
  366. "checkDeadSymbols processing.");
  367. assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
  368. "Checkers are not allowed to modify the Store as a part of "
  369. "checkDeadSymbols processing.");
  370. // Create a state based on CleanedState with CheckerState GDM and
  371. // generate a transition to that state.
  372. ProgramStateRef CleanedCheckerSt =
  373. StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
  374. Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
  375. }
  376. }
  377. }
  378. void ExprEngine::ProcessStmt(const CFGStmt S,
  379. ExplodedNode *Pred) {
  380. // Reclaim any unnecessary nodes in the ExplodedGraph.
  381. G.reclaimRecentlyAllocatedNodes();
  382. const Stmt *currStmt = S.getStmt();
  383. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  384. currStmt->getLocStart(),
  385. "Error evaluating statement");
  386. // Remove dead bindings and symbols.
  387. ExplodedNodeSet CleanedStates;
  388. if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())){
  389. removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  390. } else
  391. CleanedStates.Add(Pred);
  392. // Visit the statement.
  393. ExplodedNodeSet Dst;
  394. for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
  395. E = CleanedStates.end(); I != E; ++I) {
  396. ExplodedNodeSet DstI;
  397. // Visit the statement.
  398. Visit(currStmt, *I, DstI);
  399. Dst.insert(DstI);
  400. }
  401. // Enqueue the new nodes onto the work list.
  402. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  403. }
  404. void ExprEngine::ProcessInitializer(const CFGInitializer Init,
  405. ExplodedNode *Pred) {
  406. const CXXCtorInitializer *BMI = Init.getInitializer();
  407. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  408. BMI->getSourceLocation(),
  409. "Error evaluating initializer");
  410. // We don't clean up dead bindings here.
  411. const StackFrameContext *stackFrame =
  412. cast<StackFrameContext>(Pred->getLocationContext());
  413. const CXXConstructorDecl *decl =
  414. cast<CXXConstructorDecl>(stackFrame->getDecl());
  415. ProgramStateRef State = Pred->getState();
  416. SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));
  417. ExplodedNodeSet Tmp(Pred);
  418. SVal FieldLoc;
  419. // Evaluate the initializer, if necessary
  420. if (BMI->isAnyMemberInitializer()) {
  421. // Constructors build the object directly in the field,
  422. // but non-objects must be copied in from the initializer.
  423. if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
  424. assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
  425. (void)CtorExpr;
  426. // The field was directly constructed, so there is no need to bind.
  427. } else {
  428. const Expr *Init = BMI->getInit()->IgnoreImplicit();
  429. const ValueDecl *Field;
  430. if (BMI->isIndirectMemberInitializer()) {
  431. Field = BMI->getIndirectMember();
  432. FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
  433. } else {
  434. Field = BMI->getMember();
  435. FieldLoc = State->getLValue(BMI->getMember(), thisVal);
  436. }
  437. SVal InitVal;
  438. if (Init->getType()->isArrayType()) {
  439. // Handle arrays of trivial type. We can represent this with a
  440. // primitive load/copy from the base array region.
  441. const ArraySubscriptExpr *ASE;
  442. while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
  443. Init = ASE->getBase()->IgnoreImplicit();
  444. SVal LValue = State->getSVal(Init, stackFrame);
  445. if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
  446. InitVal = State->getSVal(*LValueLoc);
  447. // If we fail to get the value for some reason, use a symbolic value.
  448. if (InitVal.isUnknownOrUndef()) {
  449. SValBuilder &SVB = getSValBuilder();
  450. InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
  451. Field->getType(),
  452. currBldrCtx->blockCount());
  453. }
  454. } else {
  455. InitVal = State->getSVal(BMI->getInit(), stackFrame);
  456. }
  457. assert(Tmp.size() == 1 && "have not generated any new nodes yet");
  458. assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
  459. Tmp.clear();
  460. PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  461. evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
  462. }
  463. } else {
  464. assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
  465. // We already did all the work when visiting the CXXConstructExpr.
  466. }
  467. // Construct PostInitializer nodes whether the state changed or not,
  468. // so that the diagnostics don't get confused.
  469. PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  470. ExplodedNodeSet Dst;
  471. NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  472. for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
  473. ExplodedNode *N = *I;
  474. Bldr.generateNode(PP, N->getState(), N);
  475. }
  476. // Enqueue the new nodes onto the work list.
  477. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  478. }
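// Illustrative example for the member-initializer path above: in
//
//   struct C { int x; C() : x(42) {} };
//
// the initializer '42' is evaluated and bound to the field region of
// this->x via evalBind, and a PostInitializer node is generated whether or
// not the binding changed the state.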
  479. void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
  480. ExplodedNode *Pred) {
  481. ExplodedNodeSet Dst;
  482. switch (D.getKind()) {
  483. case CFGElement::AutomaticObjectDtor:
  484. ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
  485. break;
  486. case CFGElement::BaseDtor:
  487. ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
  488. break;
  489. case CFGElement::MemberDtor:
  490. ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
  491. break;
  492. case CFGElement::TemporaryDtor:
  493. ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
  494. break;
  495. case CFGElement::DeleteDtor:
  496. ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
  497. break;
  498. default:
  499. llvm_unreachable("Unexpected dtor kind.");
  500. }
  501. // Enqueue the new nodes onto the work list.
  502. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  503. }
  504. void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
  505. ExplodedNode *Pred) {
  506. ExplodedNodeSet Dst;
  507. AnalysisManager &AMgr = getAnalysisManager();
  508. AnalyzerOptions &Opts = AMgr.options;
  509. // TODO: We're not evaluating allocators for all cases just yet as
  510. // we're not handling the return value correctly, which causes false
  511. // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  512. if (Opts.mayInlineCXXAllocator())
  513. VisitCXXNewAllocatorCall(NE, Pred, Dst);
  514. else {
  515. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  516. const LocationContext *LCtx = Pred->getLocationContext();
  517. PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
  518. Bldr.generateNode(PP, Pred->getState(), Pred);
  519. }
  520. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  521. }
  522. void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
  523. ExplodedNode *Pred,
  524. ExplodedNodeSet &Dst) {
  525. const VarDecl *varDecl = Dtor.getVarDecl();
  526. QualType varType = varDecl->getType();
  527. ProgramStateRef state = Pred->getState();
  528. SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  529. const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();
  530. if (varType->isReferenceType()) {
  531. Region = state->getSVal(Region).getAsRegion()->getBaseRegion();
  532. varType = cast<TypedValueRegion>(Region)->getValueType();
  533. }
  534. VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
  535. Pred, Dst);
  536. }
  537. void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
  538. ExplodedNode *Pred,
  539. ExplodedNodeSet &Dst) {
  540. ProgramStateRef State = Pred->getState();
  541. const LocationContext *LCtx = Pred->getLocationContext();
  542. const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  543. const Stmt *Arg = DE->getArgument();
  544. SVal ArgVal = State->getSVal(Arg, LCtx);
  545. // If the argument to delete is known to be a null value,
  546. // don't run destructor.
  547. if (State->isNull(ArgVal).isConstrainedTrue()) {
  548. QualType DTy = DE->getDestroyedType();
  549. QualType BTy = getContext().getBaseElementType(DTy);
  550. const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
  551. const CXXDestructorDecl *Dtor = RD->getDestructor();
  552. PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
  553. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  554. Bldr.generateNode(PP, Pred->getState(), Pred);
  555. return;
  556. }
  557. VisitCXXDestructor(DE->getDestroyedType(),
  558. ArgVal.getAsRegion(),
  559. DE, /*IsBase=*/ false,
  560. Pred, Dst);
  561. }
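// Illustrative example for the null check above:
//
//   S *p = nullptr;
//   delete p;   // the destructor body is not modeled; only a
//               // PostImplicitCall node for ~S() is generated so this CFG
//               // element is still accounted for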
  562. void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
  563. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  564. const LocationContext *LCtx = Pred->getLocationContext();
  565. const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  566. Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
  567. LCtx->getCurrentStackFrame());
  568. SVal ThisVal = Pred->getState()->getSVal(ThisPtr);
  569. // Create the base object region.
  570. const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  571. QualType BaseTy = Base->getType();
  572. SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
  573. Base->isVirtual());
  574. VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
  575. CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
  576. }
  577. void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
  578. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  579. const FieldDecl *Member = D.getFieldDecl();
  580. ProgramStateRef State = Pred->getState();
  581. const LocationContext *LCtx = Pred->getLocationContext();
  582. const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  583. Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
  584. LCtx->getCurrentStackFrame());
  585. SVal FieldVal =
  586. State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());
  587. VisitCXXDestructor(Member->getType(),
  588. FieldVal.castAs<loc::MemRegionVal>().getRegion(),
  589. CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
  590. }
  591. void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
  592. ExplodedNode *Pred,
  593. ExplodedNodeSet &Dst) {
  594. ExplodedNodeSet CleanDtorState;
  595. StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  596. ProgramStateRef State = Pred->getState();
  597. if (State->contains<InitializedTemporariesSet>(
  598. std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
  599. // FIXME: Currently we insert temporary destructors for default parameters,
  600. // but we don't insert the constructors.
  601. State = State->remove<InitializedTemporariesSet>(
  602. std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  603. }
  604. StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);
  605. QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  606. // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  607. // bound to default parameters.
  608. assert(CleanDtorState.size() <= 1);
  609. ExplodedNode *CleanPred =
  610. CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  611. // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  612. // we just put a NULL region for now. This will need to be changed later.
  613. VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
  614. /*IsBase=*/false, CleanPred, Dst);
  615. }
  616. void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
  617. NodeBuilderContext &BldCtx,
  618. ExplodedNode *Pred,
  619. ExplodedNodeSet &Dst,
  620. const CFGBlock *DstT,
  621. const CFGBlock *DstF) {
  622. BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  623. if (Pred->getState()->contains<InitializedTemporariesSet>(
  624. std::make_pair(BTE, Pred->getStackFrame()))) {
  625. TempDtorBuilder.markInfeasible(false);
  626. TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  627. } else {
  628. TempDtorBuilder.markInfeasible(true);
  629. TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  630. }
  631. }
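// Illustrative example: in
//
//   bool ok = cond && S().flag;
//
// the CFG guards ~S() with a branch; the callback above keeps only the
// successor that matches whether the corresponding CXXBindTemporaryExpr was
// actually evaluated on this path.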
  632. void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
  633. ExplodedNodeSet &PreVisit,
  634. ExplodedNodeSet &Dst) {
  635. if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
  636. // In case we don't have temporary destructors in the CFG, do not mark
  637. // the initialization - we would otherwise never clean it up.
  638. Dst = PreVisit;
  639. return;
  640. }
  641. StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  642. for (ExplodedNode *Node : PreVisit) {
  643. ProgramStateRef State = Node->getState();
  644. if (!State->contains<InitializedTemporariesSet>(
  645. std::make_pair(BTE, Node->getStackFrame()))) {
  646. // FIXME: Currently the state might already contain the marker due to
  647. // incorrect handling of temporaries bound to default parameters; for
  648. // those, we currently skip the CXXBindTemporaryExpr but rely on adding
  649. // temporary destructor nodes.
  650. State = State->add<InitializedTemporariesSet>(
  651. std::make_pair(BTE, Node->getStackFrame()));
  652. }
  653. StmtBldr.generateNode(BTE, Node, State);
  654. }
  655. }
  656. void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
  657. ExplodedNodeSet &DstTop) {
  658. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  659. S->getLocStart(),
  660. "Error evaluating statement");
  661. ExplodedNodeSet Dst;
  662. StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);
  663. assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());
  664. switch (S->getStmtClass()) {
  665. // C++ and ARC stuff we don't support yet.
  666. case Expr::ObjCIndirectCopyRestoreExprClass:
  667. case Stmt::CXXDependentScopeMemberExprClass:
  668. case Stmt::CXXInheritedCtorInitExprClass:
  669. case Stmt::CXXTryStmtClass:
  670. case Stmt::CXXTypeidExprClass:
  671. case Stmt::CXXUuidofExprClass:
  672. case Stmt::CXXFoldExprClass:
  673. case Stmt::MSPropertyRefExprClass:
  674. case Stmt::MSPropertySubscriptExprClass:
  675. case Stmt::CXXUnresolvedConstructExprClass:
  676. case Stmt::DependentScopeDeclRefExprClass:
  677. case Stmt::ArrayTypeTraitExprClass:
  678. case Stmt::ExpressionTraitExprClass:
  679. case Stmt::UnresolvedLookupExprClass:
  680. case Stmt::UnresolvedMemberExprClass:
  681. case Stmt::TypoExprClass:
  682. case Stmt::CXXNoexceptExprClass:
  683. case Stmt::PackExpansionExprClass:
  684. case Stmt::SubstNonTypeTemplateParmPackExprClass:
  685. case Stmt::FunctionParmPackExprClass:
  686. case Stmt::CoroutineBodyStmtClass:
  687. case Stmt::CoawaitExprClass:
  688. case Stmt::CoreturnStmtClass:
  689. case Stmt::CoyieldExprClass:
  690. case Stmt::SEHTryStmtClass:
  691. case Stmt::SEHExceptStmtClass:
  692. case Stmt::SEHLeaveStmtClass:
  693. case Stmt::SEHFinallyStmtClass: {
  694. const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
  695. Engine.addAbortedBlock(node, currBldrCtx->getBlock());
  696. break;
  697. }
  698. case Stmt::ParenExprClass:
  699. llvm_unreachable("ParenExprs already handled.");
  700. case Stmt::GenericSelectionExprClass:
  701. llvm_unreachable("GenericSelectionExprs already handled.");
  702. // Cases that should never be evaluated simply because they shouldn't
  703. // appear in the CFG.
  704. case Stmt::BreakStmtClass:
  705. case Stmt::CaseStmtClass:
  706. case Stmt::CompoundStmtClass:
  707. case Stmt::ContinueStmtClass:
  708. case Stmt::CXXForRangeStmtClass:
  709. case Stmt::DefaultStmtClass:
  710. case Stmt::DoStmtClass:
  711. case Stmt::ForStmtClass:
  712. case Stmt::GotoStmtClass:
  713. case Stmt::IfStmtClass:
  714. case Stmt::IndirectGotoStmtClass:
  715. case Stmt::LabelStmtClass:
  716. case Stmt::NoStmtClass:
  717. case Stmt::NullStmtClass:
  718. case Stmt::SwitchStmtClass:
  719. case Stmt::WhileStmtClass:
  720. case Expr::MSDependentExistsStmtClass:
  721. case Stmt::CapturedStmtClass:
  722. case Stmt::OMPParallelDirectiveClass:
  723. case Stmt::OMPSimdDirectiveClass:
  724. case Stmt::OMPForDirectiveClass:
  725. case Stmt::OMPForSimdDirectiveClass:
  726. case Stmt::OMPSectionsDirectiveClass:
  727. case Stmt::OMPSectionDirectiveClass:
  728. case Stmt::OMPSingleDirectiveClass:
  729. case Stmt::OMPMasterDirectiveClass:
  730. case Stmt::OMPCriticalDirectiveClass:
  731. case Stmt::OMPParallelForDirectiveClass:
  732. case Stmt::OMPParallelForSimdDirectiveClass:
  733. case Stmt::OMPParallelSectionsDirectiveClass:
  734. case Stmt::OMPTaskDirectiveClass:
  735. case Stmt::OMPTaskyieldDirectiveClass:
  736. case Stmt::OMPBarrierDirectiveClass:
  737. case Stmt::OMPTaskwaitDirectiveClass:
  738. case Stmt::OMPTaskgroupDirectiveClass:
  739. case Stmt::OMPFlushDirectiveClass:
  740. case Stmt::OMPOrderedDirectiveClass:
  741. case Stmt::OMPAtomicDirectiveClass:
  742. case Stmt::OMPTargetDirectiveClass:
  743. case Stmt::OMPTargetDataDirectiveClass:
  744. case Stmt::OMPTargetEnterDataDirectiveClass:
  745. case Stmt::OMPTargetExitDataDirectiveClass:
  746. case Stmt::OMPTargetParallelDirectiveClass:
  747. case Stmt::OMPTargetParallelForDirectiveClass:
  748. case Stmt::OMPTargetUpdateDirectiveClass:
  749. case Stmt::OMPTeamsDirectiveClass:
  750. case Stmt::OMPCancellationPointDirectiveClass:
  751. case Stmt::OMPCancelDirectiveClass:
  752. case Stmt::OMPTaskLoopDirectiveClass:
  753. case Stmt::OMPTaskLoopSimdDirectiveClass:
  754. case Stmt::OMPDistributeDirectiveClass:
  755. case Stmt::OMPDistributeParallelForDirectiveClass:
  756. case Stmt::OMPDistributeParallelForSimdDirectiveClass:
  757. case Stmt::OMPDistributeSimdDirectiveClass:
  758. case Stmt::OMPTargetParallelForSimdDirectiveClass:
  759. case Stmt::OMPTargetSimdDirectiveClass:
  760. case Stmt::OMPTeamsDistributeDirectiveClass:
  761. case Stmt::OMPTeamsDistributeSimdDirectiveClass:
  762. case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
  763. case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
  764. case Stmt::OMPTargetTeamsDirectiveClass:
  765. case Stmt::OMPTargetTeamsDistributeDirectiveClass:
  766. case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
  767. llvm_unreachable("Stmt should not be in analyzer evaluation loop");
  768. case Stmt::ObjCSubscriptRefExprClass:
  769. case Stmt::ObjCPropertyRefExprClass:
  770. llvm_unreachable("These are handled by PseudoObjectExpr");
  771. case Stmt::GNUNullExprClass: {
  772. // GNU __null is a pointer-width integer, not an actual pointer.
  773. ProgramStateRef state = Pred->getState();
  774. state = state->BindExpr(S, Pred->getLocationContext(),
  775. svalBuilder.makeIntValWithPtrWidth(0, false));
  776. Bldr.generateNode(S, Pred, state);
  777. break;
  778. }
  779. case Stmt::ObjCAtSynchronizedStmtClass:
  780. Bldr.takeNodes(Pred);
  781. VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
  782. Bldr.addNodes(Dst);
  783. break;
  784. case Stmt::ExprWithCleanupsClass:
  785. // Handled due to the fully linearised CFG.
  786. break;
  787. case Stmt::CXXBindTemporaryExprClass: {
  788. Bldr.takeNodes(Pred);
  789. ExplodedNodeSet PreVisit;
  790. getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
  791. ExplodedNodeSet Next;
  792. VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
  793. getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
  794. Bldr.addNodes(Dst);
  795. break;
  796. }
  797. // Cases not handled yet; but will handle some day.
  798. case Stmt::DesignatedInitExprClass:
  799. case Stmt::DesignatedInitUpdateExprClass:
  800. case Stmt::ArrayInitLoopExprClass:
  801. case Stmt::ArrayInitIndexExprClass:
  802. case Stmt::ExtVectorElementExprClass:
  803. case Stmt::ImaginaryLiteralClass:
  804. case Stmt::ObjCAtCatchStmtClass:
  805. case Stmt::ObjCAtFinallyStmtClass:
  806. case Stmt::ObjCAtTryStmtClass:
  807. case Stmt::ObjCAutoreleasePoolStmtClass:
  808. case Stmt::ObjCEncodeExprClass:
  809. case Stmt::ObjCIsaExprClass:
  810. case Stmt::ObjCProtocolExprClass:
  811. case Stmt::ObjCSelectorExprClass:
  812. case Stmt::ParenListExprClass:
  813. case Stmt::ShuffleVectorExprClass:
  814. case Stmt::ConvertVectorExprClass:
  815. case Stmt::VAArgExprClass:
  816. case Stmt::CUDAKernelCallExprClass:
  817. case Stmt::OpaqueValueExprClass:
  818. case Stmt::AsTypeExprClass:
  819. // Fall through.
  820. // Cases we intentionally don't evaluate, since they don't need
  821. // to be explicitly evaluated.
  822. case Stmt::PredefinedExprClass:
  823. case Stmt::AddrLabelExprClass:
  824. case Stmt::AttributedStmtClass:
  825. case Stmt::IntegerLiteralClass:
  826. case Stmt::CharacterLiteralClass:
  827. case Stmt::ImplicitValueInitExprClass:
  828. case Stmt::CXXScalarValueInitExprClass:
  829. case Stmt::CXXBoolLiteralExprClass:
  830. case Stmt::ObjCBoolLiteralExprClass:
  831. case Stmt::ObjCAvailabilityCheckExprClass:
  832. case Stmt::FloatingLiteralClass:
  833. case Stmt::NoInitExprClass:
  834. case Stmt::SizeOfPackExprClass:
  835. case Stmt::StringLiteralClass:
  836. case Stmt::ObjCStringLiteralClass:
  837. case Stmt::CXXPseudoDestructorExprClass:
  838. case Stmt::SubstNonTypeTemplateParmExprClass:
  839. case Stmt::CXXNullPtrLiteralExprClass:
  840. case Stmt::OMPArraySectionExprClass:
  841. case Stmt::TypeTraitExprClass: {
  842. Bldr.takeNodes(Pred);
  843. ExplodedNodeSet preVisit;
  844. getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
  845. getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
  846. Bldr.addNodes(Dst);
  847. break;
  848. }
  849. case Stmt::CXXDefaultArgExprClass:
  850. case Stmt::CXXDefaultInitExprClass: {
  851. Bldr.takeNodes(Pred);
  852. ExplodedNodeSet PreVisit;
  853. getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
  854. ExplodedNodeSet Tmp;
  855. StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);
  856. const Expr *ArgE;
  857. if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
  858. ArgE = DefE->getExpr();
  859. else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
  860. ArgE = DefE->getExpr();
  861. else
  862. llvm_unreachable("unknown constant wrapper kind");
  863. bool IsTemporary = false;
  864. if (const MaterializeTemporaryExpr *MTE =
  865. dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
  866. ArgE = MTE->GetTemporaryExpr();
  867. IsTemporary = true;
  868. }
  869. Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
  870. if (!ConstantVal)
  871. ConstantVal = UnknownVal();
  872. const LocationContext *LCtx = Pred->getLocationContext();
  873. for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
  874. I != E; ++I) {
  875. ProgramStateRef State = (*I)->getState();
  876. State = State->BindExpr(S, LCtx, *ConstantVal);
  877. if (IsTemporary)
  878. State = createTemporaryRegionIfNeeded(State, LCtx,
  879. cast<Expr>(S),
  880. cast<Expr>(S));
  881. Bldr2.generateNode(S, *I, State);
  882. }
  883. getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
  884. Bldr.addNodes(Dst);
  885. break;
  886. }
  887. // Cases we evaluate as opaque expressions, conjuring a symbol.
  888. case Stmt::CXXStdInitializerListExprClass:
  889. case Expr::ObjCArrayLiteralClass:
  890. case Expr::ObjCDictionaryLiteralClass:
  891. case Expr::ObjCBoxedExprClass: {
  892. Bldr.takeNodes(Pred);
  893. ExplodedNodeSet preVisit;
  894. getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
  895. ExplodedNodeSet Tmp;
  896. StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);
  897. const Expr *Ex = cast<Expr>(S);
  898. QualType resultType = Ex->getType();
  899. for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
  900. it != et; ++it) {
  901. ExplodedNode *N = *it;
  902. const LocationContext *LCtx = N->getLocationContext();
  903. SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
  904. resultType,
  905. currBldrCtx->blockCount());
  906. ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
  907. Bldr2.generateNode(S, N, state);
  908. }
  909. getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
  910. Bldr.addNodes(Dst);
  911. break;
  912. }
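// For instance (illustrative), an Objective-C literal such as @[@1, @2] is
// not modeled element-by-element here; the whole expression is bound to a
// fresh conjured symbol of its static type.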
  913. case Stmt::ArraySubscriptExprClass:
  914. Bldr.takeNodes(Pred);
  915. VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
  916. Bldr.addNodes(Dst);
  917. break;
  918. case Stmt::GCCAsmStmtClass:
  919. Bldr.takeNodes(Pred);
  920. VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
  921. Bldr.addNodes(Dst);
  922. break;
  923. case Stmt::MSAsmStmtClass:
  924. Bldr.takeNodes(Pred);
  925. VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
  926. Bldr.addNodes(Dst);
  927. break;
  928. case Stmt::BlockExprClass:
  929. Bldr.takeNodes(Pred);
  930. VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
  931. Bldr.addNodes(Dst);
  932. break;
  933. case Stmt::LambdaExprClass:
  934. if (AMgr.options.shouldInlineLambdas()) {
  935. Bldr.takeNodes(Pred);
  936. VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
  937. Bldr.addNodes(Dst);
  938. } else {
  939. const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
  940. Engine.addAbortedBlock(node, currBldrCtx->getBlock());
  941. }
  942. break;
  943. case Stmt::BinaryOperatorClass: {
  944. const BinaryOperator* B = cast<BinaryOperator>(S);
  945. if (B->isLogicalOp()) {
  946. Bldr.takeNodes(Pred);
  947. VisitLogicalExpr(B, Pred, Dst);
  948. Bldr.addNodes(Dst);
  949. break;
  950. }
  951. else if (B->getOpcode() == BO_Comma) {
  952. ProgramStateRef state = Pred->getState();
  953. Bldr.generateNode(B, Pred,
  954. state->BindExpr(B, Pred->getLocationContext(),
  955. state->getSVal(B->getRHS(),
  956. Pred->getLocationContext())));
  957. break;
  958. }
  959. Bldr.takeNodes(Pred);
  960. if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
  961. (B->isRelationalOp() || B->isEqualityOp())) {
  962. ExplodedNodeSet Tmp;
  963. VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
  964. evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
  965. }
  966. else
  967. VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
  968. Bldr.addNodes(Dst);
  969. break;
  970. }
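// Note on the eager bifurcation above: when
// AMgr.options.eagerlyAssumeBinOpBifurcation is set, a comparison such as
//
//   int b = (x == 0);
//
// is split immediately into one state where the result is true (x == 0)
// and one where it is false (x != 0), rather than carrying a symbolic
// comparison value forward.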
  971. case Stmt::CXXOperatorCallExprClass: {
  972. const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);
  973. // For instance method operators, make sure the 'this' argument has a
  974. // valid region.
  975. const Decl *Callee = OCE->getCalleeDecl();
  976. if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
  977. if (MD->isInstance()) {
  978. ProgramStateRef State = Pred->getState();
  979. const LocationContext *LCtx = Pred->getLocationContext();
  980. ProgramStateRef NewState =
  981. createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
  982. if (NewState != State) {
  983. Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
  984. ProgramPoint::PreStmtKind);
  985. // Did we cache out?
  986. if (!Pred)
  987. break;
  988. }
  989. }
  990. }
  991. // FALLTHROUGH
  992. }
  993. case Stmt::CallExprClass:
  994. case Stmt::CXXMemberCallExprClass:
  995. case Stmt::UserDefinedLiteralClass: {
  996. Bldr.takeNodes(Pred);
  997. VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
  998. Bldr.addNodes(Dst);
  999. break;
  1000. }
  1001. case Stmt::CXXCatchStmtClass: {
  1002. Bldr.takeNodes(Pred);
  1003. VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
  1004. Bldr.addNodes(Dst);
  1005. break;
  1006. }
  1007. case Stmt::CXXTemporaryObjectExprClass:
  1008. case Stmt::CXXConstructExprClass: {
  1009. Bldr.takeNodes(Pred);
  1010. VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
  1011. Bldr.addNodes(Dst);
  1012. break;
  1013. }
  1014. case Stmt::CXXNewExprClass: {
  1015. Bldr.takeNodes(Pred);
  1016. ExplodedNodeSet PostVisit;
  1017. VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
  1018. getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
  1019. Bldr.addNodes(Dst);
  1020. break;
  1021. }
  1022. case Stmt::CXXDeleteExprClass: {
  1023. Bldr.takeNodes(Pred);
  1024. ExplodedNodeSet PreVisit;
  1025. const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
  1026. getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
  1027. for (ExplodedNodeSet::iterator i = PreVisit.begin(),
  1028. e = PreVisit.end(); i != e ; ++i)
  1029. VisitCXXDeleteExpr(CDE, *i, Dst);
  1030. Bldr.addNodes(Dst);
  1031. break;
  1032. }
  1033. // FIXME: ChooseExpr is really a constant. We need to fix the CFG so it
  1034. // does not model them as explicit control-flow.
  1035. case Stmt::ChooseExprClass: { // __builtin_choose_expr
  1036. Bldr.takeNodes(Pred);
  1037. const ChooseExpr *C = cast<ChooseExpr>(S);
  1038. VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
  1039. Bldr.addNodes(Dst);
  1040. break;
  1041. }
    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // The root node should have the location context of the top-most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}
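
// For illustration (names below are made up): with a loop whose trip count
// the analyzer cannot bound, e.g.
//
//   for (int i = 0; i < n; ++i)   // 'n' is a symbolic, unconstrained value
//     buf[i] = 0;
//
// a path visits the loop's body block at most maxBlockVisitOnPath times. On
// the visit just before that limit the loop may be widened (its loop-carried
// state is conservatively invalidated so analysis can continue past the loop,
// when shouldWidenLoops() is set); once the limit is reached the path ends in
// a sink here, and a call inlined on this path is re-enqueued for evaluation
// without inlining.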

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
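///
/// For instance (illustrative snippet), in
///
///   void f(short s) {
///     if ((int)s) { ... }
///   }
///
/// the cast merely sign-extends the symbolic value of 's', so the SVal of 's'
/// itself can be used as the branch condition, preserving path-sensitivity
/// that would otherwise be lost on the casted expression.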
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
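//
// For example, for
//
//   if (a && b) { ... }
//
// the block that branches on the condition only evaluates 'b' as its last
// CFGStmt (reaching that block already implies 'a' was true), so the branch
// is resolved to 'b' rather than to the whole 'a && b' expression.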
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.

  // The invariants are still shifting, but it is possible that the
  // last element in a CFGBlock is not a CFGStmt. Look for the last
  // CFGStmt as the value of the condition.
  CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  for (; I != E; ++I) {
    CFGElement Elem = *I;
    Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
    if (!CS)
      continue;
    const Stmt *LastStmt = CS->getStmt();
    assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
    return LastStmt;
  }
  llvm_unreachable("could not resolve condition");
}

void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
         "CXXBindTemporaryExprs are handled by processBindTemporary.");
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)"
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }

  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity. Right now casts of symbolic
          // integers that promote their values are currently not tracked well.
          // If 'Condition' is such an expression, try and recover the
          // underlying value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
    if (builder.isFeasible(true)) {
      if (StTrue)
        builder.generateNode(StTrue, true, PredI);
      else
        builder.markInfeasible(true);
    }

    // Process the false branch.
    if (builder.isFeasible(false)) {
      if (StFalse)
        builder.generateNode(StFalse, false, PredI);
      else
        builder.markInfeasible(false);
    }
  }
  currBldrCtx = nullptr;
}

/// The GDM component containing the set of global variables which have been
/// previously initialized with explicit initializers.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
                                 llvm::ImmutableSet<const VarDecl *>)

void ExprEngine::processStaticInitializer(const DeclStmt *DS,
                                          NodeBuilderContext &BuilderCtx,
                                          ExplodedNode *Pred,
                                          clang::ento::ExplodedNodeSet &Dst,
                                          const CFGBlock *DstT,
                                          const CFGBlock *DstF) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currBldrCtx = &BuilderCtx;

  const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  ProgramStateRef state = Pred->getState();
  bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);

  if (!initHasRun) {
    state = state->add<InitializedGlobalsSet>(VD);
  }

  builder.generateNode(state, initHasRun, Pred);
  builder.markInfeasible(!initHasRun);

  currBldrCtx = nullptr;
}
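
// For instance (illustrative snippet, helper names are made up), for a
// function-local static with a dynamic initializer
//
//   void g() {
//     static int cache = expensiveInit();
//     use(cache);
//   }
//
// the first time a path reaches the declaration, 'cache' is recorded in
// InitializedGlobalsSet and exactly one of the two CFG successors (run the
// initializer vs. skip it) is kept feasible; once the flag is in the state,
// later visits on that path keep only the branch that skips the initializer.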

/// processIndirectGoto - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a computed goto jump.
void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {

  ProgramStateRef state = builder.getState();
  SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());

  // Three possibilities:
  //
  //   (1) We know the computed label.
  //   (2) The label is NULL (or some other constant), or Undefined.
  //   (3) We have no clue about the label. Dispatch to all targets.
  //

  typedef IndirectGotoNodeBuilder::iterator iterator;

  if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
    const LabelDecl *L = LV->getLabel();

    for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
      if (I.getLabel() == L) {
        builder.generateNode(I, state);
        return;
      }
    }

    llvm_unreachable("No block with label.");
  }

  if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
    // Dispatch to the first target and mark it as a sink.
    //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
    // FIXME: add checker visit.
    //UndefBranches.insert(N);
    return;
  }

  // This is really a catch-all. We don't support symbolics yet.
  // FIXME: Implement dispatch for symbolic pointers.

  for (iterator I = builder.begin(), E = builder.end(); I != E; ++I)
    builder.generateNode(I, state);
}

#if 0
static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
  const StackFrameContext* Frame = Pred.getStackFrame();
  const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
      Pred.getState()->get<InitializedTemporariesSet>();
  return std::find_if(Set.begin(), Set.end(),
                      [&](const CXXBindTemporaryContext &Ctx) {
                        if (Ctx.second == Frame) {
                          Ctx.first->dump();
                          llvm::errs() << "\n";
                        }
                        return Ctx.second == Frame;
                      }) == Set.end();
}
#endif

void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst,
                                        const BlockEdge &L) {
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this);
}

/// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path
/// nodes when the control reaches the end of a function.
void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
                                      ExplodedNode *Pred,
                                      const ReturnStmt *RS) {
  // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred).
  // We currently cannot enable this assert, as lifetime extended temporaries
  // are not modelled correctly.
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  StateMgr.EndPath(Pred->getState());

  ExplodedNodeSet Dst;
  if (Pred->getLocationContext()->inTopFrame()) {
    // Remove dead symbols.
    ExplodedNodeSet AfterRemovedDead;
    removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);

    // Notify checkers.
    for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
         E = AfterRemovedDead.end(); I != E; ++I) {
      getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
    }
  } else {
    getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  }

  Engine.enqueueEndOfFunction(Dst, RS);
}

/// ProcessSwitch - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a switch statement.
void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  typedef SwitchNodeBuilder::iterator iterator;
  ProgramStateRef state = builder.getState();
  const Expr *CondE = builder.getCondition();
  SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());

  if (CondV_untested.isUndef()) {
    //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
    // FIXME: add checker
    //UndefBranches.insert(N);
    return;
  }
  DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();

  ProgramStateRef DefaultSt = state;

  iterator I = builder.begin(), EI = builder.end();
  bool defaultIsFeasible = I == EI;

  for ( ; I != EI; ++I) {
    // Successor may be pruned out during CFG construction.
    if (!I.getBlock())
      continue;

    const CaseStmt *Case = I.getCase();

    // Evaluate the LHS of the case value.
    llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
    assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));

    // Get the RHS of the case, if it exists.
    llvm::APSInt V2;
    if (const Expr *E = Case->getRHS())
      V2 = E->EvaluateKnownConstInt(getContext());
    else
      V2 = V1;

    ProgramStateRef StateCase;
    if (Optional<NonLoc> NL = CondV.getAs<NonLoc>())
      std::tie(StateCase, DefaultSt) =
          DefaultSt->assumeInclusiveRange(*NL, V1, V2);
    else // UnknownVal
      StateCase = DefaultSt;

    if (StateCase)
      builder.generateCaseStmtNode(I, StateCase);

    // Now "assume" that the case doesn't match. Add this state
    // to the default state (if it is feasible).
    if (DefaultSt)
      defaultIsFeasible = true;
    else {
      defaultIsFeasible = false;
      break;
    }
  }

  if (!defaultIsFeasible)
    return;

  // If we have switch(enum value), the default branch is not
  // feasible if all of the enum constants not covered by 'case:' statements
  // are not feasible values for the switch condition.
  //
  // Note that this isn't as accurate as it could be. Even if there isn't
  // a case for a particular enum value, as long as that enum value isn't
  // feasible it shouldn't be considered for making 'default:' reachable.
  const SwitchStmt *SS = builder.getSwitch();
  const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  if (CondExpr->getType()->getAs<EnumType>()) {
    if (SS->isAllEnumCasesCovered())
      return;
  }

  builder.generateDefaultCaseNode(DefaultSt);
}
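
// For example, for
//
//   enum Color { Red, Green, Blue };
//   switch (color) {            // 'color' has type Color (illustrative name)
//   case Red:   ...; break;
//   case Green: ...; break;
//   case Blue:  ...; break;
//   }
//
// every enumerator is covered, so no node is generated for the implicit
// default branch; for a partially covered or non-enum condition, the default
// successor carries the state in which none of the case ranges matched.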

//===----------------------------------------------------------------------===//
// Transfer functions: Loads and stores.
//===----------------------------------------------------------------------===//

void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    // C permits "extern void v", and if you cast the address to a valid type,
    // you can even do things with it. We simply pretend
    assert(Ex->isGLValue() || VD->getType()->isVoidType());
    const LocationContext *LocCtxt = Pred->getLocationContext();
    const Decl *D = LocCtxt->getDecl();
    const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr;
    const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex);
    SVal V;
    bool IsReference;
    if (AMgr.options.shouldInlineLambdas() && DeclRefEx &&
        DeclRefEx->refersToEnclosingVariableOrCapture() && MD &&
        MD->getParent()->isLambda()) {
      // Lookup the field of the lambda.
      const CXXRecordDecl *CXXRec = MD->getParent();
      llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
      FieldDecl *LambdaThisCaptureField;
      CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);
      const FieldDecl *FD = LambdaCaptureFields[VD];
      if (!FD) {
        // When a constant is captured, sometimes no corresponding field is
        // created in the lambda object.
        assert(VD->getType().isConstQualified());
        V = state->getLValue(VD, LocCtxt);
        IsReference = false;
      } else {
        Loc CXXThis =
            svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame());
        SVal CXXThisVal = state->getSVal(CXXThis);
        V = state->getLValue(FD, CXXThisVal);
        IsReference = FD->getType()->isReferenceType();
      }
    } else {
      V = state->getLValue(VD, LocCtxt);
      IsReference = VD->getType()->isReferenceType();
    }

    // For references, the 'lvalue' is the pointer address stored in the
    // reference region.
    if (IsReference) {
      if (const MemRegion *R = V.getAsRegion())
        V = state->getSVal(R);
      else
        V = UnknownVal();
    }

    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
    assert(!Ex->isGLValue());
    SVal V = svalBuilder.makeIntVal(ED->getInitVal());
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
    return;
  }
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    SVal V = svalBuilder.getFunctionPointer(FD);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (isa<FieldDecl>(D)) {
    // FIXME: Compute lvalue of field pointers-to-member.
    // Right now we just use a non-null void pointer, so that it gives proper
    // results in boolean contexts.
    SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
                                          currBldrCtx->blockCount());
    state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }

  llvm_unreachable("Support for this Decl not implemented.");
}

/// VisitArraySubscriptExpr - Transfer function for array accesses.
void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
                                             ExplodedNode *Pred,
                                             ExplodedNodeSet &Dst) {

  const Expr *Base = A->getBase()->IgnoreParens();
  const Expr *Idx  = A->getIdx()->IgnoreParens();

  ExplodedNodeSet CheckerPreStmt;
  getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this);

  ExplodedNodeSet EvalSet;
  StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx);

  assert(A->isGLValue() ||
         (!AMgr.getLangOpts().CPlusPlus &&
          A->getType().isCForbiddenLValueType()));

  for (auto *Node : CheckerPreStmt) {
    const LocationContext *LCtx = Node->getLocationContext();
    ProgramStateRef state = Node->getState();
    SVal V = state->getLValue(A->getType(),
                              state->getSVal(Idx, LCtx),
                              state->getSVal(Base, LCtx));
    Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
  }

  getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this);
}

/// VisitMemberExpr - Transfer function for member expressions.
void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Prechecks eventually go in ::Visit().
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);

  ExplodedNodeSet EvalSet;
  ValueDecl *Member = M->getMemberDecl();

  // Handle static member variables and enum constants accessed via
  // member syntax.
  if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I)
      VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
  } else {
    StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
    ExplodedNodeSet Tmp;

    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I) {
      ProgramStateRef state = (*I)->getState();
      const LocationContext *LCtx = (*I)->getLocationContext();
      Expr *BaseExpr = M->getBase();

      // Handle C++ method calls.
      if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
        if (MD->isInstance())
          state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);

        SVal MDVal = svalBuilder.getFunctionPointer(MD);
        state = state->BindExpr(M, LCtx, MDVal);

        Bldr.generateNode(M, *I, state);
        continue;
      }

      // Handle regular struct fields / member variables.
      state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
      SVal baseExprVal = state->getSVal(BaseExpr, LCtx);

      FieldDecl *field = cast<FieldDecl>(Member);
      SVal L = state->getLValue(field, baseExprVal);

      if (M->isGLValue() || M->getType()->isArrayType()) {
        // We special-case rvalues of array type because the analyzer cannot
        // reason about them, since we expect all regions to be wrapped in
        // Locs. We instead treat these as lvalues and assume that they will
        // decay to pointers as soon as they are used.
        if (!M->isGLValue()) {
          assert(M->getType()->isArrayType());
          const ImplicitCastExpr *PE =
            dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M));
          if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
            llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
          }
        }

        if (field->getType()->isReferenceType()) {
          if (const MemRegion *R = L.getAsRegion())
            L = state->getSVal(R);
          else
            L = UnknownVal();
        }

        Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
                          ProgramPoint::PostLValueKind);
      } else {
        Bldr.takeNodes(*I);
        evalLoad(Tmp, M, M, *I, state, L);
        Bldr.addNodes(Tmp);
      }
    }
  }

  getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
}

void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet AfterPreSet;
  getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);

  // For now, treat all the arguments to C11 atomics as escaping.
  // FIXME: Ideally we should model the behavior of the atomics precisely here.

  ExplodedNodeSet AfterInvalidateSet;
  StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);

  for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    const LocationContext *LCtx = (*I)->getLocationContext();

    SmallVector<SVal, 8> ValuesToInvalidate;
    for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
      const Expr *SubExpr = AE->getSubExprs()[SI];
      SVal SubExprVal = State->getSVal(SubExpr, LCtx);
      ValuesToInvalidate.push_back(SubExprVal);
    }

    State = State->invalidateRegions(ValuesToInvalidate, AE,
                                     currBldrCtx->blockCount(),
                                     LCtx,
                                     /*CausedByPointerEscape*/true,
                                     /*Symbols=*/nullptr);

    SVal ResultVal = UnknownVal();
    State = State->BindExpr(AE, LCtx, ResultVal);
    Bldr.generateNode(AE, *I, State, nullptr,
                      ProgramPoint::PostStmtKind);
  }

  getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
}

namespace {
class CollectReachableSymbolsCallback final : public SymbolVisitor {
  InvalidatedSymbols Symbols;

public:
  CollectReachableSymbolsCallback(ProgramStateRef State) {}
  const InvalidatedSymbols &getSymbols() const { return Symbols; }

  bool VisitSymbol(SymbolRef Sym) override {
    Symbols.insert(Sym);
    return true;
  }
};
} // end anonymous namespace

// A value escapes in three possible cases:
// (1) We are binding to something that is not a memory region.
// (2) We are binding to a MemRegion that does not have stack storage.
// (3) We are binding to a MemRegion with stack storage that the store
//     does not understand.
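//
// For example (illustrative snippet):
//
//   static int *g;
//   void capture(int *p) {
//     g = p;   // case (2): 'g' has non-stack storage, so the value of 'p'
//   }          //           escapes on the bind.
//
// Checkers tracking the pointee of 'p' are notified below with
// PSK_EscapeOnBind and will typically stop tracking it.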
ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
                                                        SVal Loc, SVal Val) {
  // Are we storing to something that causes the value to "escape"?
  bool escapes = true;

  // TODO: Move to StoreManager.
  if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
    escapes = !regionLoc->getRegion()->hasStackStorage();

    if (!escapes) {
      // To test (3), generate a new state with the binding added. If it is
      // the same state, then it escapes (since the store cannot represent
      // the binding).
      // Do this only if we know that the store is not supposed to generate the
      // same state.
      SVal StoredVal = State->getSVal(regionLoc->getRegion());
      if (StoredVal != Val)
        escapes = (State == (State->bindLoc(*regionLoc, Val)));
    }
  }

  // If our store can represent the binding and we aren't storing to something
  // that doesn't have local storage then just return and have the simulation
  // state continue as is.
  if (!escapes)
    return State;

  // Otherwise, find all symbols referenced by 'Val' that we are tracking
  // and stop tracking them.
  CollectReachableSymbolsCallback Scanner =
      State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  State = getCheckerManager().runCheckersForPointerEscape(State,
                                                          EscapedSymbols,
                                                          /*CallEvent*/ nullptr,
                                                          PSK_EscapeOnBind,
                                                          nullptr);

  return State;
}

ProgramStateRef
ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
    const InvalidatedSymbols *Invalidated,
    ArrayRef<const MemRegion *> ExplicitRegions,
    ArrayRef<const MemRegion *> Regions,
    const CallEvent *Call,
    RegionAndSymbolInvalidationTraits &ITraits) {

  if (!Invalidated || Invalidated->empty())
    return State;

  if (!Call)
    return getCheckerManager().runCheckersForPointerEscape(State,
                                                           *Invalidated,
                                                           nullptr,
                                                           PSK_EscapeOther,
                                                           &ITraits);

  // If the symbols were invalidated by a call, we want to find out which ones
  // were invalidated directly due to being arguments to the call.
  InvalidatedSymbols SymbolsDirectlyInvalidated;
  for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
       E = ExplicitRegions.end(); I != E; ++I) {
    if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
      SymbolsDirectlyInvalidated.insert(R->getSymbol());
  }

  InvalidatedSymbols SymbolsIndirectlyInvalidated;
  for (InvalidatedSymbols::const_iterator I = Invalidated->begin(),
       E = Invalidated->end(); I != E; ++I) {
    SymbolRef sym = *I;
    if (SymbolsDirectlyInvalidated.count(sym))
      continue;
    SymbolsIndirectlyInvalidated.insert(sym);
  }

  if (!SymbolsDirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);

  // Notify about the symbols that get indirectly invalidated by the call.
  if (!SymbolsIndirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);

  return State;
}

/// evalBind - Handle the semantics of binding a value to a specific location.
/// This method is used by evalStore and (soon) VisitDeclStmt, and others.
void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
                          ExplodedNode *Pred,
                          SVal location, SVal Val,
                          bool atDeclInit, const ProgramPoint *PP) {
  const LocationContext *LC = Pred->getLocationContext();
  PostStmt PS(StoreE, LC);
  if (!PP)
    PP = &PS;

  // Do a previsit of the bind.
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
                                         StoreE, *this, *PP);

  StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);

  // If the location is not a 'Loc', it will already be handled by
  // the checkers. There is nothing left to do.
  if (!location.getAs<Loc>()) {
    const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
                                     /*tag*/nullptr);
    ProgramStateRef state = Pred->getState();
    state = processPointerEscapedOnBind(state, location, Val);
    Bldr.generateNode(L, state, Pred);
    return;
  }

  for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
       I != E; ++I) {
    ExplodedNode *PredI = *I;
    ProgramStateRef state = PredI->getState();

    state = processPointerEscapedOnBind(state, location, Val);

    // When binding the value, pass on the hint that this is an initialization.
    // For initializations, we do not need to inform clients of region
    // changes.
    state = state->bindLoc(location.castAs<Loc>(),
                           Val, /* notifyChanges = */ !atDeclInit);

    const MemRegion *LocReg = nullptr;
    if (Optional<loc::MemRegionVal> LocRegVal =
            location.getAs<loc::MemRegionVal>()) {
      LocReg = LocRegVal->getRegion();
    }

    const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
    Bldr.generateNode(L, state, PredI);
  }
}

/// evalStore - Handle the semantics of a store via an assignment.
///  @param Dst The node set to store generated state nodes
///  @param AssignE The assignment expression if the store happens in an
///         assignment.
///  @param LocationE The location expression that is stored to.
///  @param state The current simulation state
///  @param location The location to store the value
///  @param Val The value to be stored
void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
                           const Expr *LocationE,
                           ExplodedNode *Pred,
                           ProgramStateRef state, SVal location, SVal Val,
                           const ProgramPointTag *tag) {
  // Proceed with the store. We use AssignE as the anchor for the PostStore
  // ProgramPoint if it is non-NULL, and LocationE otherwise.
  const Expr *StoreE = AssignE ? AssignE : LocationE;

  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);

  if (Tmp.empty())
    return;

  if (location.isUndef())
    return;

  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI)
    evalBind(Dst, StoreE, *NI, location, Val, false);
}

void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
                          const Expr *NodeEx,
                          const Expr *BoundEx,
                          ExplodedNode *Pred,
                          ProgramStateRef state,
                          SVal location,
                          const ProgramPointTag *tag,
                          QualType LoadTy) {
  assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");

  // Are we loading from a region? This actually results in two loads; one
  // to fetch the address of the referenced value and one to fetch the
  // referenced value.
  if (const TypedValueRegion *TR =
        dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {

    QualType ValTy = TR->getValueType();
    if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
      static SimpleProgramPointTag
             loadReferenceTag(TagProviderName, "Load Reference");
      ExplodedNodeSet Tmp;
      evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
                     location, &loadReferenceTag,
                     getContext().getPointerType(RT->getPointeeType()));

      // Perform the load from the referenced value.
      for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
        state = (*I)->getState();
        location = state->getSVal(BoundEx, (*I)->getLocationContext());
        evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
      }
      return;
    }
  }

  evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
}
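
// For instance (illustrative snippet), given
//
//   int x = 0;
//   int &r = x;
//   int y = r;   // reading 'r' loads the address held in the reference
//                // region first, then loads the value of 'x' through it
//
// the first load above is tagged "Load Reference" and the second one produces
// the PostLoad binding for the expression.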

void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
                                const Expr *NodeEx,
                                const Expr *BoundEx,
                                ExplodedNode *Pred,
                                ProgramStateRef state,
                                SVal location,
                                const ProgramPointTag *tag,
                                QualType LoadTy) {
  assert(NodeEx);
  assert(BoundEx);
  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  if (Tmp.empty())
    return;

  StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  if (location.isUndef())
    return;

  // Proceed with the load.
  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI) {
    state = (*NI)->getState();
    const LocationContext *LCtx = (*NI)->getLocationContext();

    SVal V = UnknownVal();
    if (location.isValid()) {
      if (LoadTy.isNull())
        LoadTy = BoundEx->getType();
      V = state->getSVal(location.castAs<Loc>(), LoadTy);
    }

    Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
                      ProgramPoint::PostLoadKind);
  }
}

void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
                              const Stmt *NodeEx,
                              const Stmt *BoundEx,
                              ExplodedNode *Pred,
                              ProgramStateRef state,
                              SVal location,
                              const ProgramPointTag *tag,
                              bool isLoad) {
  StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
  // Early checks for performance reasons.
  if (location.isUnknown()) {
    return;
  }

  ExplodedNodeSet Src;
  BldrTop.takeNodes(Pred);
  StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  if (Pred->getState() != state) {
    // Associate this new state with an ExplodedNode.
    // FIXME: If I pass null tag, the graph is incorrect, e.g for
    //   int *p;
    //   p = 0;
    //   *p = 0xDEADBEEF;
    // "p = 0" is not noted as "Null pointer value stored to 'p'" but
    // instead "int *p" is noted as
    // "Variable 'p' initialized to a null pointer value"
    static SimpleProgramPointTag tag(TagProviderName, "Location");
    Bldr.generateNode(NodeEx, Pred, state, &tag);
  }
  ExplodedNodeSet Tmp;
  getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
                                             NodeEx, BoundEx, *this);
  BldrTop.addNodes(Tmp);
}

std::pair<const ProgramPointTag *, const ProgramPointTag*>
ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  static SimpleProgramPointTag
         eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
                                           "Eagerly Assume True"),
         eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
                                            "Eagerly Assume False");
  return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
                        &eagerlyAssumeBinOpBifurcationFalse);
}

void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
                                                   ExplodedNodeSet &Src,
                                                   const Expr *Ex) {
  StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);

  for (ExplodedNodeSet::iterator I = Src.begin(), E = Src.end(); I != E; ++I) {
    ExplodedNode *Pred = *I;
    // Test if the previous node was at the same expression. This can happen
    // when the expression fails to evaluate to anything meaningful and
    // (as an optimization) we don't generate a node.
    ProgramPoint P = Pred->getLocation();
    if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
      continue;
    }

    ProgramStateRef state = Pred->getState();
    SVal V = state->getSVal(Ex, Pred->getLocationContext());
    Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
    if (SEV && SEV->isExpression()) {
      const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
        geteagerlyAssumeBinOpBifurcationTags();

      ProgramStateRef StateTrue, StateFalse;
      std::tie(StateTrue, StateFalse) = state->assume(*SEV);

      // First assume that the condition is true.
      if (StateTrue) {
        SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
        StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
      }

      // Next, assume that the condition is false.
      if (StateFalse) {
        SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
        StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
      }
    }
  }
}
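
// For instance, with eager assumption enabled, evaluating
//
//   int b = (x == 0);   // 'x' is symbolic (illustrative snippet)
//
// splits the path right at the comparison: one successor is tagged
// "Eagerly Assume True" with the constraint 'x == 0' added and the expression
// bound to 1, the other is tagged "Eagerly Assume False" with the negated
// constraint and the expression bound to 0.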

void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  // We have processed both the inputs and the outputs. All of the outputs
  // should evaluate to Locs. Nuke all of their values.

  // FIXME: Some day in the future it would be nice to allow a "plug-in"
  // which interprets the inline asm and stores proper results in the
  // outputs.

  ProgramStateRef state = Pred->getState();

  for (const Expr *O : A->outputs()) {
    SVal X = state->getSVal(O, Pred->getLocationContext());
    assert(!X.getAs<NonLoc>());  // Should be an Lval, or unknown, undef.

    if (Optional<Loc> LV = X.getAs<Loc>())
      state = state->bindLoc(*LV, UnknownVal());
  }

  Bldr.generateNode(A, Pred, state);
}

void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
                                ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(A, Pred, Pred->getState());
}

//===----------------------------------------------------------------------===//
// Visualization.
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
static ExprEngine* GraphPrintCheckerState;
static SourceManager* GraphPrintSourceManager;

namespace llvm {
template<>
struct DOTGraphTraits<ExplodedNode*> : public DefaultDOTGraphTraits {
  DOTGraphTraits(bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {}

  // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  // work.
  static std::string getNodeAttributes(const ExplodedNode *N, void*) {
    return "";
  }

  // De-duplicate some source location pretty-printing.
  static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
    if (SLoc.isFileID()) {
      Out << "\\lline="
          << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
          << " col="
          << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
          << "\\l";
    }
  }

  static void printLocation2(raw_ostream &Out, SourceLocation SLoc) {
    if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc))
      Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc);
    else
      SLoc.print(Out, *GraphPrintSourceManager);
  }

  static std::string getNodeLabel(const ExplodedNode *N, void*) {
    std::string sbuf;
    llvm::raw_string_ostream Out(sbuf);

    // Program Location.
    ProgramPoint Loc = N->getLocation();

    switch (Loc.getKind()) {
      case ProgramPoint::BlockEntranceKind: {
        Out << "Block Entrance: B"
            << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
        break;
      }

      case ProgramPoint::BlockExitKind:
        assert(false);
        break;

      case ProgramPoint::CallEnterKind:
        Out << "CallEnter";
        break;

      case ProgramPoint::CallExitBeginKind:
        Out << "CallExitBegin";
        break;

      case ProgramPoint::CallExitEndKind:
        Out << "CallExitEnd";
        break;

      case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
        Out << "PostStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
        Out << "PreStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::EpsilonKind:
        Out << "Epsilon Point";
        break;

      case ProgramPoint::PreImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PreCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PostCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostInitializerKind: {
        Out << "PostInitializer: ";
        const CXXCtorInitializer *Init =
          Loc.castAs<PostInitializer>().getInitializer();
        if (const FieldDecl *FD = Init->getAnyMember())
          Out << *FD;
        else {
          QualType Ty = Init->getTypeSourceInfo()->getType();
          Ty = Ty.getLocalUnqualifiedType();
          LangOptions LO; // FIXME.
          Ty.print(Out, LO);
        }
        break;
      }

      case ProgramPoint::BlockEdgeKind: {
        const BlockEdge &E = Loc.castAs<BlockEdge>();
        Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
            << E.getDst()->getBlockID() << ')';

        if (const Stmt *T = E.getSrc()->getTerminator()) {
          SourceLocation SLoc = T->getLocStart();

          Out << "\\|Terminator: ";
          LangOptions LO; // FIXME.
          E.getSrc()->printTerminator(Out, LO);

          if (SLoc.isFileID()) {
            Out << "\\lline="
                << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
                << " col="
                << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
          }

          if (isa<SwitchStmt>(T)) {
            const Stmt *Label = E.getDst()->getLabel();

            if (Label) {
              if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
                Out << "\\lcase ";
                LangOptions LO; // FIXME.
                if (C->getLHS())
                  C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));

                if (const Stmt *RHS = C->getRHS()) {
                  Out << " .. ";
                  RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
                }

                Out << ":";
              }
              else {
                assert(isa<DefaultStmt>(Label));
                Out << "\\ldefault:";
              }
            }
            else
              Out << "\\l(implicit) default:";
          }
          else if (isa<IndirectGotoStmt>(T)) {
            // FIXME
          }
          else {
            Out << "\\lCondition: ";
            if (*E.getSrc()->succ_begin() == E.getDst())
              Out << "true";
            else
              Out << "false";
          }

          Out << "\\l";
        }

        break;
      }

      default: {
        const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
        assert(S != nullptr && "Expecting non-null Stmt");

        Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
        LangOptions LO; // FIXME.
        S->printPretty(Out, nullptr, PrintingPolicy(LO));
        printLocation(Out, S->getLocStart());

        if (Loc.getAs<PreStmt>())
          Out << "\\lPreStmt\\l;";
        else if (Loc.getAs<PostLoad>())
          Out << "\\lPostLoad\\l;";
        else if (Loc.getAs<PostStore>())
          Out << "\\lPostStore\\l";
        else if (Loc.getAs<PostLValue>())
          Out << "\\lPostLValue\\l";

        break;
      }
    }

    ProgramStateRef state = N->getState();
    Out << "\\|StateID: " << (const void*) state.get()
        << " NodeID: " << (const void*) N << "\\|";

    // Analysis stack backtrace.
    Out << "Location context stack (from current to outer):\\l";
    const LocationContext *LC = Loc.getLocationContext();
    unsigned Idx = 0;
    for (; LC; LC = LC->getParent(), ++Idx) {
      Out << Idx << ". (" << (const void *)LC << ") ";
      switch (LC->getKind()) {
      case LocationContext::StackFrame:
        if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl()))
          Out << "Calling " << D->getQualifiedNameAsString();
        else
          Out << "Calling anonymous code";
        if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) {
          Out << " at ";
          printLocation2(Out, S->getLocStart());
        }
        break;
      case LocationContext::Block:
        Out << "Invoking block";
        if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) {
          Out << " defined at ";
          printLocation2(Out, D->getLocStart());
        }
        break;
      case LocationContext::Scope:
        Out << "Entering scope";
        // FIXME: Add more info once ScopeContext is activated.
        break;
      }
      Out << "\\l";
    }
    Out << "\\l";

    state->printDOT(Out);

    Out << "\\l";

    if (const ProgramPointTag *tag = Loc.getTag()) {
      Out << "\\|Tag: " << tag->getTagDescription();
      Out << "\\l";
    }
    return Out.str();
  }
};
} // end llvm namespace
#endif

void ExprEngine::ViewGraph(bool trim) {
#ifndef NDEBUG
  if (trim) {
    std::vector<const ExplodedNode*> Src;

    // Flush any outstanding reports to make sure we cover all the nodes.
    // This does not cause them to get displayed.
    for (BugReporter::iterator I = BR.begin(), E = BR.end(); I != E; ++I)
      const_cast<BugType*>(*I)->FlushReports(BR);

    // Iterate through the reports and get their nodes.
    for (BugReporter::EQClasses_iterator
           EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
      ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
      if (N) Src.push_back(N);
    }

    ViewGraph(Src);
  }
  else {
    GraphPrintCheckerState = this;
    GraphPrintSourceManager = &getContext().getSourceManager();

    llvm::ViewGraph(*G.roots_begin(), "ExprEngine");

    GraphPrintCheckerState = nullptr;
    GraphPrintSourceManager = nullptr;
  }
#endif
}

void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
#ifndef NDEBUG
  GraphPrintCheckerState = this;
  GraphPrintSourceManager = &getContext().getSourceManager();

  std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));

  if (!TrimmedG.get())
    llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  else
    llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");

  GraphPrintCheckerState = nullptr;
  GraphPrintSourceManager = nullptr;
#endif
}