ExprEngine.cpp 99 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738
  1. //=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines a meta-engine for path-sensitive dataflow analysis that
  11. // is built on GREngine, but provides the boilerplate to execute transfer
  12. // functions and build the ExplodedGraph at the expression level.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
  16. #include "PrettyStackTraceLocationContext.h"
  17. #include "clang/AST/CharUnits.h"
  18. #include "clang/AST/ParentMap.h"
  19. #include "clang/AST/StmtCXX.h"
  20. #include "clang/AST/StmtObjC.h"
  21. #include "clang/Basic/Builtins.h"
  22. #include "clang/Basic/PrettyStackTrace.h"
  23. #include "clang/Basic/SourceManager.h"
  24. #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
  25. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  26. #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
  27. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  28. #include "llvm/ADT/ImmutableList.h"
  29. #include "llvm/ADT/Statistic.h"
  30. #include "llvm/Support/raw_ostream.h"
  31. #ifndef NDEBUG
  32. #include "llvm/Support/GraphWriter.h"
  33. #endif
  34. using namespace clang;
  35. using namespace ento;
  36. using llvm::APSInt;
  37. #define DEBUG_TYPE "ExprEngine"
// Per-analysis counters, printed when the analyzer is run with statistics
// enabled (see llvm/ADT/Statistic.h).
STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

// Key type for the program-state trait below: a temporary-binding expression
// paired with the stack frame it was evaluated in.
typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

// Provider string attached to the program-point tags created by this engine
// (e.g. the "Clean Node" tag used in removeDead).
static const char* TagProviderName = "ExprEngine";
/// Construct the path-sensitive expression engine.
///
/// Wires together the core worklist engine, the program-state manager (with
/// the store/constraint managers selected by the AnalysisManager), and the
/// bug reporter. \p VisitedCalleesIn, \p FS and \p HowToInlineIn are stored
/// for use during call inlining decisions.
ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  // A non-zero trim interval enables periodic reclamation of uninteresting
  // nodes while the graph is built, bounding memory use.
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclaimation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}
ExprEngine::~ExprEngine() {
  // Emit any diagnostics accumulated during analysis before the reporter
  // and the graph it references are destroyed.
  BR.FlushReports();
}
  87. //===----------------------------------------------------------------------===//
  88. // Utility methods.
  89. //===----------------------------------------------------------------------===//
/// Build the state used at the entry of the analyzed function, seeding it
/// with a few well-known preconditions:
///  - in 'main', the first (integer) parameter is assumed > 0;
///  - in an Objective-C method, 'self' is assumed non-null;
///  - in a top-level C++ instance method, 'this' is assumed non-null.
ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  // The do/while(0) exists solely so the 'break's below can bail out of the
  // 'main' precondition early.
  do {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      // Only builtin integer parameters qualify.
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      // Form the constraint "argc > 0" and, if it is expressible, assume it.
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}
/// Materialize a temporary object region for the value of \p Ex and bind
/// \p Result to that region's address.
///
/// If \p Result is null, operate in "if needed" mode: a region is created
/// only when Ex's current value is a NonLoc, and Ex itself is used as the
/// result expression. Returns the (possibly updated) state.
ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we don't
    // try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer region,
  // start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    // Walk through no-op and derived-to-base casts; remember the base casts
    // so they can be re-applied to the region below.
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  // An unknown value gets a fresh conjured symbol so later reads are tracked.
  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}
  218. //===----------------------------------------------------------------------===//
  219. // Top-level transfer function logic (Dispatcher).
  220. //===----------------------------------------------------------------------===//
/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  // Pure delegation: let registered checkers react to the assumption.
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}
/// Ask the registered checkers whether any of them wants to be notified
/// about region changes for this state (see processRegionChanges below).
bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}
/// Notify checkers that a set of regions (and optionally symbols) has been
/// invalidated, e.g. around a call; returns the checker-adjusted state.
ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}
/// Print checker-specific pieces of the program state to \p Out, using
/// \p NL and \p Sep as the line and section separators.
void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}
/// Called when the worklist is exhausted; gives checkers their end-of-
/// analysis callback. Note: hasWorkRemaining is currently unused here.
void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}
/// Top-level dispatcher: route a single CFG element to the appropriate
/// Process* handler. Also stashes the current element index and builder
/// context in members, which the handlers (and everything they call) read.
void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  // Keep crash reports informative about where analysis was when it died.
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
  case CFGElement::Statement:
    ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
    return;
  case CFGElement::Initializer:
    ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
    return;
  case CFGElement::NewAllocator:
    ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                        Pred);
    return;
  // Every implicit-destructor flavor funnels through one handler.
  case CFGElement::AutomaticObjectDtor:
  case CFGElement::DeleteDtor:
  case CFGElement::BaseDtor:
  case CFGElement::MemberDtor:
  case CFGElement::TemporaryDtor:
    ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
    return;
  }
}
  271. static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
  272. const CFGStmt S,
  273. const ExplodedNode *Pred,
  274. const LocationContext *LC) {
  275. // Are we never purging state values?
  276. if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
  277. return false;
  278. // Is this the beginning of a basic block?
  279. if (Pred->getLocation().getAs<BlockEntrance>())
  280. return true;
  281. // Is this on a non-expression?
  282. if (!isa<Expr>(S.getStmt()))
  283. return true;
  284. // Run before processing a call.
  285. if (CallEvent::isCallStmt(S.getStmt()))
  286. return true;
  287. // Is this an expression that is consumed by another expression? If so,
  288. // postpone cleaning out the state.
  289. ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  290. return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
  291. }
/// Remove bindings and symbols that are no longer live, generating "cleaned"
/// nodes into \p Out.
///
/// \param ReferenceStmt The statement relative to which liveness is computed;
///        null when an entire LocationContext is being cleared.
/// \param LC The current (or expiring) LocationContext; must be non-null.
/// \param DiagnosticStmt Statement used for the resulting program points;
///        defaults to ReferenceStmt when null.
/// \param K Program-point kind for the generated nodes.
void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  // Let checkers mark extra symbols live before reaping.
  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);
  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
         I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}
  361. void ExprEngine::ProcessStmt(const CFGStmt S,
  362. ExplodedNode *Pred) {
  363. // Reclaim any unnecessary nodes in the ExplodedGraph.
  364. G.reclaimRecentlyAllocatedNodes();
  365. const Stmt *currStmt = S.getStmt();
  366. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  367. currStmt->getLocStart(),
  368. "Error evaluating statement");
  369. // Remove dead bindings and symbols.
  370. ExplodedNodeSet CleanedStates;
  371. if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())){
  372. removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  373. } else
  374. CleanedStates.Add(Pred);
  375. // Visit the statement.
  376. ExplodedNodeSet Dst;
  377. for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
  378. E = CleanedStates.end(); I != E; ++I) {
  379. ExplodedNodeSet DstI;
  380. // Visit the statement.
  381. Visit(currStmt, *I, DstI);
  382. Dst.insert(DstI);
  383. }
  384. // Enqueue the new nodes onto the work list.
  385. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  386. }
  387. void ExprEngine::ProcessInitializer(const CFGInitializer Init,
  388. ExplodedNode *Pred) {
  389. const CXXCtorInitializer *BMI = Init.getInitializer();
  390. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  391. BMI->getSourceLocation(),
  392. "Error evaluating initializer");
  393. // We don't clean up dead bindings here.
  394. const StackFrameContext *stackFrame =
  395. cast<StackFrameContext>(Pred->getLocationContext());
  396. const CXXConstructorDecl *decl =
  397. cast<CXXConstructorDecl>(stackFrame->getDecl());
  398. ProgramStateRef State = Pred->getState();
  399. SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));
  400. ExplodedNodeSet Tmp(Pred);
  401. SVal FieldLoc;
  402. // Evaluate the initializer, if necessary
  403. if (BMI->isAnyMemberInitializer()) {
  404. // Constructors build the object directly in the field,
  405. // but non-objects must be copied in from the initializer.
  406. const Expr *Init = BMI->getInit()->IgnoreImplicit();
  407. if (!isa<CXXConstructExpr>(Init)) {
  408. const ValueDecl *Field;
  409. if (BMI->isIndirectMemberInitializer()) {
  410. Field = BMI->getIndirectMember();
  411. FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
  412. } else {
  413. Field = BMI->getMember();
  414. FieldLoc = State->getLValue(BMI->getMember(), thisVal);
  415. }
  416. SVal InitVal;
  417. if (BMI->getNumArrayIndices() > 0) {
  418. // Handle arrays of trivial type. We can represent this with a
  419. // primitive load/copy from the base array region.
  420. const ArraySubscriptExpr *ASE;
  421. while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
  422. Init = ASE->getBase()->IgnoreImplicit();
  423. SVal LValue = State->getSVal(Init, stackFrame);
  424. if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
  425. InitVal = State->getSVal(*LValueLoc);
  426. // If we fail to get the value for some reason, use a symbolic value.
  427. if (InitVal.isUnknownOrUndef()) {
  428. SValBuilder &SVB = getSValBuilder();
  429. InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
  430. Field->getType(),
  431. currBldrCtx->blockCount());
  432. }
  433. } else {
  434. InitVal = State->getSVal(BMI->getInit(), stackFrame);
  435. }
  436. assert(Tmp.size() == 1 && "have not generated any new nodes yet");
  437. assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
  438. Tmp.clear();
  439. PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  440. evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
  441. }
  442. } else {
  443. assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
  444. // We already did all the work when visiting the CXXConstructExpr.
  445. }
  446. // Construct PostInitializer nodes whether the state changed or not,
  447. // so that the diagnostics don't get confused.
  448. PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  449. ExplodedNodeSet Dst;
  450. NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  451. for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
  452. ExplodedNode *N = *I;
  453. Bldr.generateNode(PP, N->getState(), N);
  454. }
  455. // Enqueue the new nodes onto the work list.
  456. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  457. }
  458. void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
  459. ExplodedNode *Pred) {
  460. ExplodedNodeSet Dst;
  461. switch (D.getKind()) {
  462. case CFGElement::AutomaticObjectDtor:
  463. ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
  464. break;
  465. case CFGElement::BaseDtor:
  466. ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
  467. break;
  468. case CFGElement::MemberDtor:
  469. ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
  470. break;
  471. case CFGElement::TemporaryDtor:
  472. ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
  473. break;
  474. case CFGElement::DeleteDtor:
  475. ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
  476. break;
  477. default:
  478. llvm_unreachable("Unexpected dtor kind.");
  479. }
  480. // Enqueue the new nodes onto the work list.
  481. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  482. }
  483. void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
  484. ExplodedNode *Pred) {
  485. ExplodedNodeSet Dst;
  486. AnalysisManager &AMgr = getAnalysisManager();
  487. AnalyzerOptions &Opts = AMgr.options;
  488. // TODO: We're not evaluating allocators for all cases just yet as
  489. // we're not handling the return value correctly, which causes false
  490. // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  491. if (Opts.mayInlineCXXAllocator())
  492. VisitCXXNewAllocatorCall(NE, Pred, Dst);
  493. else {
  494. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  495. const LocationContext *LCtx = Pred->getLocationContext();
  496. PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
  497. Bldr.generateNode(PP, Pred->getState(), Pred);
  498. }
  499. Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
  500. }
  501. void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
  502. ExplodedNode *Pred,
  503. ExplodedNodeSet &Dst) {
  504. const VarDecl *varDecl = Dtor.getVarDecl();
  505. QualType varType = varDecl->getType();
  506. ProgramStateRef state = Pred->getState();
  507. SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  508. const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();
  509. if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
  510. varType = refType->getPointeeType();
  511. Region = state->getSVal(Region).getAsRegion();
  512. }
  513. VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
  514. Pred, Dst);
  515. }
  516. void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
  517. ExplodedNode *Pred,
  518. ExplodedNodeSet &Dst) {
  519. ProgramStateRef State = Pred->getState();
  520. const LocationContext *LCtx = Pred->getLocationContext();
  521. const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  522. const Stmt *Arg = DE->getArgument();
  523. SVal ArgVal = State->getSVal(Arg, LCtx);
  524. // If the argument to delete is known to be a null value,
  525. // don't run destructor.
  526. if (State->isNull(ArgVal).isConstrainedTrue()) {
  527. QualType DTy = DE->getDestroyedType();
  528. QualType BTy = getContext().getBaseElementType(DTy);
  529. const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
  530. const CXXDestructorDecl *Dtor = RD->getDestructor();
  531. PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
  532. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  533. Bldr.generateNode(PP, Pred->getState(), Pred);
  534. return;
  535. }
  536. VisitCXXDestructor(DE->getDestroyedType(),
  537. ArgVal.getAsRegion(),
  538. DE, /*IsBase=*/ false,
  539. Pred, Dst);
  540. }
  541. void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
  542. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  543. const LocationContext *LCtx = Pred->getLocationContext();
  544. const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  545. Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
  546. LCtx->getCurrentStackFrame());
  547. SVal ThisVal = Pred->getState()->getSVal(ThisPtr);
  548. // Create the base object region.
  549. const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  550. QualType BaseTy = Base->getType();
  551. SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
  552. Base->isVirtual());
  553. VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
  554. CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
  555. }
  556. void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
  557. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  558. const FieldDecl *Member = D.getFieldDecl();
  559. ProgramStateRef State = Pred->getState();
  560. const LocationContext *LCtx = Pred->getLocationContext();
  561. const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  562. Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
  563. LCtx->getCurrentStackFrame());
  564. SVal FieldVal =
  565. State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());
  566. VisitCXXDestructor(Member->getType(),
  567. FieldVal.castAs<loc::MemRegionVal>().getRegion(),
  568. CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
  569. }
/// Model the destructor of a lifetime-extended/bound temporary.  First emits
/// a statement node that clears the temporary's "initialized" marker from the
/// state (if present), then evaluates the destructor itself from that node.
void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  // Only remove the marker when it is actually present, so the state is
  // left untouched (and node caching unperturbed) otherwise.
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}
  595. void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
  596. NodeBuilderContext &BldCtx,
  597. ExplodedNode *Pred,
  598. ExplodedNodeSet &Dst,
  599. const CFGBlock *DstT,
  600. const CFGBlock *DstF) {
  601. BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  602. if (Pred->getState()->contains<InitializedTemporariesSet>(
  603. std::make_pair(BTE, Pred->getStackFrame()))) {
  604. TempDtorBuilder.markInfeasible(false);
  605. TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  606. } else {
  607. TempDtorBuilder.markInfeasible(true);
  608. TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  609. }
  610. }
  611. void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
  612. ExplodedNodeSet &PreVisit,
  613. ExplodedNodeSet &Dst) {
  614. if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
  615. // In case we don't have temporary destructors in the CFG, do not mark
  616. // the initialization - we would otherwise never clean it up.
  617. Dst = PreVisit;
  618. return;
  619. }
  620. StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  621. for (ExplodedNode *Node : PreVisit) {
  622. ProgramStateRef State = Node->getState();
  623. if (!State->contains<InitializedTemporariesSet>(
  624. std::make_pair(BTE, Node->getStackFrame()))) {
  625. // FIXME: Currently the state might already contain the marker due to
  626. // incorrect handling of temporaries bound to default parameters; for
  627. // those, we currently skip the CXXBindTemporaryExpr but rely on adding
  628. // temporary destructor nodes.
  629. State = State->add<InitializedTemporariesSet>(
  630. std::make_pair(BTE, Node->getStackFrame()));
  631. }
  632. StmtBldr.generateNode(BTE, Node, State);
  633. }
  634. }
/// Visit - Transfer-function dispatch for a single statement or expression.
/// Each handled statement class hands off to the corresponding Visit* helper
/// or checker callback; the generated nodes are collected through 'Bldr' into
/// 'DstTop'.  Statement classes the analyzer cannot model generate sink nodes
/// and the enclosing block is recorded as aborted.
void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  // Parentheses are expected to have been stripped before reaching here.
  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::TypeTraitExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      // Unsupported construct: stop exploring this path and record the
      // aborted block for coverage bookkeeping.
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");

    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
    case Stmt::AtomicExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass: {
      // Only the pre/post-statement checker callbacks run; no state change
      // is made by the engine itself.
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      // Both wrapper kinds forward to an underlying initializer expression.
      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
            dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      // Bind the constant value of the default expression when it has one;
      // otherwise fall back to Unknown.
      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        // Model the result as a fresh, unconstrained symbol of the
        // expression's static type.
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        // Lambda modeling is disabled: treat like other unsupported
        // constructs by sinking and aborting the block.
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        // The value of a comma expression is the value of its RHS.
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        // Eagerly split the state on the comparison's outcome.
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }

    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    // FIXME: ChooseExpr is really a constant.  We need to fix the CFG so it
    // does not model these as explicit control-flow.
    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete.  We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      // A statement expression takes the value of its last subexpression.
      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        // Eagerly split the state on the truth value of the negated operand.
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      // The pseudo-object expression takes the value of its result
      // expression, or Unknown if there is none.
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}
/// Re-run the evaluation of the call that was inlined into 'CalleeLC', this
/// time with inlining disabled.  Walks predecessors of 'N' back to the node
/// just before the call was processed, tags the state with the
/// ReplayWithoutInlining flag (which also prevents caching out), creates an
/// Epsilon restart node, and re-enqueues the call-site statement.
/// Returns false only when no suitable restart node could be found.
bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analyzes.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
    EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
    NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));
  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}
  1209. /// Block entrance. (Update counters).
  1210. void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
  1211. NodeBuilderWithSinks &nodeBuilder,
  1212. ExplodedNode *Pred) {
  1213. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1214. // FIXME: Refactor this into a checker.
  1215. if (nodeBuilder.getContext().blockCount() >= AMgr.options.maxBlockVisitOnPath) {
  1216. static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
  1217. const ExplodedNode *Sink =
  1218. nodeBuilder.generateSink(Pred->getState(), Pred, &tag);
  1219. // Check if we stopped at the top level function or not.
  1220. // Root node should have the location context of the top most function.
  1221. const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
  1222. const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  1223. const LocationContext *RootLC =
  1224. (*G.roots_begin())->getLocation().getLocationContext();
  1225. if (RootLC->getCurrentStackFrame() != CalleeSF) {
  1226. Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());
  1227. // Re-run the call evaluation without inlining it, by storing the
  1228. // no-inlining policy in the state and enqueuing the new work item on
  1229. // the list. Replay should almost never fail. Use the stats to catch it
  1230. // if it does.
  1231. if ((!AMgr.options.NoRetryExhausted &&
  1232. replayWithoutInlining(Pred, CalleeLC)))
  1233. return;
  1234. NumMaxBlockCountReachedInInlined++;
  1235. } else
  1236. NumMaxBlockCountReached++;
  1237. // Make sink nodes as exhausted(for stats) only if retry failed.
  1238. Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  1239. }
  1240. }
  1241. //===----------------------------------------------------------------------===//
  1242. // Branch processing.
  1243. //===----------------------------------------------------------------------===//
  1244. /// RecoverCastedSymbol - A helper function for ProcessBranch that is used
  1245. /// to try to recover some path-sensitivity for casts of symbolic
  1246. /// integers that promote their values (which are currently not tracked well).
  1247. /// This function returns the SVal bound to Condition->IgnoreCasts if all the
  1248. // cast(s) did was sign-extend the original value.
  1249. static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
  1250. ProgramStateRef state,
  1251. const Stmt *Condition,
  1252. const LocationContext *LCtx,
  1253. ASTContext &Ctx) {
  1254. const Expr *Ex = dyn_cast<Expr>(Condition);
  1255. if (!Ex)
  1256. return UnknownVal();
  1257. uint64_t bits = 0;
  1258. bool bitsInit = false;
  1259. while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
  1260. QualType T = CE->getType();
  1261. if (!T->isIntegralOrEnumerationType())
  1262. return UnknownVal();
  1263. uint64_t newBits = Ctx.getTypeSize(T);
  1264. if (!bitsInit || newBits < bits) {
  1265. bitsInit = true;
  1266. bits = newBits;
  1267. }
  1268. Ex = CE->getSubExpr();
  1269. }
  1270. // We reached a non-cast. Is it a symbolic value?
  1271. QualType T = Ex->getType();
  1272. if (!bitsInit || !T->isIntegralOrEnumerationType() ||
  1273. Ctx.getTypeSize(T) > bits)
  1274. return UnknownVal();
  1275. return state->getSVal(Ex, LCtx);
  1276. }
  1277. #ifndef NDEBUG
  1278. static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  1279. while (Condition) {
  1280. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1281. if (!BO || !BO->isLogicalOp()) {
  1282. return Condition;
  1283. }
  1284. Condition = BO->getRHS()->IgnoreParens();
  1285. }
  1286. return nullptr;
  1287. }
  1288. #endif
  1289. // Returns the condition the branch at the end of 'B' depends on and whose value
  1290. // has been evaluated within 'B'.
  1291. // In most cases, the terminator condition of 'B' will be evaluated fully in
  1292. // the last statement of 'B'; in those cases, the resolved condition is the
  1293. // given 'Condition'.
  1294. // If the condition of the branch is a logical binary operator tree, the CFG is
  1295. // optimized: in that case, we know that the expression formed by all but the
  1296. // rightmost leaf of the logical binary operator tree must be true, and thus
  1297. // the branch condition is at this point equivalent to the truth value of that
  1298. // rightmost leaf; the CFG block thus only evaluates this rightmost leaf
  1299. // expression in its final statement. As the full condition in that case was
  1300. // not evaluated, and is thus not in the SVal cache, we need to use that leaf
  1301. // expression to evaluate the truth value of the condition in the current state
  1302. // space.
  1303. static const Stmt *ResolveCondition(const Stmt *Condition,
  1304. const CFGBlock *B) {
  1305. if (const Expr *Ex = dyn_cast<Expr>(Condition))
  1306. Condition = Ex->IgnoreParens();
  1307. const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  1308. if (!BO || !BO->isLogicalOp())
  1309. return Condition;
  1310. assert(!B->getTerminator().isTemporaryDtorsBranch() &&
  1311. "Temporary destructor branches handled by processBindTemporary.");
  1312. // For logical operations, we still have the case where some branches
  1313. // use the traditional "merge" approach and others sink the branch
  1314. // directly into the basic blocks representing the logical operation.
  1315. // We need to distinguish between those two cases here.
  1316. // The invariants are still shifting, but it is possible that the
  1317. // last element in a CFGBlock is not a CFGStmt. Look for the last
  1318. // CFGStmt as the value of the condition.
  1319. CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  1320. for (; I != E; ++I) {
  1321. CFGElement Elem = *I;
  1322. Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
  1323. if (!CS)
  1324. continue;
  1325. const Stmt *LastStmt = CS->getStmt();
  1326. assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
  1327. return LastStmt;
  1328. }
  1329. llvm_unreachable("could not resolve condition");
  1330. }
/// Evaluate a branch terminator: assume the condition true and false in turn,
/// generating successor nodes on each feasible edge (DstT / DstF).
void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
         "CXXBindTemporaryExprs are handled by processBindTemporary.");
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)".  Only the true edge is
  // feasible in that case.
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }

  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  // For short-circuiting logical operators only the rightmost leaf was
  // actually evaluated in this block; resolve to that statement so its
  // value can be found in the environment.
  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  // Let checkers inspect (and possibly sink) the branch condition first.
  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity.  Right now casts of symbolic
          // integers that promote their values are currently not tracked well.
          // If 'Condition' is such an expression, try and recover the
          // underlying value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up: take both edges with the
    // unmodified state.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
    if (builder.isFeasible(true)) {
      if (StTrue)
        builder.generateNode(StTrue, true, PredI);
      else
        builder.markInfeasible(true);
    }

    // Process the false branch.
    if (builder.isFeasible(false)) {
      if (StFalse)
        builder.generateNode(StFalse, false, PredI);
      else
        builder.markInfeasible(false);
    }
  }
  currBldrCtx = nullptr;
}
/// The GDM component containing the set of global variables which have been
/// previously initialized with explicit initializers.
/// Consulted and updated by processStaticInitializer below.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
                                 llvm::ImmutableSet<const VarDecl *>)
  1417. void ExprEngine::processStaticInitializer(const DeclStmt *DS,
  1418. NodeBuilderContext &BuilderCtx,
  1419. ExplodedNode *Pred,
  1420. clang::ento::ExplodedNodeSet &Dst,
  1421. const CFGBlock *DstT,
  1422. const CFGBlock *DstF) {
  1423. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1424. currBldrCtx = &BuilderCtx;
  1425. const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  1426. ProgramStateRef state = Pred->getState();
  1427. bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  1428. BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);
  1429. if (!initHasRun) {
  1430. state = state->add<InitializedGlobalsSet>(VD);
  1431. }
  1432. builder.generateNode(state, initHasRun, Pred);
  1433. builder.markInfeasible(!initHasRun);
  1434. currBldrCtx = nullptr;
  1435. }
/// processIndirectGoto - Called by CoreEngine.  Used to generate successor
///  nodes by processing the 'effects' of a computed goto jump.
void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {
  ProgramStateRef state = builder.getState();
  SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());

  // Three possibilities:
  //
  //   (1) We know the computed label.
  //   (2) The label is NULL (or some other constant), or Undefined.
  //   (3) We have no clue about the label.  Dispatch to all targets.
  //

  typedef IndirectGotoNodeBuilder::iterator iterator;

  if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
    // Case (1): jump only to the successor matching the known label.
    const LabelDecl *L = LV->getLabel();

    for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
      if (I.getLabel() == L) {
        builder.generateNode(I, state);
        return;
      }
    }

    llvm_unreachable("No block with label.");
  }

  if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
    // Case (2): currently modelled as a dead end — no node is generated
    // until checker support for flagging this is added (see FIXME).
    // Dispatch to the first target and mark it as a sink.
    //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
    // FIXME: add checker visit.
    //    UndefBranches.insert(N);
    return;
  }

  // Case (3): this is really a catch-all.  We don't support symbolics yet.
  // FIXME: Implement dispatch for symbolic pointers.

  for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
    builder.generateNode(I, state);
}
// Disabled helper for the assertion mentioned in processEndOfFunction:
// returns true when no initialized temporary recorded in the node's state
// was created in the node's own stack frame, dumping any offenders.  Kept
// under '#if 0' because lifetime-extended temporaries are not modelled
// correctly yet (see the FIXME at processEndOfFunction).
#if 0
static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
  const StackFrameContext* Frame = Pred.getStackFrame();
  const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
      Pred.getState()->get<InitializedTemporariesSet>();
  return std::find_if(Set.begin(), Set.end(),
                      [&](const CXXBindTemporaryContext &Ctx) {
                        if (Ctx.second == Frame) {
                          Ctx.first->dump();
                          llvm::errs() << "\n";
                        }
           return Ctx.second == Frame;
         }) == Set.end();
}
#endif
  1485. /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path
  1486. /// nodes when the control reaches the end of a function.
  1487. void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
  1488. ExplodedNode *Pred) {
  1489. // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)).
  1490. // We currently cannot enable this assert, as lifetime extended temporaries
  1491. // are not modelled correctly.
  1492. PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  1493. StateMgr.EndPath(Pred->getState());
  1494. ExplodedNodeSet Dst;
  1495. if (Pred->getLocationContext()->inTopFrame()) {
  1496. // Remove dead symbols.
  1497. ExplodedNodeSet AfterRemovedDead;
  1498. removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);
  1499. // Notify checkers.
  1500. for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
  1501. E = AfterRemovedDead.end(); I != E; ++I) {
  1502. getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
  1503. }
  1504. } else {
  1505. getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  1506. }
  1507. Engine.enqueueEndOfFunction(Dst);
  1508. }
/// ProcessSwitch - Called by CoreEngine.  Used to generate successor
///  nodes by processing the 'effects' of a switch statement.
void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  typedef SwitchNodeBuilder::iterator iterator;
  ProgramStateRef state = builder.getState();
  const Expr *CondE = builder.getCondition();
  SVal  CondV_untested = state->getSVal(CondE, builder.getLocationContext());

  if (CondV_untested.isUndef()) {
    // Switching on an undefined value: currently a dead end; checker
    // reporting is still a FIXME.
    //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
    // FIXME: add checker
    //UndefBranches.insert(N);

    return;
  }
  DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();

  // DefaultSt accumulates the assumptions "condition matches no case seen
  // so far"; it becomes null as soon as the default edge is infeasible.
  ProgramStateRef DefaultSt = state;

  iterator I = builder.begin(), EI = builder.end();
  // With no cases at all, only the default edge exists.
  bool defaultIsFeasible = I == EI;

  for ( ; I != EI; ++I) {
    // Successor may be pruned out during CFG construction.
    if (!I.getBlock())
      continue;

    const CaseStmt *Case = I.getCase();

    // Evaluate the LHS of the case value.
    llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
    assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));

    // Get the RHS of the case, if it exists.
    llvm::APSInt V2;
    if (const Expr *E = Case->getRHS())
      V2 = E->EvaluateKnownConstInt(getContext());
    else
      V2 = V1;

    // FIXME: Eventually we should replace the logic below with a range
    //  comparison, rather than concretize the values within the range.
    //  This should be easy once we have "ranges" for NonLVals.

    do {
      // Test condition == CaseVal for each concrete value in [V1, V2].
      nonloc::ConcreteInt CaseVal(getBasicVals().getValue(V1));
      DefinedOrUnknownSVal Res = svalBuilder.evalEQ(DefaultSt ? DefaultSt : state,
                                               CondV, CaseVal);

      // Now "assume" that the case matches.
      if (ProgramStateRef stateNew = state->assume(Res, true)) {
        builder.generateCaseStmtNode(I, stateNew);

        // If CondV evaluates to a constant, then we know that this
        // is the *only* case that we can take, so stop evaluating the
        // others.
        if (CondV.getAs<nonloc::ConcreteInt>())
          return;
      }

      // Now "assume" that the case doesn't match.  Add this state
      // to the default state (if it is feasible).
      if (DefaultSt) {
        if (ProgramStateRef stateNew = DefaultSt->assume(Res, false)) {
          defaultIsFeasible = true;
          DefaultSt = stateNew;
        }
        else {
          defaultIsFeasible = false;
          DefaultSt = nullptr;
        }
      }

      // Concretize the next value in the range.
      if (V1 == V2)
        break;

      ++V1;
      assert (V1 <= V2);

    } while (true);
  }

  if (!defaultIsFeasible)
    return;

  // If we have switch(enum value), the default branch is not
  // feasible if all of the enum constants not covered by 'case:' statements
  // are not feasible values for the switch condition.
  //
  // Note that this isn't as accurate as it could be.  Even if there isn't
  // a case for a particular enum value as long as that enum value isn't
  // feasible then it shouldn't be considered for making 'default:' reachable.
  const SwitchStmt *SS = builder.getSwitch();
  const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  if (CondExpr->getType()->getAs<EnumType>()) {
    if (SS->isAllEnumCasesCovered())
      return;
  }

  builder.generateDefaultCaseNode(DefaultSt);
}
  1592. //===----------------------------------------------------------------------===//
  1593. // Transfer functions: Loads and stores.
  1594. //===----------------------------------------------------------------------===//
/// Transfer function shared by DeclRefExpr and MemberExpr references to a
/// named declaration: binds the lvalue (or value) of the referenced decl
/// to the expression 'Ex'.
void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    // C permits "extern void v", and if you cast the address to a valid type,
    // you can even do things with it. We simply pretend
    assert(Ex->isGLValue() || VD->getType()->isVoidType());
    const LocationContext *LocCtxt = Pred->getLocationContext();
    const Decl *D = LocCtxt->getDecl();
    const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr;
    const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex);
    SVal V;
    bool CaptureByReference = false;
    // Inside an inlined lambda body, a reference to a captured variable must
    // be routed through the corresponding field of the lambda object.
    if (AMgr.options.shouldInlineLambdas() && DeclRefEx &&
        DeclRefEx->refersToEnclosingVariableOrCapture() && MD &&
        MD->getParent()->isLambda()) {
      // Lookup the field of the lambda.
      const CXXRecordDecl *CXXRec = MD->getParent();
      llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;
      FieldDecl *LambdaThisCaptureField;
      CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField);
      const FieldDecl *FD = LambdaCaptureFields[VD];
      Loc CXXThis =
          svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame());
      SVal CXXThisVal = state->getSVal(CXXThis);
      V = state->getLValue(FD, CXXThisVal);
      // A by-reference capture of a non-reference variable: one extra
      // dereference is needed below to reach the referenced storage.
      if (FD->getType()->isReferenceType() &&
          !VD->getType()->isReferenceType())
        CaptureByReference = true;
    } else {
      V = state->getLValue(VD, LocCtxt);
    }
    // For references, the 'lvalue' is the pointer address stored in the
    // reference region.
    if (VD->getType()->isReferenceType() || CaptureByReference) {
      if (const MemRegion *R = V.getAsRegion())
        V = state->getSVal(R);
      else
        V = UnknownVal();
    }

    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
    // Enum constants are rvalues with a known integer value.
    assert(!Ex->isGLValue());
    SVal V = svalBuilder.makeIntVal(ED->getInitVal());
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
    return;
  }
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    SVal V = svalBuilder.getFunctionPointer(FD);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (isa<FieldDecl>(D)) {
    // FIXME: Compute lvalue of field pointers-to-member.
    // Right now we just use a non-null void pointer, so that it gives proper
    // results in boolean contexts.
    SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
                                          currBldrCtx->blockCount());
    state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }

  llvm_unreachable("Support for this Decl not implemented.");
}
  1667. /// VisitArraySubscriptExpr - Transfer function for array accesses
  1668. void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
  1669. ExplodedNode *Pred,
  1670. ExplodedNodeSet &Dst){
  1671. const Expr *Base = A->getBase()->IgnoreParens();
  1672. const Expr *Idx = A->getIdx()->IgnoreParens();
  1673. ExplodedNodeSet checkerPreStmt;
  1674. getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this);
  1675. StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx);
  1676. assert(A->isGLValue() ||
  1677. (!AMgr.getLangOpts().CPlusPlus &&
  1678. A->getType().isCForbiddenLValueType()));
  1679. for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(),
  1680. ei = checkerPreStmt.end(); it != ei; ++it) {
  1681. const LocationContext *LCtx = (*it)->getLocationContext();
  1682. ProgramStateRef state = (*it)->getState();
  1683. SVal V = state->getLValue(A->getType(),
  1684. state->getSVal(Idx, LCtx),
  1685. state->getSVal(Base, LCtx));
  1686. Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr,
  1687. ProgramPoint::PostLValueKind);
  1688. }
  1689. }
  1690. /// VisitMemberExpr - Transfer function for member expressions.
  1691. void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
  1692. ExplodedNodeSet &Dst) {
  1693. // FIXME: Prechecks eventually go in ::Visit().
  1694. ExplodedNodeSet CheckedSet;
  1695. getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
  1696. ExplodedNodeSet EvalSet;
  1697. ValueDecl *Member = M->getMemberDecl();
  1698. // Handle static member variables and enum constants accessed via
  1699. // member syntax.
  1700. if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
  1701. ExplodedNodeSet Dst;
  1702. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1703. I != E; ++I) {
  1704. VisitCommonDeclRefExpr(M, Member, Pred, EvalSet);
  1705. }
  1706. } else {
  1707. StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
  1708. ExplodedNodeSet Tmp;
  1709. for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
  1710. I != E; ++I) {
  1711. ProgramStateRef state = (*I)->getState();
  1712. const LocationContext *LCtx = (*I)->getLocationContext();
  1713. Expr *BaseExpr = M->getBase();
  1714. // Handle C++ method calls.
  1715. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
  1716. if (MD->isInstance())
  1717. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1718. SVal MDVal = svalBuilder.getFunctionPointer(MD);
  1719. state = state->BindExpr(M, LCtx, MDVal);
  1720. Bldr.generateNode(M, *I, state);
  1721. continue;
  1722. }
  1723. // Handle regular struct fields / member variables.
  1724. state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
  1725. SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
  1726. FieldDecl *field = cast<FieldDecl>(Member);
  1727. SVal L = state->getLValue(field, baseExprVal);
  1728. if (M->isGLValue() || M->getType()->isArrayType()) {
  1729. // We special-case rvalues of array type because the analyzer cannot
  1730. // reason about them, since we expect all regions to be wrapped in Locs.
  1731. // We instead treat these as lvalues and assume that they will decay to
  1732. // pointers as soon as they are used.
  1733. if (!M->isGLValue()) {
  1734. assert(M->getType()->isArrayType());
  1735. const ImplicitCastExpr *PE =
  1736. dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
  1737. if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
  1738. llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
  1739. }
  1740. }
  1741. if (field->getType()->isReferenceType()) {
  1742. if (const MemRegion *R = L.getAsRegion())
  1743. L = state->getSVal(R);
  1744. else
  1745. L = UnknownVal();
  1746. }
  1747. Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
  1748. ProgramPoint::PostLValueKind);
  1749. } else {
  1750. Bldr.takeNodes(*I);
  1751. evalLoad(Tmp, M, M, *I, state, L);
  1752. Bldr.addNodes(Tmp);
  1753. }
  1754. }
  1755. }
  1756. getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
  1757. }
  1758. namespace {
  1759. class CollectReachableSymbolsCallback final : public SymbolVisitor {
  1760. InvalidatedSymbols Symbols;
  1761. public:
  1762. CollectReachableSymbolsCallback(ProgramStateRef State) {}
  1763. const InvalidatedSymbols &getSymbols() const { return Symbols; }
  1764. bool VisitSymbol(SymbolRef Sym) override {
  1765. Symbols.insert(Sym);
  1766. return true;
  1767. }
  1768. };
  1769. } // end anonymous namespace
// A value escapes in three possible cases:
// (1) We are binding to something that is not a memory region.
// (2) We are binding to a MemrRegion that does not have stack storage.
// (3) We are binding to a MemRegion with stack storage that the store
// does not understand.
ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
                                                        SVal Loc, SVal Val) {
  // Are we storing to something that causes the value to "escape"?
  bool escapes = true;

  // TODO: Move to StoreManager.
  if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
    escapes = !regionLoc->getRegion()->hasStackStorage();

    if (!escapes) {
      // To test (3), generate a new state with the binding added.  If it is
      // the same state, then it escapes (since the store cannot represent
      // the binding).
      // Do this only if we know that the store is not supposed to generate the
      // same state.
      SVal StoredVal = State->getSVal(regionLoc->getRegion());
      if (StoredVal != Val)
        escapes = (State == (State->bindLoc(*regionLoc, Val)));
    }
  }

  // If our store can represent the binding and we aren't storing to something
  // that doesn't have local storage then just return and have the simulation
  // state continue as is.
  if (!escapes)
      return State;

  // Otherwise, find all symbols referenced by 'val' that we are tracking
  // and stop tracking them.
  CollectReachableSymbolsCallback Scanner =
      State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  // Let checkers adjust their tracking state for the escaped symbols.
  State = getCheckerManager().runCheckersForPointerEscape(State,
                                                          EscapedSymbols,
                                                          /*CallEvent*/ nullptr,
                                                          PSK_EscapeOnBind,
                                                          nullptr);

  return State;
}
  1810. ProgramStateRef
  1811. ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
  1812. const InvalidatedSymbols *Invalidated,
  1813. ArrayRef<const MemRegion *> ExplicitRegions,
  1814. ArrayRef<const MemRegion *> Regions,
  1815. const CallEvent *Call,
  1816. RegionAndSymbolInvalidationTraits &ITraits) {
  1817. if (!Invalidated || Invalidated->empty())
  1818. return State;
  1819. if (!Call)
  1820. return getCheckerManager().runCheckersForPointerEscape(State,
  1821. *Invalidated,
  1822. nullptr,
  1823. PSK_EscapeOther,
  1824. &ITraits);
  1825. // If the symbols were invalidated by a call, we want to find out which ones
  1826. // were invalidated directly due to being arguments to the call.
  1827. InvalidatedSymbols SymbolsDirectlyInvalidated;
  1828. for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
  1829. E = ExplicitRegions.end(); I != E; ++I) {
  1830. if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
  1831. SymbolsDirectlyInvalidated.insert(R->getSymbol());
  1832. }
  1833. InvalidatedSymbols SymbolsIndirectlyInvalidated;
  1834. for (InvalidatedSymbols::const_iterator I=Invalidated->begin(),
  1835. E = Invalidated->end(); I!=E; ++I) {
  1836. SymbolRef sym = *I;
  1837. if (SymbolsDirectlyInvalidated.count(sym))
  1838. continue;
  1839. SymbolsIndirectlyInvalidated.insert(sym);
  1840. }
  1841. if (!SymbolsDirectlyInvalidated.empty())
  1842. State = getCheckerManager().runCheckersForPointerEscape(State,
  1843. SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);
  1844. // Notify about the symbols that get indirectly invalidated by the call.
  1845. if (!SymbolsIndirectlyInvalidated.empty())
  1846. State = getCheckerManager().runCheckersForPointerEscape(State,
  1847. SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);
  1848. return State;
  1849. }
/// evalBind - Handle the semantics of binding a value to a specific location.
///  This method is used by evalStore and (soon) VisitDeclStmt, and others.
void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
                          ExplodedNode *Pred,
                          SVal location, SVal Val,
                          bool atDeclInit, const ProgramPoint *PP) {
  const LocationContext *LC = Pred->getLocationContext();
  // Default program point if the caller did not supply one.
  PostStmt PS(StoreE, LC);
  if (!PP)
    PP = &PS;

  // Do a previsit of the bind.
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
                                         StoreE, *this, *PP);

  StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);

  // If the location is not a 'Loc', it will already be handled by
  // the checkers.  There is nothing left to do.
  if (!location.getAs<Loc>()) {
    const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
                                     /*tag*/nullptr);
    // NOTE(review): this path uses Pred's state rather than the CheckedSet
    // nodes' states — presumably intentional since the checkers already
    // handled the bind; confirm before changing.
    ProgramStateRef state = Pred->getState();
    state = processPointerEscapedOnBind(state, location, Val);
    Bldr.generateNode(L, state, Pred);
    return;
  }

  for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
       I!=E; ++I) {
    ExplodedNode *PredI = *I;
    ProgramStateRef state = PredI->getState();

    // Let escape tracking run before the actual store.
    state = processPointerEscapedOnBind(state, location, Val);

    // When binding the value, pass on the hint that this is a initialization.
    // For initializations, we do not need to inform clients of region
    // changes.
    state = state->bindLoc(location.castAs<Loc>(),
                           Val, /* notifyChanges = */ !atDeclInit);

    const MemRegion *LocReg = nullptr;
    if (Optional<loc::MemRegionVal> LocRegVal =
            location.getAs<loc::MemRegionVal>()) {
      LocReg = LocRegVal->getRegion();
    }

    const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
    Bldr.generateNode(L, state, PredI);
  }
}
  1894. /// evalStore - Handle the semantics of a store via an assignment.
  1895. /// @param Dst The node set to store generated state nodes
  1896. /// @param AssignE The assignment expression if the store happens in an
  1897. /// assignment.
  1898. /// @param LocationE The location expression that is stored to.
  1899. /// @param state The current simulation state
  1900. /// @param location The location to store the value
  1901. /// @param Val The value to be stored
  1902. void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
  1903. const Expr *LocationE,
  1904. ExplodedNode *Pred,
  1905. ProgramStateRef state, SVal location, SVal Val,
  1906. const ProgramPointTag *tag) {
  1907. // Proceed with the store. We use AssignE as the anchor for the PostStore
  1908. // ProgramPoint if it is non-NULL, and LocationE otherwise.
  1909. const Expr *StoreE = AssignE ? AssignE : LocationE;
  1910. // Evaluate the location (checks for bad dereferences).
  1911. ExplodedNodeSet Tmp;
  1912. evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);
  1913. if (Tmp.empty())
  1914. return;
  1915. if (location.isUndef())
  1916. return;
  1917. for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
  1918. evalBind(Dst, StoreE, *NI, location, Val, false);
  1919. }
/// Perform a load from 'location', binding the loaded value to 'BoundEx'.
/// Loads through references are split into two loads: one for the referenced
/// address and one for the referenced value.
void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
                          const Expr *NodeEx,
                          const Expr *BoundEx,
                          ExplodedNode *Pred,
                          ProgramStateRef state,
                          SVal location,
                          const ProgramPointTag *tag,
                          QualType LoadTy)
{
  assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");

  // Are we loading from a region?  This actually results in two loads; one
  // to fetch the address of the referenced value and one to fetch the
  // referenced value.
  if (const TypedValueRegion *TR =
        dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {

    QualType ValTy = TR->getValueType();
    if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
      static SimpleProgramPointTag
             loadReferenceTag(TagProviderName, "Load Reference");
      // First load: fetch the pointer stored in the reference region.
      ExplodedNodeSet Tmp;
      evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
                     location, &loadReferenceTag,
                     getContext().getPointerType(RT->getPointeeType()));

      // Perform the load from the referenced value.
      for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
        state = (*I)->getState();
        location = state->getSVal(BoundEx, (*I)->getLocationContext());
        evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
      }
      return;
    }
  }

  evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
}
/// Shared implementation for loads: run the location checks, then read the
/// value at \p location (as type \p LoadTy, defaulting to the type of
/// \p BoundEx) and bind it to \p BoundEx with a PostLoad program point.
void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
                                const Expr *NodeEx,
                                const Expr *BoundEx,
                                ExplodedNode *Pred,
                                ProgramStateRef state,
                                SVal location,
                                const ProgramPointTag *tag,
                                QualType LoadTy) {
  assert(NodeEx);
  assert(BoundEx);
  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  if (Tmp.empty())
    return;

  // NOTE(review): the builder is constructed before the undef check —
  // presumably so the checker-produced nodes in Tmp are still adopted into
  // Dst even when we bail out below; confirm against StmtNodeBuilder's
  // semantics before reordering.
  StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  if (location.isUndef())
    return;

  // Proceed with the load.
  for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
    state = (*NI)->getState();
    const LocationContext *LCtx = (*NI)->getLocationContext();

    // An invalid location yields UnknownVal; otherwise read from the store.
    SVal V = UnknownVal();
    if (location.isValid()) {
      if (LoadTy.isNull())
        LoadTy = BoundEx->getType();
      V = state->getSVal(location.castAs<Loc>(), LoadTy);
    }

    Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
                      ProgramPoint::PostLoadKind);
  }
}
/// Run the pre-load/pre-store checker callbacks for an access to \p location.
/// Nodes that survive the checks are placed in \p Dst; an empty \p Dst means
/// the access was sunk (e.g. a reported bad dereference).
void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
                              const Stmt *NodeEx,
                              const Stmt *BoundEx,
                              ExplodedNode *Pred,
                              ProgramStateRef state,
                              SVal location,
                              const ProgramPointTag *tag,
                              bool isLoad) {
  StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
  // Early checks for performance reason.
  if (location.isUnknown()) {
    return;
  }

  ExplodedNodeSet Src;
  // Reclaim Pred from the top-level builder so the checker-produced
  // successors (added below via addNodes) are what ends up in Dst.
  BldrTop.takeNodes(Pred);
  StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  if (Pred->getState() != state) {
    // Associate this new state with an ExplodedNode.
    // FIXME: If I pass null tag, the graph is incorrect, e.g for
    //   int *p;
    //   p = 0;
    //   *p = 0xDEADBEEF;
    // "p = 0" is not noted as "Null pointer value stored to 'p'" but
    // instead "int *p" is noted as
    // "Variable 'p' initialized to a null pointer value"

    // NOTE(review): this static tag shadows the 'tag' parameter, which is
    // otherwise unused in this function — confirm that is intentional.
    static SimpleProgramPointTag tag(TagProviderName, "Location");
    Bldr.generateNode(NodeEx, Pred, state, &tag);
  }
  ExplodedNodeSet Tmp;
  getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
                                             NodeEx, BoundEx, *this);
  BldrTop.addNodes(Tmp);
}
  2019. std::pair<const ProgramPointTag *, const ProgramPointTag*>
  2020. ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  2021. static SimpleProgramPointTag
  2022. eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
  2023. "Eagerly Assume True"),
  2024. eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
  2025. "Eagerly Assume False");
  2026. return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
  2027. &eagerlyAssumeBinOpBifurcationFalse);
  2028. }
  2029. void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
  2030. ExplodedNodeSet &Src,
  2031. const Expr *Ex) {
  2032. StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
  2033. for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
  2034. ExplodedNode *Pred = *I;
  2035. // Test if the previous node was as the same expression. This can happen
  2036. // when the expression fails to evaluate to anything meaningful and
  2037. // (as an optimization) we don't generate a node.
  2038. ProgramPoint P = Pred->getLocation();
  2039. if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
  2040. continue;
  2041. }
  2042. ProgramStateRef state = Pred->getState();
  2043. SVal V = state->getSVal(Ex, Pred->getLocationContext());
  2044. Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
  2045. if (SEV && SEV->isExpression()) {
  2046. const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
  2047. geteagerlyAssumeBinOpBifurcationTags();
  2048. ProgramStateRef StateTrue, StateFalse;
  2049. std::tie(StateTrue, StateFalse) = state->assume(*SEV);
  2050. // First assume that the condition is true.
  2051. if (StateTrue) {
  2052. SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
  2053. StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
  2054. Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
  2055. }
  2056. // Next, assume that the condition is false.
  2057. if (StateFalse) {
  2058. SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
  2059. StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
  2060. Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
  2061. }
  2062. }
  2063. }
  2064. }
  2065. void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
  2066. ExplodedNodeSet &Dst) {
  2067. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2068. // We have processed both the inputs and the outputs. All of the outputs
  2069. // should evaluate to Locs. Nuke all of their values.
  2070. // FIXME: Some day in the future it would be nice to allow a "plug-in"
  2071. // which interprets the inline asm and stores proper results in the
  2072. // outputs.
  2073. ProgramStateRef state = Pred->getState();
  2074. for (const Expr *O : A->outputs()) {
  2075. SVal X = state->getSVal(O, Pred->getLocationContext());
  2076. assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef.
  2077. if (Optional<Loc> LV = X.getAs<Loc>())
  2078. state = state->bindLoc(*LV, UnknownVal());
  2079. }
  2080. Bldr.generateNode(A, Pred, state);
  2081. }
  2082. void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
  2083. ExplodedNodeSet &Dst) {
  2084. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  2085. Bldr.generateNode(A, Pred, Pred->getState());
  2086. }
  2087. //===----------------------------------------------------------------------===//
  2088. // Visualization.
  2089. //===----------------------------------------------------------------------===//
#ifndef NDEBUG
// Stashed by ViewGraph() so the static DOTGraphTraits callbacks below can
// reach the engine and the SourceManager while a graph is being rendered.
static ExprEngine* GraphPrintCheckerState;
static SourceManager* GraphPrintSourceManager;

namespace llvm {
// GraphViz printing traits for exploded-graph nodes.  Each node's label
// shows its program point, the pretty-printed statement when there is one,
// the program state, and the program-point tag (if any).
template<>
struct DOTGraphTraits<ExplodedNode*> :
  public DefaultDOTGraphTraits {

  DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}

  // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  // work.
  static std::string getNodeAttributes(const ExplodedNode *N, void*) {

#if 0
      // FIXME: Replace with a general scheme to tell if the node is
      // an error node.
    if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
        GraphPrintCheckerState->isExplicitNullDeref(N) ||
        GraphPrintCheckerState->isUndefDeref(N) ||
        GraphPrintCheckerState->isUndefStore(N) ||
        GraphPrintCheckerState->isUndefControlFlow(N) ||
        GraphPrintCheckerState->isUndefResult(N) ||
        GraphPrintCheckerState->isBadCall(N) ||
        GraphPrintCheckerState->isUndefArg(N))
      return "color=\"red\",style=\"filled\"";

    if (GraphPrintCheckerState->isNoReturnCall(N))
      return "color=\"blue\",style=\"filled\"";
#endif
    return "";
  }

  // Append "\lline=<n> col=<m>\l" for file locations (macro locations are
  // rendered via their expansion position).
  static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
    if (SLoc.isFileID()) {
      Out << "\\lline="
          << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
          << " col="
          << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
          << "\\l";
    }
  }

  // Build the complete DOT label for a node.
  static std::string getNodeLabel(const ExplodedNode *N, void*){
    std::string sbuf;
    llvm::raw_string_ostream Out(sbuf);

    // Program Location.
    ProgramPoint Loc = N->getLocation();

    switch (Loc.getKind()) {
      case ProgramPoint::BlockEntranceKind: {
        Out << "Block Entrance: B"
            << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
        // Include the enclosing declaration's name when it has one.
        if (const NamedDecl *ND =
                    dyn_cast<NamedDecl>(Loc.getLocationContext()->getDecl())) {
          Out << " (";
          ND->printName(Out);
          Out << ")";
        }
        break;
      }

      case ProgramPoint::BlockExitKind:
        // BlockExit points are never expected in the graph.
        assert (false);
        break;

      case ProgramPoint::CallEnterKind:
        Out << "CallEnter";
        break;

      case ProgramPoint::CallExitBeginKind:
        Out << "CallExitBegin";
        break;

      case ProgramPoint::CallExitEndKind:
        Out << "CallExitEnd";
        break;

      case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
        Out << "PostStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
        Out << "PreStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::EpsilonKind:
        Out << "Epsilon Point";
        break;

      case ProgramPoint::PreImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PreCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PostCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostInitializerKind: {
        // Print either the initialized member or (for non-member
        // initializers) the initialized type.
        Out << "PostInitializer: ";
        const CXXCtorInitializer *Init =
          Loc.castAs<PostInitializer>().getInitializer();
        if (const FieldDecl *FD = Init->getAnyMember())
          Out << *FD;
        else {
          QualType Ty = Init->getTypeSourceInfo()->getType();
          Ty = Ty.getLocalUnqualifiedType();
          LangOptions LO; // FIXME.
          Ty.print(Out, LO);
        }
        break;
      }

      case ProgramPoint::BlockEdgeKind: {
        const BlockEdge &E = Loc.castAs<BlockEdge>();
        Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
            << E.getDst()->getBlockID() << ')';

        if (const Stmt *T = E.getSrc()->getTerminator()) {
          SourceLocation SLoc = T->getLocStart();

          Out << "\\|Terminator: ";
          LangOptions LO; // FIXME.
          E.getSrc()->printTerminator(Out, LO);

          if (SLoc.isFileID()) {
            Out << "\\lline="
                << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
                << " col="
                << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
          }

          if (isa<SwitchStmt>(T)) {
            // For switch edges, print which case (or default) label the
            // destination block corresponds to.
            const Stmt *Label = E.getDst()->getLabel();

            if (Label) {
              if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
                Out << "\\lcase ";
                LangOptions LO; // FIXME.
                if (C->getLHS())
                  C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));

                // GNU case ranges: "case lo .. hi:".
                if (const Stmt *RHS = C->getRHS()) {
                  Out << " .. ";
                  RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
                }

                Out << ":";
              }
              else {
                assert (isa<DefaultStmt>(Label));
                Out << "\\ldefault:";
              }
            }
            else
              Out << "\\l(implicit) default:";
          }
          else if (isa<IndirectGotoStmt>(T)) {
            // FIXME
          }
          else {
            // Two-way branch: report which successor this edge takes.
            Out << "\\lCondition: ";
            if (*E.getSrc()->succ_begin() == E.getDst())
              Out << "true";
            else
              Out << "false";
          }

          Out << "\\l";
        }

#if 0
          // FIXME: Replace with a general scheme to determine
          // the name of the check.
        if (GraphPrintCheckerState->isUndefControlFlow(N)) {
          Out << "\\|Control-flow based on\\lUndefined value.\\l";
        }
#endif
        break;
      }

      default: {
        // All remaining program-point kinds are statement-based.
        const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
        assert(S != nullptr && "Expecting non-null Stmt");

        Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
        LangOptions LO; // FIXME.
        S->printPretty(Out, nullptr, PrintingPolicy(LO));
        printLocation(Out, S->getLocStart());

        // NOTE(review): the trailing ';' inside the PreStmt/PostLoad string
        // literals looks like a typo (PostStore/PostLValue have none); it is
        // debug-only DOT output and left as-is here — confirm before fixing.
        if (Loc.getAs<PreStmt>())
          Out << "\\lPreStmt\\l;";
        else if (Loc.getAs<PostLoad>())
          Out << "\\lPostLoad\\l;";
        else if (Loc.getAs<PostStore>())
          Out << "\\lPostStore\\l";
        else if (Loc.getAs<PostLValue>())
          Out << "\\lPostLValue\\l";

#if 0
          // FIXME: Replace with a general scheme to determine
          // the name of the check.
        if (GraphPrintCheckerState->isImplicitNullDeref(N))
          Out << "\\|Implicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isExplicitNullDeref(N))
          Out << "\\|Explicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isUndefDeref(N))
          Out << "\\|Dereference of undefialied value.\\l";
        else if (GraphPrintCheckerState->isUndefStore(N))
          Out << "\\|Store to Undefined Loc.";
        else if (GraphPrintCheckerState->isUndefResult(N))
          Out << "\\|Result of operation is undefined.";
        else if (GraphPrintCheckerState->isNoReturnCall(N))
          Out << "\\|Call to function marked \"noreturn\".";
        else if (GraphPrintCheckerState->isBadCall(N))
          Out << "\\|Call to NULL/Undefined.";
        else if (GraphPrintCheckerState->isUndefArg(N))
          Out << "\\|Argument in call is undefined";
#endif
        break;
      }
    }

    // Append node identity, the program state, and the tag (if any).
    ProgramStateRef state = N->getState();
    Out << "\\|StateID: " << (const void*) state.get()
        << " NodeID: " << (const void*) N << "\\|";
    state->printDOT(Out);

    Out << "\\l";

    if (const ProgramPointTag *tag = Loc.getTag()) {
      Out << "\\|Tag: " << tag->getTagDescription();
      Out << "\\l";
    }
    return Out.str();
  }
};
} // end llvm namespace
#endif
  2305. void ExprEngine::ViewGraph(bool trim) {
  2306. #ifndef NDEBUG
  2307. if (trim) {
  2308. std::vector<const ExplodedNode*> Src;
  2309. // Flush any outstanding reports to make sure we cover all the nodes.
  2310. // This does not cause them to get displayed.
  2311. for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I)
  2312. const_cast<BugType*>(*I)->FlushReports(BR);
  2313. // Iterate through the reports and get their nodes.
  2314. for (BugReporter::EQClasses_iterator
  2315. EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
  2316. ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
  2317. if (N) Src.push_back(N);
  2318. }
  2319. ViewGraph(Src);
  2320. }
  2321. else {
  2322. GraphPrintCheckerState = this;
  2323. GraphPrintSourceManager = &getContext().getSourceManager();
  2324. llvm::ViewGraph(*G.roots_begin(), "ExprEngine");
  2325. GraphPrintCheckerState = nullptr;
  2326. GraphPrintSourceManager = nullptr;
  2327. }
  2328. #endif
  2329. }
  2330. void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
  2331. #ifndef NDEBUG
  2332. GraphPrintCheckerState = this;
  2333. GraphPrintSourceManager = &getContext().getSourceManager();
  2334. std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));
  2335. if (!TrimmedG.get())
  2336. llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  2337. else
  2338. llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");
  2339. GraphPrintCheckerState = nullptr;
  2340. GraphPrintSourceManager = nullptr;
  2341. #endif
  2342. }