//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"

using namespace clang;
using namespace ento;
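
// Evaluate a MaterializeTemporaryExpr by making sure a temporary region
// exists for the materialized value and propagating the resulting state.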
void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->GetTemporaryExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
  if (Optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}
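
// Given an l-value that may denote an array, peel off the array types and
// return the location of element zero (recursively, for multidimensional
// arrays). Ty is updated to the innermost element type, and IsArray is set
// whenever an array type was stripped.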
SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
                                       QualType &Ty, bool &IsArray) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    Ty = AT->getElementType();
    LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
    IsArray = true;
  }

  return LValue;
}
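
// Determine the region into which the object denoted by E should be
// constructed, based on the construction context CC. Returns the updated
// state and the target l-value; when no suitable region can be found, a
// temporary region is returned and the corresponding CallOpts flag is set
// to notify the caller.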
std::pair<ProgramStateRef, SVal> ExprEngine::prepareForObjectConstruction(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts) {
  MemRegionManager &MRMgr = getSValBuilder().getRegionManager();

  // See if we're constructing an existing region by looking at the
  // current construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      SVal LValue = State->getLValue(Var, LCtx);
      QualType Ty = Var->getType();
      LValue =
          makeZeroElementRegion(State, LValue, Ty, CallOpts.IsArrayCtorOrDtor);
      State =
          addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, LValue);
      return std::make_pair(State, LValue);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      assert(Init->isAnyMemberInitializer());
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr =
          getSValBuilder().getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      FieldVal = makeZeroElementRegion(State, FieldVal, Ty,
                                       CallOpts.IsArrayCtorOrDtor);
      State = addObjectUnderConstruction(State, Init, LCtx, FieldVal);
      return std::make_pair(State, FieldVal);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().mayInlineCXXAllocator()) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;
            return std::make_pair(
                State,
                loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
                    MR, NE->getType()->getPointeeType())));
          }
          return std::make_pair(State, V);
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        return prepareForObjectConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis.
        // TODO: What exactly happens when we are? Does the temporary object
        // live long enough in the region store in this case? Would checkers
        // think that this object immediately goes out of scope?
        CallOpts.IsTemporaryCtorOrDtor = true;
        SVal V = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
        return std::make_pair(State, V);
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().shouldElideConstructors());
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      const CXXConstructExpr *CE = TCC->getConstructorAfterElision();

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      SVal V;
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      std::tie(State, V) = prepareForObjectConstruction(
          CE, State, LCtx, TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; it is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
        // Remember that we've elided the constructor.
        State = addObjectUnderConstruction(State, CE, LCtx, V);

        // Remember that we've elided the destructor.
        if (BTE)
          State = elideDestructor(State, BTE, LCtx);

        // Instead of materialization, shamelessly return
        // the final object destination.
        if (MTE)
          State = addObjectUnderConstruction(State, MTE, LCtx, V);

        return std::make_pair(State, V);
      } else {
        // Copy elision failed. Revert the changes and proceed as if we have
        // a simple temporary.
        State = PreElideState;
        CallOpts = PreElideCallOpts;
      }
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      SVal V = UnknownVal();

      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          V = loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      if (V.isUnknown())
        V = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      if (MTE)
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      CallOpts.IsTemporaryCtorOrDtor = true;
      return std::make_pair(State, V);
    }
    case ConstructionContext::ArgumentKind: {
      // Function argument constructors. Not implemented yet.
      break;
    }
    }
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return std::make_pair(
      State, loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)));
}
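
// Model a CXXConstructExpr: figure out the region to construct into, run the
// pre-statement and pre-call checker callbacks, evaluate (or inline) the
// constructor, and run the post-call and post-statement callbacks.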
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &destNodes) {
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (Optional<SVal> ElidedTarget =
          getObjectUnderConstruction(State, CE, LCtx)) {
    // We've previously modeled an elidable constructor by pretending that it
    // in fact constructs into the correct target. This constructor can
    // therefore be skipped.
    Target = *ElidedTarget;
    StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
    State = finishObjectConstruction(State, CE, LCtx);
    if (auto L = Target.getAs<Loc>())
      State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
    Bldr.generateNode(CE, Pred, State);
    return;
  }

  // FIXME: Handle arrays, which run the same constructor for every element.
  // For now, we just run the first constructor (which should still invalidate
  // the entire array).

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  switch (CE->getConstructionKind()) {
  case CXXConstructExpr::CK_Complete: {
    std::tie(State, Target) =
        prepareForObjectConstruction(CE, State, LCtx, CC, CallOpts);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase:
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    if (const Stmt *Outer = LCtx->getStackFrame()->getCallSite()) {
      const CXXConstructExpr *OuterCtor = dyn_cast<CXXConstructExpr>(Outer);
      if (OuterCtor) {
        switch (OuterCtor->getConstructionKind()) {
        case CXXConstructExpr::CK_NonVirtualBase:
        case CXXConstructExpr::CK_VirtualBase:
          // Bail out!
          destNodes.Add(Pred);
          return;
        case CXXConstructExpr::CK_Complete:
        case CXXConstructExpr::CK_Delegating:
          break;
        }
      }
    }
    // FALLTHROUGH
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they
    // would be initialized as aggregates without a constructor call, so we
    // may have a base class constructed directly into an initializer list
    // without having the derived-class constructor call on the previous
    // stack frame. Initializer lists may be nested into more initializer
    // lists that correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(CE))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(CE, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    // FALLTHROUGH
  case CXXConstructExpr::CK_Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CE->getConstructionKind() == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual =
          (CE->getConstructionKind() == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, CE->getType(),
                                                         IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(CE, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXConstructorCall> Call =
      CEMgr.getCXXConstructorCall(CE, Target.getAsRegion(), State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, CE, *this);

  // FIXME: Is it possible and/or useful to do this before PreStmt?
  ExplodedNodeSet PreInitialized;
  {
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions,
        // we'll need to invalidate the region before setting a default value,
        // to make sure there aren't any lingering bindings around. This
        // probably needs to happen regardless of whether or not the object is
        // zero-initialized to handle random fields of a placement-initialized
        // object picking up old bindings. We might only want to do it when we
        // need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this
        // shouldn't actually make things worse. Placement new makes this
        // tricky as well, since it's then possible to be initializing one
        // part of a multi-dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;
  StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);

  if (CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      defaultEvalCall(Bldr, *I, *Call, CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    const MemRegion *Target = Call->getCXXThisVal().getAsRegion();
    if (Target && isa<CXXTempObjectRegion>(Target) &&
        Call->getDecl()->getParent()->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(CE, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall, DstEvaluated,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, CE, *this);
}
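
// Model a destructor call for an object of type ObjectType living in region
// Dest: run the pre-call checker callbacks, evaluate (or inline) the
// destructor, and run the post-call callbacks.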
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    const EvalCallOptions &CallOpts) {
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call =
      CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
       I != E; ++I)
    defaultEvalCall(Bldr, *I, *Call, CallOpts);

  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}
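
// Model the call to operator new itself, before the CXXNewExpr is fully
// processed: evaluate the allocator call and stash its return value as an
// object under construction so that VisitCXXNewExpr can pick it up later.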
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getStartLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (auto I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (auto I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value
    // symbol is going to be of the same type (C++ object pointer type).
    // Technically this is not correct because the operator new's prototype
    // always says that it returns a 'void *'. So we should change the type
    // of the symbol, and then evaluate the cast over the symbolic pointer
    // from 'void *' to the object pointer type. But without changing the
    // symbol's type it is breaking too much to evaluate the no-op symbolic
    // cast over it, so we skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option,
    // we can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (auto I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(
        CNE, *getObjectUnderConstruction(I->getState(), CNE, LCtx), Dst, I,
        *this);
  }
}
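
// Model the new-expression itself: pick up the allocator's return value (or
// conjure a symbol for it), bind the resulting pointer to the expression,
// and approximate the array and placement forms of new.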
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().mayInlineCXXAllocator()) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  if (!AMgr.getAnalyzerOptions().mayInlineCXXAllocator()) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checks here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option,
    // we can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (FD) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
            State = State->assume(*dSymVal, true);
    }
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {
    // FIXME: allocating an array requires simulating the constructors.
    // For now, just return a symbolicated region.
    if (const SubRegion *NewReg =
            dyn_cast_or_null<SubRegion>(symVal.getAsRegion())) {
      QualType ObjTy = CNE->getType()->getAs<PointerType>()->getPointeeType();
      const ElementRegion *EleReg =
          getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
      Result = loc::MemRegionVal(EleReg);
    }
    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD && FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}
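
// Delete-expressions are not modeled yet; just propagate the state.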
void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  ProgramStateRef state = Pred->getState();
  Bldr.generateNode(CDE, Pred, state);
}
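
// Model a catch handler by binding a fresh conjured symbol to the exception
// variable, if any.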
void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    Dst.Add(Pred);
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}
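
// Evaluate a CXXThisExpr by loading the current value of 'this' from its
// dedicated region and binding it to the expression.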
void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
      svalBuilder.getRegionManager().getCXXThisRegion(
          getContext().getCanonicalType(TE->getType()),
          LCtx);

  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}
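
// Model a lambda expression by creating a temporary region for the closure
// object and binding each capture into the corresponding field.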
void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
                                               e = LE->capture_init_end();
       i != e; ++i, ++CurField) {
    FieldDecl *FieldForCapture = *CurField;
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      Expr *InitExpr = *i;
      assert(InitExpr && "Capture missing initialization expression");
      InitVal = State->getSVal(InitExpr, LocCtxt);
    } else {
      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound
  // value to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}