// ExprEngineCXX.cpp
  1. //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file defines the C++ expression evaluation engine.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
  13. #include "clang/Analysis/ConstructionContext.h"
  14. #include "clang/AST/DeclCXX.h"
  15. #include "clang/AST/StmtCXX.h"
  16. #include "clang/AST/ParentMap.h"
  17. #include "clang/Basic/PrettyStackTrace.h"
  18. #include "clang/StaticAnalyzer/Core/CheckerManager.h"
  19. #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
  20. #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
  21. using namespace clang;
  22. using namespace ento;
  23. void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
  24. ExplodedNode *Pred,
  25. ExplodedNodeSet &Dst) {
  26. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  27. const Expr *tempExpr = ME->GetTemporaryExpr()->IgnoreParens();
  28. ProgramStateRef state = Pred->getState();
  29. const LocationContext *LCtx = Pred->getLocationContext();
  30. state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  31. Bldr.generateNode(ME, Pred, state);
  32. }
  33. // FIXME: This is the sort of code that should eventually live in a Core
  34. // checker rather than as a special case in ExprEngine.
  35. void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
  36. const CallEvent &Call) {
  37. SVal ThisVal;
  38. bool AlwaysReturnsLValue;
  39. const CXXRecordDecl *ThisRD = nullptr;
  40. if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
  41. assert(Ctor->getDecl()->isTrivial());
  42. assert(Ctor->getDecl()->isCopyOrMoveConstructor());
  43. ThisVal = Ctor->getCXXThisVal();
  44. ThisRD = Ctor->getDecl()->getParent();
  45. AlwaysReturnsLValue = false;
  46. } else {
  47. assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
  48. assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
  49. OO_Equal);
  50. ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
  51. ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
  52. AlwaysReturnsLValue = true;
  53. }
  54. assert(ThisRD);
  55. if (ThisRD->isEmpty()) {
  56. // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
  57. // and bind it and RegionStore would think that the actual value
  58. // in this region at this offset is unknown.
  59. return;
  60. }
  61. const LocationContext *LCtx = Pred->getLocationContext();
  62. ExplodedNodeSet Dst;
  63. Bldr.takeNodes(Pred);
  64. SVal V = Call.getArgSVal(0);
  65. // If the value being copied is not unknown, load from its location to get
  66. // an aggregate rvalue.
  67. if (Optional<Loc> L = V.getAs<Loc>())
  68. V = Pred->getState()->getSVal(*L);
  69. else
  70. assert(V.isUnknownOrUndef());
  71. const Expr *CallExpr = Call.getOriginExpr();
  72. evalBind(Dst, CallExpr, Pred, ThisVal, V, true);
  73. PostStmt PS(CallExpr, LCtx);
  74. for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
  75. I != E; ++I) {
  76. ProgramStateRef State = (*I)->getState();
  77. if (AlwaysReturnsLValue)
  78. State = State->BindExpr(CallExpr, LCtx, ThisVal);
  79. else
  80. State = bindReturnValue(Call, LCtx, State);
  81. Bldr.generateNode(PS, State, *I);
  82. }
  83. }
  84. SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
  85. QualType &Ty, bool &IsArray) {
  86. SValBuilder &SVB = State->getStateManager().getSValBuilder();
  87. ASTContext &Ctx = SVB.getContext();
  88. while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
  89. Ty = AT->getElementType();
  90. LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
  91. IsArray = true;
  92. }
  93. return LValue;
  94. }
/// Find the target region into which the object denoted by \p E should be
/// constructed, based on the construction context \p CC provided by the CFG.
/// Returns the updated program state together with the SVal of the target.
/// Flags in \p CallOpts are set along the way to warn the caller about
/// situations that are modeled imprecisely.
std::pair<ProgramStateRef, SVal> ExprEngine::prepareForObjectConstruction(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts) {
  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // See if we're constructing an existing region by looking at the
  // current construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      // Construction into a local variable declared in a DeclStmt.
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      SVal LValue = State->getLValue(Var, LCtx);
      QualType Ty = Var->getType();
      // For an array variable, descend to the first element; the constructor
      // is modeled on it (see IsArrayCtorOrDtor).
      LValue =
          makeZeroElementRegion(State, LValue, Ty, CallOpts.IsArrayCtorOrDtor);
      State =
          addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, LValue);
      return std::make_pair(State, LValue);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      // Construction of a (possibly indirect) member in a constructor's
      // initializer list; the target is a field of *this.
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      assert(Init->isAnyMemberInitializer());
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr =
          SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }
      QualType Ty = Field->getType();
      FieldVal = makeZeroElementRegion(State, FieldVal, Ty,
                                       CallOpts.IsArrayCtorOrDtor);
      State = addObjectUnderConstruction(State, Init, LCtx, FieldVal);
      return std::make_pair(State, FieldVal);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      // Construction into memory returned by 'operator new', previously
      // stashed as the object under construction for the CXXNewExpr.
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;
            return std::make_pair(
                State, loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
                           MR, NE->getType()->getPointeeType())));
          }
          return std::make_pair(State, V);
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        // Recurse with the call site expression and the caller's
        // construction context.
        return prepareForObjectConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        SVal V = SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC,
                                      RegionTy, currBldrCtx->blockCount());
        return std::make_pair(State, V);
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      const CXXConstructExpr *CE = TCC->getConstructorAfterElision();
      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      SVal V;
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;
      std::tie(State, V) = prepareForObjectConstruction(
          CE, State, LCtx, TCC->getConstructionContextAfterElision(), CallOpts);
      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; it is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
        // Remember that we've elided the constructor.
        State = addObjectUnderConstruction(State, CE, LCtx, V);
        // Remember that we've elided the destructor.
        if (BTE)
          State = elideDestructor(State, BTE, LCtx);
        // Instead of materialization, shamelessly return
        // the final object destination.
        if (MTE)
          State = addObjectUnderConstruction(State, MTE, LCtx, V);
        return std::make_pair(State, V);
      } else {
        // Copy elision failed. Revert the changes and proceed as if we have
        // a simple temporary.
        State = PreElideState;
        CallOpts = PreElideCallOpts;
      }
      // Deliberate fall-through: model the construct as a plain temporary.
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      SVal V = UnknownVal();
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }
        // Temporaries with static or thread storage duration get a dedicated
        // static temporary-object region.
        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          V = loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }
      if (V.isUnknown())
        V = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);
      if (MTE)
        State = addObjectUnderConstruction(State, MTE, LCtx, V);
      CallOpts.IsTemporaryCtorOrDtor = true;
      return std::make_pair(State, V);
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;
      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();
      const CXXBindTemporaryExpr *BTE = ACC->getCXXBindTemporaryExpr();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      SVal V = UnknownVal();
      // Compute the location of the parameter in the future callee stack
      // frame that this argument will initialize, if it can be foreseen.
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC = Caller->getCalleeStackFrame();
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;
        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();
        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;
        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const VarRegion *VR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx));
        if (!VR)
          return None;
        return loc::MemRegionVal(VR);
      };
      // Build the appropriate CallEvent for the call-like expression and
      // record the argument's target as an object under construction.
      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CE, Idx}, LCtx, V);
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CCE, Idx}, LCtx, V);
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {ME, Idx}, LCtx, V);
      }

      assert(!V.isUnknown());

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      return std::make_pair(State, V);
    }
    }
  }
  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return std::make_pair(
      State, loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)));
}
/// Evaluate a CXXConstructExpr: determine the target region (via the
/// construction context), run pre-statement and pre-call checkers, perform
/// zero-initialization if required, evaluate the constructor call itself
/// (inlined, trivially copied, or conservatively), and finally run post-call
/// and post-statement checkers. Handles complete, base, delegating and
/// previously-elided constructors.
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &destNodes) {
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (Optional<SVal> ElidedTarget =
          getObjectUnderConstruction(State, CE, LCtx)) {
    // We've previously modeled an elidable constructor by pretending that it in
    // fact constructs into the correct target. This constructor can therefore
    // be skipped.
    Target = *ElidedTarget;
    StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
    State = finishObjectConstruction(State, CE, LCtx);
    if (auto L = Target.getAs<Loc>())
      State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
    Bldr.generateNode(CE, Pred, State);
    return;
  }

  // FIXME: Handle arrays, which run the same constructor for every element.
  // For now, we just run the first constructor (which should still invalidate
  // the entire array).

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  switch (CE->getConstructionKind()) {
  case CXXConstructExpr::CK_Complete: {
    // A complete object: let the construction context find the target region.
    std::tie(State, Target) =
        prepareForObjectConstruction(CE, State, LCtx, CC, CallOpts);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    LLVM_FALLTHROUGH;
  }
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they would
    // be initialized as aggregates without a constructor call, so we may have
    // a base class constructed directly into an initializer list without
    // having the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(CE))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(CE, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    LLVM_FALLTHROUGH;
  case CXXConstructExpr::CK_Delegating: {
    // Base or delegating constructor: the target is the current object
    // (*this), possibly cast to the base type.
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);
    if (CE->getConstructionKind() == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual =
          (CE->getConstructionKind() == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, CE->getType(),
                                                         IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    // Preparing the target changed the state; emit a tagged intermediate
    // node so the transition is visible in the exploded graph.
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(CE, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXConstructorCall> Call =
      CEMgr.getCXXConstructorCall(CE, Target.getAsRegion(), State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, CE, *this);

  // FIXME: Is it possible and/or useful to do this before PreStmt?
  ExplodedNodeSet PreInitialized;
  {
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions, we'll
        // need to invalidate the region before setting a default value, to make
        // sure there aren't any lingering bindings around. This probably needs
        // to happen regardless of whether or not the object is zero-initialized
        // to handle random fields of a placement-initialized object picking up
        // old bindings. We might only want to do it when we need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this shouldn't
        // actually make things worse. Placement new makes this tricky as well,
        // since it's then possible to be initializing one part of a multi-
        // dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }
      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;
  StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);

  if (CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);
  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      defaultEvalCall(Bldr, *I, *Call, CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn constructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    const MemRegion *Target = Call->getCXXThisVal().getAsRegion();
    if (Target && isa<CXXTempObjectRegion>(Target) &&
        Call->getDecl()->getParent()->isAnyDestructorNoReturn()) {
      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");
      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(CE, N, N->getState());
      }
      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostArgumentCleanup;
  for (auto I : DstEvaluated)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, CE, *this);
}
  535. void ExprEngine::VisitCXXDestructor(QualType ObjectType,
  536. const MemRegion *Dest,
  537. const Stmt *S,
  538. bool IsBaseDtor,
  539. ExplodedNode *Pred,
  540. ExplodedNodeSet &Dst,
  541. const EvalCallOptions &CallOpts) {
  542. assert(S && "A destructor without a trigger!");
  543. const LocationContext *LCtx = Pred->getLocationContext();
  544. ProgramStateRef State = Pred->getState();
  545. const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  546. assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  547. const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  548. // FIXME: There should always be a Decl, otherwise the destructor call
  549. // shouldn't have been added to the CFG in the first place.
  550. if (!DtorDecl) {
  551. // Skip the invalid destructor. We cannot simply return because
  552. // it would interrupt the analysis instead.
  553. static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
  554. // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
  555. PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
  556. NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  557. Bldr.generateNode(PP, Pred->getState(), Pred);
  558. return;
  559. }
  560. CallEventManager &CEMgr = getStateManager().getCallEventManager();
  561. CallEventRef<CXXDestructorCall> Call =
  562. CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);
  563. PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
  564. Call->getSourceRange().getBegin(),
  565. "Error evaluating destructor");
  566. ExplodedNodeSet DstPreCall;
  567. getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
  568. *Call, *this);
  569. ExplodedNodeSet DstInvalidated;
  570. StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  571. for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
  572. I != E; ++I)
  573. defaultEvalCall(Bldr, *I, *Call, CallOpts);
  574. getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
  575. *Call, *this);
  576. }
/// Models the call to 'operator new' that provides storage for a CXXNewExpr.
///
/// The sequence is: run pre-call checkers, evaluate (and possibly inline) the
/// allocator call, record the returned pointer as the object under
/// construction for \p CNE so the later VisitCXXNewExpr can retrieve it, then
/// run the post-call and new-allocator checker callbacks.
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
    CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (auto I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (auto I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because the operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type it
    // is breaking too much to evaluate the no-op symbolic cast over it, so we
    // skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (auto I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(
        CNE, *getObjectUnderConstruction(I->getState(), CNE, LCtx), Dst, I,
        *this);
  }
}
/// Models the CXXNewExpr itself: retrieves (or conjures) the pointer returned
/// by the allocator, handles array-new and reserved placement-new specially,
/// binds the resulting address to the expression, and copies over a
/// non-record initializer value when there is no CXXConstructExpr to do it.
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  // (It was stashed as the object under construction by
  // VisitCXXNewAllocatorCall when allocator inlining is enabled.)
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
    CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checks here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (FD) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
            State = State->assume(*dSymVal, true);
    }
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {
    // FIXME: allocating an array requires simulating the constructors.
    // For now, just return a symbolicated region.
    if (const SubRegion *NewReg =
            dyn_cast_or_null<SubRegion>(symVal.getAsRegion())) {
      QualType ObjTy = CNE->getType()->getAs<PointerType>()->getPointeeType();
      const ElementRegion *EleReg =
          getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
      Result = loc::MemRegionVal(EleReg);
    }
    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD && FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  // A null node from the builder means this state was already explored.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}
  734. void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
  735. ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  736. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  737. ProgramStateRef state = Pred->getState();
  738. Bldr.generateNode(CDE, Pred, state);
  739. }
  740. void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS,
  741. ExplodedNode *Pred,
  742. ExplodedNodeSet &Dst) {
  743. const VarDecl *VD = CS->getExceptionDecl();
  744. if (!VD) {
  745. Dst.Add(Pred);
  746. return;
  747. }
  748. const LocationContext *LCtx = Pred->getLocationContext();
  749. SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
  750. currBldrCtx->blockCount());
  751. ProgramStateRef state = Pred->getState();
  752. state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);
  753. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  754. Bldr.generateNode(CS, Pred, state);
  755. }
  756. void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
  757. ExplodedNodeSet &Dst) {
  758. StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  759. // Get the this object region from StoreManager.
  760. const LocationContext *LCtx = Pred->getLocationContext();
  761. const MemRegion *R =
  762. svalBuilder.getRegionManager().getCXXThisRegion(
  763. getContext().getCanonicalType(TE->getType()),
  764. LCtx);
  765. ProgramStateRef state = Pred->getState();
  766. SVal V = state->getSVal(loc::MemRegionVal(R));
  767. Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
  768. }
  769. void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
  770. ExplodedNodeSet &Dst) {
  771. const LocationContext *LocCtxt = Pred->getLocationContext();
  772. // Get the region of the lambda itself.
  773. const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
  774. LE, LocCtxt);
  775. SVal V = loc::MemRegionVal(R);
  776. ProgramStateRef State = Pred->getState();
  777. // If we created a new MemRegion for the lambda, we should explicitly bind
  778. // the captures.
  779. CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  780. for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
  781. e = LE->capture_init_end();
  782. i != e; ++i, ++CurField) {
  783. FieldDecl *FieldForCapture = *CurField;
  784. SVal FieldLoc = State->getLValue(FieldForCapture, V);
  785. SVal InitVal;
  786. if (!FieldForCapture->hasCapturedVLAType()) {
  787. Expr *InitExpr = *i;
  788. assert(InitExpr && "Capture missing initialization expression");
  789. InitVal = State->getSVal(InitExpr, LocCtxt);
  790. } else {
  791. // The field stores the length of a captured variable-length array.
  792. // These captures don't have initialization expressions; instead we
  793. // get the length from the VLAType size expression.
  794. Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
  795. InitVal = State->getSVal(SizeExpr, LocCtxt);
  796. }
  797. State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  798. }
  799. // Decay the Loc into an RValue, because there might be a
  800. // MaterializeTemporaryExpr node above this one which expects the bound value
  801. // to be an RValue.
  802. SVal LambdaRVal = State->getSVal(R);
  803. ExplodedNodeSet Tmp;
  804. StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  805. // FIXME: is this the right program point kind?
  806. Bldr.generateNode(LE, Pred,
  807. State->BindExpr(LE, LocCtxt, LambdaRVal),
  808. nullptr, ProgramPoint::PostLValueKind);
  809. // FIXME: Move all post/pre visits to ::Visit().
  810. getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
  811. }