ExprEngineCXX.cpp

//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"

using namespace clang;
using namespace ento;

void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->GetTemporaryExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}
// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
  if (Optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}
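
// Descriptive comment added for clarity: peels off array types from Ty and
// descends to the lvalue of the first element, so that constructors and
// destructors of array objects can be modeled on element zero. On return,
// Ty is the base element type and IsArray is set if Ty was an array type.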
SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
                                       QualType &Ty, bool &IsArray) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    Ty = AT->getElementType();
    LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
    IsArray = true;
  }

  return LValue;
}
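
// Descriptive comment added for clarity: determines the target region that
// expression E constructs into, based on its construction context CC.
// Returns the updated state together with an SVal for that region (or a
// conjured symbol when no region can be found), recording any modeling
// caveats in CallOpts.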
std::pair<ProgramStateRef, SVal> ExprEngine::prepareForObjectConstruction(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts) {
  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // See if we're constructing an existing region by looking at the
  // current construction context.
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      SVal LValue = State->getLValue(Var, LCtx);
      QualType Ty = Var->getType();
      LValue =
          makeZeroElementRegion(State, LValue, Ty, CallOpts.IsArrayCtorOrDtor);
      State =
          addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, LValue);
      return std::make_pair(State, LValue);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      assert(Init->isAnyMemberInitializer());
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr =
          SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      FieldVal = makeZeroElementRegion(State, FieldVal, Ty,
                                       CallOpts.IsArrayCtorOrDtor);
      State = addObjectUnderConstruction(State, Init, LCtx, FieldVal);
      return std::make_pair(State, FieldVal);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;
            return std::make_pair(
                State, loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
                           MR, NE->getType()->getPointeeType())));
          }
          return std::make_pair(State, V);
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        return prepareForObjectConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        SVal V = SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC,
                                      RegionTy, currBldrCtx->blockCount());
        return std::make_pair(State, V);
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      const CXXConstructExpr *CE = TCC->getConstructorAfterElision();

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      SVal V;
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      std::tie(State, V) = prepareForObjectConstruction(
          CE, State, LCtx, TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; it is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
        // Remember that we've elided the constructor.
        State = addObjectUnderConstruction(State, CE, LCtx, V);

        // Remember that we've elided the destructor.
        if (BTE)
          State = elideDestructor(State, BTE, LCtx);

        // Instead of materialization, shamelessly return
        // the final object destination.
        if (MTE)
          State = addObjectUnderConstruction(State, MTE, LCtx, V);

        return std::make_pair(State, V);
      } else {
        // Copy elision failed. Revert the changes and proceed as if we have
        // a simple temporary.
        State = PreElideState;
        CallOpts = PreElideCallOpts;
      }
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      SVal V = UnknownVal();

      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          V = loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      if (V.isUnknown())
        V = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      if (MTE)
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      CallOpts.IsTemporaryCtorOrDtor = true;
      return std::make_pair(State, V);
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();
      const CXXBindTemporaryExpr *BTE = ACC->getCXXBindTemporaryExpr();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      SVal V = UnknownVal();
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(currBldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;
        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();
        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;
        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const VarRegion *VR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount());
        if (!VR)
          return None;

        return loc::MemRegionVal(VR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CE, Idx}, LCtx, V);
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CCE, Idx}, LCtx, V);
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {ME, Idx}, LCtx, V);
      }

      assert(!V.isUnknown());

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      return std::make_pair(State, V);
    }
    }
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return std::make_pair(
      State, loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)));
}
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &destNodes) {
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (Optional<SVal> ElidedTarget =
          getObjectUnderConstruction(State, CE, LCtx)) {
    // We've previously modeled an elidable constructor by pretending that it
    // in fact constructs into the correct target. This constructor can
    // therefore be skipped.
    Target = *ElidedTarget;
    StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
    State = finishObjectConstruction(State, CE, LCtx);
    if (auto L = Target.getAs<Loc>())
      State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
    Bldr.generateNode(CE, Pred, State);
    return;
  }

  // FIXME: Handle arrays, which run the same constructor for every element.
  // For now, we just run the first constructor (which should still invalidate
  // the entire array).

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  switch (CE->getConstructionKind()) {
  case CXXConstructExpr::CK_Complete: {
    std::tie(State, Target) =
        prepareForObjectConstruction(CE, State, LCtx, CC, CallOpts);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    LLVM_FALLTHROUGH;
  }
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they
    // would be initialized as aggregates without a constructor call, so we
    // may have a base class constructed directly into an initializer list
    // without having the derived-class constructor call on the previous
    // stack frame. Initializer lists may be nested into more initializer
    // lists that correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(CE))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(CE, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    LLVM_FALLTHROUGH;
  case CXXConstructExpr::CK_Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CE->getConstructionKind() == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual =
          (CE->getConstructionKind() == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, CE->getType(),
                                                         IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(CE, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXConstructorCall> Call =
      CEMgr.getCXXConstructorCall(CE, Target.getAsRegion(), State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, CE, *this);

  // FIXME: Is it possible and/or useful to do this before PreStmt?
  ExplodedNodeSet PreInitialized;
  {
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions,
        // we'll need to invalidate the region before setting a default value,
        // to make sure there aren't any lingering bindings around. This
        // probably needs to happen regardless of whether or not the object is
        // zero-initialized to handle random fields of a placement-initialized
        // object picking up old bindings. We might only want to do it when we
        // need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this
        // shouldn't actually make things worse. Placement new makes this
        // tricky as well, since it's then possible to be initializing one
        // part of a multi-dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;
  StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);

  if (CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      defaultEvalCall(Bldr, *I, *Call, CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn constructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    const MemRegion *Target = Call->getCXXThisVal().getAsRegion();
    if (Target && isa<CXXTempObjectRegion>(Target) &&
        Call->getDecl()->getParent()->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(CE, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (auto I : DstEvaluated)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, CE, *this);
}
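
// Descriptive comment added for clarity: models an implicit call to the
// destructor of ObjectType for the object living in region Dest. S is the
// statement that triggered the destruction, and IsBaseDtor is true when a
// base-class subobject is being destroyed.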
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return because
    // it would interrupt the analysis instead.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer
    // instead of target region, etc.). The current code makes an attempt to
    // recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
    } else {
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(Pred->getLocation().withTag(&T),
                        Pred->getState(), Pred);
      return;
    }
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call =
      CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
       I != E; ++I)
    defaultEvalCall(Bldr, *I, *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}
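
// Descriptive comment added for clarity: models the call to 'operator new'
// that is part of a new-expression, which happens before the constructor of
// the allocated object runs. The returned pointer is stashed as an
// object-under-construction until the CXXNewExpr itself is processed.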
void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (auto I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (auto I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value
    // symbol is going to be of the same type (C++ object pointer type).
    // Technically this is not correct because the operator new's prototype
    // always says that it returns a 'void *'. So we should change the type of
    // the symbol, and then evaluate the cast over the symbolic pointer from
    // 'void *' to the object pointer type. But without changing the symbol's
    // type it is breaking too much to evaluate the no-op symbolic cast over
    // it, so we skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option, we
    // can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (auto I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(
        CNE, *getObjectUnderConstruction(I->getState(), CNE, LCtx), Dst, I,
        *this);
  }
}
void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checks here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option, we
    // can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (FD) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
            State = State->assume(*dSymVal, true);
    }
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {
    // FIXME: allocating an array requires simulating the constructors.
    // For now, just return a symbolicated region.
    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      QualType ObjTy = CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
      Result = loc::MemRegionVal(EleReg);
    }
    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD && FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}
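
// Descriptive comment added for clarity: the delete-expression itself is not
// modeled here; this simply propagates the predecessor's state to a new node.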
void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  ProgramStateRef state = Pred->getState();
  Bldr.generateNode(CDE, Pred, state);
}
void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    Dst.Add(Pred);
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}
void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
      svalBuilder.getRegionManager().getCXXThisRegion(
          getContext().getCanonicalType(TE->getType()),
          LCtx);

  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}
void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
                                               e = LE->capture_init_end();
       i != e; ++i, ++CurField) {
    FieldDecl *FieldForCapture = *CurField;
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      Expr *InitExpr = *i;
      assert(InitExpr && "Capture missing initialization expression");
      InitVal = State->getSVal(InitExpr, LocCtxt);
    } else {
      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}