UninitializedValues.cpp

//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/SaveAndRestore.h"
#include <utility>

using namespace clang;

#define DEBUG_LOGGING 0
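
/// Returns true if the analysis tracks this variable: a non-static local
/// declared directly in 'dc' (and not an exception variable or init-capture)
/// whose type is scalar, vector, or record.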
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() && !vd->isInitCapture() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType() || ty->isRecordType();
  }
  return false;
}
//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return None;
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
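// For example, merging a value that is Initialized (01) along one path with
// one that is Uninitialized (10) along another yields MayUninitialized (11).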
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
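/// A worklist that initially visits blocks in reverse post-order, and uses
/// an explicit LIFO queue to quickly propagate updates along back edges.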
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = nullptr;

  // First dequeue from the worklist.  This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty())
    B = worklist.pop_back_val();

  // Next dequeue from the initial reverse post order.  This is the
  // theoretical ideal in the presence of no back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return nullptr;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}

//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
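/// The variable (if any) that an expression was found to reference, together
/// with the DeclRefExpr naming it; both are null when no tracked variable
/// was found.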
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};
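
/// Strip off parentheses, no-op casts, and lvalue bitcasts, returning the
/// first expression that is none of these.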
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
          dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(nullptr, nullptr);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}
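
/// If 'VD' is initialized with a reference to itself (the 'int x = x;'
/// idiom), return that self-referencing DeclRefExpr; otherwise return null.
/// Record types are excluded.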
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (VD->getType()->isRecordType()) return nullptr;
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return nullptr;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  // The result of a ?: could also be an lvalue.
  E = E->IgnoreParens();
  if (const ConditionalOperator *CO = dyn_cast<ConditionalOperator>(E)) {
    classify(CO->getTrueExpr(), C);
    classify(CO->getFalseExpr(), C);
    return;
  }

  if (const BinaryConditionalOperator *BCO =
          dyn_cast<BinaryConditionalOperator>(E)) {
    classify(BCO->getFalseExpr(), C);
    return;
  }

  if (const OpaqueValueExpr *OVE = dyn_cast<OpaqueValueExpr>(E)) {
    classify(OVE->getSourceExpr(), C);
    return;
  }

  if (const MemberExpr *ME = dyn_cast<MemberExpr>(E)) {
    if (VarDecl *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
      if (!VD->isStaticDataMember())
        classify(ME->getBase(), C);
    }
    return;
  }

  if (const BinaryOperator *BO = dyn_cast<BinaryOperator>(E)) {
    switch (BO->getOpcode()) {
    case BO_PtrMemD:
    case BO_PtrMemI:
      classify(BO->getLHS(), C);
      return;
    case BO_Comma:
      classify(BO->getRHS(), C);
      return;
    default:
      return;
    }
  }

  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}
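
// For each tracked variable declared by this statement, classify a
// self-initializing reference (as in 'int x = x;') as SelfInit.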
void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    VarDecl *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign || BO->getOpcode() == BO_Comma)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

static bool isPointerToConst(const QualType &QT) {
  return QT->isAnyPointerType() && QT->getPointeeType().isConstQualified();
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // Classify arguments to std::move as used.
  if (CE->getNumArgs() == 1) {
    if (FunctionDecl *FD = CE->getDirectCallee()) {
      if (FD->isInStdNamespace() && FD->getIdentifier() &&
          FD->getIdentifier()->isStr("move")) {
        // RecordTypes are handled in SemaDeclCXX.cpp.
        if (!CE->getArg(0)->getType()->isRecordType())
          classify(CE->getArg(0), Use);
        return;
      }
    }
  }

  // If a value is passed by const pointer or by const reference to a function,
  // we should not assume that it is initialized by the call, and we
  // conservatively do not assume that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I) {
    if ((*I)->isGLValue()) {
      if ((*I)->getType().isConstQualified())
        classify((*I), Ignore);
    } else if (isPointerToConst((*I)->getType())) {
      const Expr *Ex = stripCasts(DC->getParentASTContext(), *I);
      const UnaryOperator *UO = dyn_cast<UnaryOperator>(Ex);
      if (UO && UO->getOpcode() == UO_AddrOf)
        Ex = UO->getSubExpr();
      classify(Ex, Ignore);
    }
  }
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
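/// Applies the effect of each statement in a CFG block to the scratch value
/// vector, reporting any uses of uninitialized variables to the handler.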
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.pop_back_val();

      // If the use is always reached from the entry block, make a note of that.
      if (B == &cfg.getEntry())
        Use.setUninitAfterCall();

      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (!Pred)
          continue;

        Value AtPredExit = vals.getValue(Pred, B, vd);
        if (AtPredExit == Initialized)
          // This block initializes the variable.
          continue;
        if (AtPredExit == MayUninitialized &&
            vals.getValue(B, nullptr, vd) == Uninitialized) {
          // This block declares the variable (uninitialized), and is reachable
          // from a block that initializes the variable. We can't guarantee to
          // give an earlier location for the diagnostic (and it appears that
          // this code is intended to be reachable) so give a diagnostic here
          // and go no further down this path.
          Use.setUninitAfterDecl();
          continue;
        }

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;

              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}
void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (const auto &I : bd->captures()) {
    const VarDecl *vd = I.getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (I.isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables.  FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning.  This is a
      // useful annotation borrowed from the static analyzer that is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (auto *DI : DS->decls()) {
    VarDecl *VD = dyn_cast<VarDecl>(DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//
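
/// Merge the values flowing out of the analyzed predecessors of 'block',
/// apply the transfer function to each of its statements, and return whether
/// the block's value vector changed as a result.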
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (!pred)
      continue;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (Optional<CFGStmt> cs = I->getAs<CFGStmt>())
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
  }
  return vals.updateValueVectorWithScratch(block);
}

/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable.  It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  ~PruneBlocksHandler() override {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block to scribble use information.
  unsigned currentBlock;

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'.  All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  void handleSelfInit(const VarDecl *vd) override {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }
  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}
UninitVariablesHandler::~UninitVariablesHandler() {}
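
// A minimal usage sketch, kept as a comment: it assumes an AnalysisDeclContext
// 'AC' already built for the function being checked, and the 'Reporter' name
// is hypothetical. Clients subclass UninitVariablesHandler and override the
// two callbacks used above.
//
//   struct Reporter : UninitVariablesHandler {
//     void handleUseOfUninitVariable(const VarDecl *VD,
//                                    const UninitUse &Use) override {
//       llvm::errs() << VD->getName() << " may be used uninitialized\n";
//     }
//     void handleSelfInit(const VarDecl *VD) override {
//       llvm::errs() << VD->getName() << " is self-initialized\n";
//     }
//   };
//
//   UninitVariablesAnalysisStats Stats = {};
//   Reporter R;
//   if (CFG *G = AC.getCFG())
//     runUninitializedVariablesAnalysis(*cast<DeclContext>(AC.getDecl()),
//                                       *G, AC, R, Stats);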