
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/SaveAndRestore.h"
#include <utility>

using namespace clang;

#define DEBUG_LOGGING 0
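
/// A variable is "tracked" by this analysis if it is a non-static local
/// variable (and not an exception variable) declared directly in the given
/// DeclContext, with scalar or vector type.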
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
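
// For example, merging Initialized (01) with Uninitialized (10) via bitwise
// OR yields MayUninitialized (11). Because the high bit encodes "possibly
// uninitialized", the two predicates below reduce to simple comparisons on
// this encoding.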
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
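// A ValueVector packs one two-bit Value per tracked variable, keeping the
// per-block dataflow state compact.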
typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);

  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
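/// A worklist that drains blocks enqueued along back edges before falling
/// back to the initial reverse post-order traversal of the CFG.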
class DataflowWorklist {
  PostOrderCFGView::iterator PO_I, PO_E;
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg, PostOrderCFGView &view)
    : PO_I(view.begin()), PO_E(view.end()),
      enqueuedBlocks(cfg.getNumBlockIDs(), true) {
    // Treat the first block as already analyzed.
    if (PO_I != PO_E) {
      assert(*PO_I == &cfg.getEntry());
      enqueuedBlocks[(*PO_I)->getBlockID()] = false;
      ++PO_I;
    }
  }

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
}
const CFGBlock *DataflowWorklist::dequeue() {
  const CFGBlock *B = 0;

  // First dequeue from the worklist. This can represent
  // updates along backedges that we want propagated as quickly as possible.
  if (!worklist.empty()) {
    B = worklist.back();
    worklist.pop_back();
  }
  // Next dequeue from the initial reverse post order. This is the
  // theoretical ideal in the absence of back edges.
  else if (PO_I != PO_E) {
    B = *PO_I;
    ++PO_I;
  }
  else {
    return 0;
  }

  assert(enqueuedBlocks[B->getBlockID()] == true);
  enqueuedBlocks[B->getBlockID()] = false;
  return B;
}
//------------------------------------------------------------------------====//
// Classification of DeclRefExprs as use or initialization.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};
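
/// Strip off parentheses, no-op casts, and lvalue-to-lvalue bit casts so a
/// variable reference buried beneath them can still be recognized.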
static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find
/// that variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}
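
/// If a variable's initializer is, after stripping casts, just a reference
/// to the variable itself (as in 'int x = x;'), return that DeclRefExpr;
/// otherwise return null.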
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
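    // The Class enumerators are ordered so that std::max keeps the strongest
    // classification recorded for this reference
    // (Ignore > SelfInit > Use > Init).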
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment does not affect
  // whether a variable is uninitialized, and there's no point counting it as a
  // use.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
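/// The per-statement transfer function: updates the scratch value vector as
/// the statements of a block are visited, and reports any use of a
/// still-uninitialized variable to the handler.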
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler &handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler &handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    SmallVector<const CFGBlock*, 32> Queue;
    SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;

              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  Value v = vals[vd];
  if (isUninitialized(v))
    handler.handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
       e = bd->capture_end(); i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
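    // A by-reference (__block) capture can be assigned to within the block
    // itself, so the variable is conservatively treated as initialized once
    // the block expression has been evaluated.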
  536. if (i->isByRef()) {
  537. vals[vd] = Initialized;
  538. continue;
  539. }
  540. reportUse(be, vd);
  541. }
  542. }
  543. void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  544. if (Decl *Callee = ce->getCalleeDecl()) {
  545. if (Callee->hasAttr<ReturnsTwiceAttr>()) {
  546. // After a call to a function like setjmp or vfork, any variable which is
  547. // initialized anywhere within this function may now be initialized. For
  548. // now, just assume such a call initializes all variables. FIXME: Only
  549. // mark variables as initialized if they have an initializer which is
  550. // reachable from here.
  551. vals.setAllScratchValues(Initialized);
  552. }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning. This
      // annotation, borrowed from the static analyzer, is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    handler.handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//
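
/// Process a single basic block: merge the value vectors of all
/// already-analyzed predecessors into the scratch vector, apply the
/// per-statement transfer functions, and report whether the block's stored
/// values changed.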
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler &handler) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
/// PruneBlocksHandler is a special UninitVariablesHandler that is used
/// to detect when a CFGBlock has any *potential* use of an uninitialized
/// variable. It is mainly used to prune out work during the final
/// reporting pass.
namespace {
struct PruneBlocksHandler : public UninitVariablesHandler {
  PruneBlocksHandler(unsigned numBlocks)
    : hadUse(numBlocks, false), hadAnyUse(false),
      currentBlock(0) {}

  virtual ~PruneBlocksHandler() {}

  /// Records if a CFGBlock had a potential use of an uninitialized variable.
  llvm::BitVector hadUse;

  /// Records if any CFGBlock had a potential use of an uninitialized variable.
  bool hadAnyUse;

  /// The current block in which to record use information.
  unsigned currentBlock;

  virtual void handleUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }

  /// Called when the uninitialized variable analysis detects the
  /// idiom 'int x = x'. All other uses of 'x' within the initializer
  /// are handled by handleUseOfUninitVariable.
  virtual void handleSelfInit(const VarDecl *vd) {
    hadUse[currentBlock] = true;
    hadAnyUse = true;
  }
};
}
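
// Driver for the analysis: seed every tracked variable as Uninitialized at
// the entry block, iterate the worklist to a fixed point, and, if any
// potential use was recorded, make one final pass over the flagged blocks to
// report diagnostics through the real handler.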
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg, *ac.getAnalysis<PostOrderCFGView>());
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
  PruneBlocksHandler PBH(cfg.getNumBlockIDs());

  while (const CFGBlock *block = worklist.dequeue()) {
    PBH.currentBlock = block->getBlockID();

    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed, PBH);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  if (!PBH.hadAnyUse)
    return;

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (PBH.hadUse[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}