//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/Sema/SemaInternal.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <vector>

using namespace clang;

//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
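// Callback for the reachable-code analysis: each unreachable region it
// reports is turned into a -Wunreachable-code diagnostic.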
class UnreachableCodeHandler : public reachable_code::Callback {
  Sema &S;
public:
  UnreachableCodeHandler(Sema &s) : S(s) {}

  void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
    S.Diag(L, diag::warn_unreachable) << R1 << R2;
  }
};
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

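// Summary of how control can leave a function or block body, as computed by
// CheckFallThrough below.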
enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }
  // Now that we know what is live, we check the live predecessors of the exit
  // block and look for fall-through paths, being careful to ignore normal
  // returns and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }
    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }
    HasPlainEdge = true;
  }

  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {
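// Bundle of diagnostic IDs used when warning about falling off the end of a
// body; MakeForFunction and MakeForBlock select the appropriate set.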
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  bool funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = true;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = false;
    return D;
  }

  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == DiagnosticsEngine::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == DiagnosticsEngine::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
    }

    // For blocks.
    return ReturnsVoid && !HasNoReturn
      && (!ReturnsVoid ||
          D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
            == DiagnosticsEngine::Ignored);
  }
};
}

/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC) {
  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;
public:
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : EvaluatedExprVisitor<ContainsReference>(Context),
      FoundReference(false), Needle(Needle) {}

  void VisitExpr(Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;
    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  }

  void VisitDeclRefExpr(DeclRefExpr *E) {
    if (E == Needle)
      FoundReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
}
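
/// Suggest a zero-initialization fixit for \p VD when it has no initializer
/// and a textual zero initializer exists for its type.  Returns true if a
/// note carrying the fixit was emitted.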
static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Suggest possible initialization (if any).
  QualType VariableTy = VD->getType().getCanonicalType();
  const char *Init = S.getFixItZeroInitializerForType(VariableTy);
  if (!Init)
    return false;

  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
    << FixItHint::CreateInsertion(Loc, Init);
  return true;
}

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable.  This manages the different forms of diagnostic
/// emitted for particular types of uses.  Returns true if the use was
/// diagnosed as a warning.  If a particular use is one we omit warnings for,
/// returns false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const Expr *E, bool isAlwaysUninit,
                                     bool alwaysReportSelfInit = false) {
  bool isSelfInit = false;

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
    if (isAlwaysUninit) {
      // Inspect the initializer of the variable declaration which is
      // being referenced prior to its initialization. We emit
      // specialized diagnostics for self-initialization, and we
      // specifically avoid warning about self references which take the
      // form of:
      //
      //   int x = x;
      //
      // This is used to indicate to GCC that 'x' is intentionally left
      // uninitialized. Proven code paths which access 'x' in
      // an uninitialized state after this will still warn.
      //
      // TODO: Should we suppress maybe-uninitialized warnings for
      // variables initialized in this way?
      if (const Expr *Initializer = VD->getInit()) {
        if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
          return false;

        ContainsReference CR(S.Context, DRE);
        CR.Visit(const_cast<Expr*>(Initializer));
        isSelfInit = CR.doesContainReference();
      }
      if (isSelfInit) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
      } else {
        S.Diag(DRE->getLocStart(), diag::warn_uninit_var)
          << VD->getDeclName() << DRE->getSourceRange();
      }
    } else {
      S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << DRE->getSourceRange();
    }
  } else {
    const BlockExpr *BE = cast<BlockExpr>(E);
    S.Diag(BE->getLocStart(),
           isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                          : diag::warn_maybe_uninit_var_captured_by_block)
      << VD->getDeclName();
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!isSelfInit && !SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}

typedef std::pair<const Expr*, bool> UninitUse;

namespace {
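// Orders uninitialized-variable uses by source location so diagnostics come
// out in a deterministic order.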
struct SLocSort {
  bool operator()(const UninitUse &a, const UninitUse &b) {
    SourceLocation aLoc = a.first->getLocStart();
    SourceLocation bLoc = b.first->getLocStart();
    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  }
};
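
// Gathers the uses reported by the uninitialized-values analysis, grouped by
// variable, and warns on at most one use per variable when flushed.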
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
  UsesMap *uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    flushDiagnostics();
  }

  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
    if (!uses)
      uses = new UsesMap();

    UsesMap::mapped_type &V = (*uses)[vd];
    UsesVec *&vec = V.first;
    if (!vec)
      vec = new UsesVec();

    return V;
  }

  void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
                                 bool isAlwaysUninit) {
    getUses(vd).first->push_back(std::make_pair(ex, isAlwaysUninit));
  }

  void handleSelfInit(const VarDecl *vd) {
    getUses(vd).second = true;
  }

  void flushDiagnostics() {
    if (!uses)
      return;

    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      const UsesMap::mapped_type &V = i->second;

      UsesVec *vec = V.first;
      bool hasSelfInit = V.second;

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root
      // cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd, vd->getInit()->IgnoreParenCasts(),
                                 /* isAlwaysUninit */ true,
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        std::sort(vec->begin(), vec->end(), SLocSort());

        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
             ++vi) {
          if (DiagnoseUninitializedUse(S, vd, vi->first,
                                       /*isAlwaysUninit=*/vi->second))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }
    delete uses;
  }

private:
  static bool hasAlwaysUninitializedUse(const UsesVec *vec) {
    for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
      if (i->second) {
        return true;
      }
    }
    return false;
  }
};
}

//===----------------------------------------------------------------------===//
// -Wthread-safety
//===----------------------------------------------------------------------===//

namespace clang {
namespace thread_safety {
typedef std::pair<SourceLocation, PartialDiagnostic> DelayedDiag;
typedef llvm::SmallVector<DelayedDiag, 4> DiagList;
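
// Comparator used to sort delayed thread-safety diagnostics into source
// order before they are emitted.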
struct SortDiagBySourceLocation {
  Sema &S;
  SortDiagBySourceLocation(Sema &S) : S(S) {}

  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
    // Although this call will be slow, this is only called when outputting
    // multiple warnings.
    return S.getSourceManager().isBeforeInTranslationUnit(left.first,
                                                          right.first);
  }
};

namespace {
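// Receives callbacks from the thread-safety analysis and buffers them as
// delayed diagnostics so they can be emitted in source order.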
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  SourceLocation FunLocation;

  // Helper functions
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnostic Warning = S.PDiag(DiagID) << LockName;
    Warnings.push_back(DelayedDiag(Loc, Warning));
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL)
    : S(S), FunLocation(FL) {}

  /// \brief Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    SortDiagBySourceLocation SortDiagBySL(S);
    sort(Warnings.begin(), Warnings.end(), SortDiagBySL);
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I)
      S.Diag(I->first, I->second);
  }

  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnostic Warning = S.PDiag(diag::warn_cannot_resolve_lock) << Loc;
    Warnings.push_back(DelayedDiag(Loc, Warning));
  }

  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  void handleMutexHeldEndOfScope(Name LockName, SourceLocation Loc,
                                 LockErrorKind LEK) {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_at_end_of_scope;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
    }
    warnLockMismatch(DiagID, LockName, Loc);
  }

  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnostic Warning =
      S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName;
    PartialDiagnostic Note =
      S.PDiag(diag::note_lock_exclusive_and_shared) << LockName;
    Warnings.push_back(DelayedDiag(Loc1, Warning));
    Warnings.push_back(DelayedDiag(Loc2, Note));
  }

  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess ?
                        diag::warn_variable_requires_any_lock :
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnostic Warning = S.PDiag(DiagID)
      << D->getName() << getLockKindFromAccessKind(AK);
    Warnings.push_back(DelayedDiag(Loc, Warning));
  }

  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc) {
    unsigned DiagID = 0;
    switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
    }
    PartialDiagnostic Warning = S.PDiag(DiagID)
      << D->getName() << LockName << LK;
    Warnings.push_back(DelayedDiag(Loc, Warning));
  }

  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnostic Warning =
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName;
    Warnings.push_back(DelayedDiag(Loc, Warning));
  }
};
}
}
}

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
}

clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
  DiagnosticsEngine &D = S.getDiagnostics();
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
}
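
// Emit all of the function's possibly-unreachable diagnostics unconditionally,
// without consulting the CFG.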
static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
       i != e; ++i) {
    const sema::PossiblyUnreachableDiag &D = *i;
    S.Diag(D.Loc, D.PD);
  }
}
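
// Main entry point called by Sema: run the enabled analyses over a single
// function, method, or block body and emit the resulting warnings.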
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {
  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  bool Dependent = cast<DeclContext>(D)->isDependentContext();

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D, 0);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion of destructors that can result, and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Construct the analysis context with the specified CFG build options.

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty() && !Dependent) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;

        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough && !Dependent) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
                         : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the
    // control-flow and it is very difficult to prove that a snippet of code
    // in a template is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis && !Dependent) {
    SourceLocation FL = AC.getDecl()->getLocation();
    thread_safety::ThreadSafetyReporter Reporter(S, FL);
    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
          std::max(MaxUninitAnalysisVariablesPerFunction,
                   stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
          std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                   stats.NumBlockVisits);
      }
    }
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detailed information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << " " << NumCFGBlocks << " CFG blocks built.\n"
               << " " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << " " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << " " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << " " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << " " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << " " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
}