  1. //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines analysis_warnings::[Policy,Executor].
  11. // Together they are used by Sema to issue warnings based on inexpensive
  12. // static analysis algorithms in libAnalysis.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "clang/Sema/AnalysisBasedWarnings.h"
  16. #include "clang/Sema/SemaInternal.h"
  17. #include "clang/Sema/ScopeInfo.h"
  18. #include "clang/Basic/SourceManager.h"
  19. #include "clang/Basic/SourceLocation.h"
  20. #include "clang/Lex/Preprocessor.h"
  21. #include "clang/AST/DeclObjC.h"
  22. #include "clang/AST/DeclCXX.h"
  23. #include "clang/AST/ExprObjC.h"
  24. #include "clang/AST/ExprCXX.h"
  25. #include "clang/AST/StmtObjC.h"
  26. #include "clang/AST/StmtCXX.h"
  27. #include "clang/AST/EvaluatedExprVisitor.h"
  28. #include "clang/AST/StmtVisitor.h"
  29. #include "clang/Analysis/AnalysisContext.h"
  30. #include "clang/Analysis/CFG.h"
  31. #include "clang/Analysis/Analyses/ReachableCode.h"
  32. #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
  33. #include "clang/Analysis/Analyses/ThreadSafety.h"
  34. #include "clang/Analysis/CFGStmtMap.h"
  35. #include "clang/Analysis/Analyses/UninitializedValues.h"
  36. #include "llvm/ADT/BitVector.h"
  37. #include "llvm/ADT/FoldingSet.h"
  38. #include "llvm/ADT/ImmutableMap.h"
  39. #include "llvm/ADT/PostOrderIterator.h"
  40. #include "llvm/ADT/SmallVector.h"
  41. #include "llvm/ADT/StringRef.h"
  42. #include "llvm/Support/Casting.h"
  43. #include <algorithm>
  44. #include <vector>
  45. using namespace clang;
  46. //===----------------------------------------------------------------------===//
  47. // Unreachable code analysis.
  48. //===----------------------------------------------------------------------===//
  49. namespace {
  50. class UnreachableCodeHandler : public reachable_code::Callback {
  51. Sema &S;
  52. public:
  53. UnreachableCodeHandler(Sema &s) : S(s) {}
  54. void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
  55. S.Diag(L, diag::warn_unreachable) << R1 << R2;
  56. }
  57. };
  58. }
  59. /// CheckUnreachable - Check for unreachable code.
  60. static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  61. UnreachableCodeHandler UC(S);
  62. reachable_code::FindUnreachableCode(AC, UC);
  63. }
  64. //===----------------------------------------------------------------------===//
  65. // Check for missing return value.
  66. //===----------------------------------------------------------------------===//
/// Possible outcomes of the fall-through analysis for a function body.
enum ControlFlowKind {
  UnknownFallThrough,       // No CFG could be built; nothing can be concluded.
  NeverFallThrough,         // Never falls off the end, but may return.
  MaybeFallThrough,         // Might or might not fall off the end.
  AlwaysFallThrough,        // Always falls off the end.
  NeverFallThroughOrReturn  // Neither falls off the end nor returns.
};
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return. We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return. We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead. Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
       I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      // MS inline assembly may contain a 'ret' the CFG cannot see; treat it
      // as both a possible return and a fake edge.
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // If the block does not list the exit block as a direct successor, the
    // edge into the exit is exceptional/abnormal rather than fall-through.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
          == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }

  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
  192. namespace {
  193. struct CheckFallThroughDiagnostics {
  194. unsigned diag_MaybeFallThrough_HasNoReturn;
  195. unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  196. unsigned diag_AlwaysFallThrough_HasNoReturn;
  197. unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  198. unsigned diag_NeverFallThroughOrReturn;
  199. bool funMode;
  200. SourceLocation FuncLoc;
  201. static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
  202. CheckFallThroughDiagnostics D;
  203. D.FuncLoc = Func->getLocation();
  204. D.diag_MaybeFallThrough_HasNoReturn =
  205. diag::warn_falloff_noreturn_function;
  206. D.diag_MaybeFallThrough_ReturnsNonVoid =
  207. diag::warn_maybe_falloff_nonvoid_function;
  208. D.diag_AlwaysFallThrough_HasNoReturn =
  209. diag::warn_falloff_noreturn_function;
  210. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  211. diag::warn_falloff_nonvoid_function;
  212. // Don't suggest that virtual functions be marked "noreturn", since they
  213. // might be overridden by non-noreturn functions.
  214. bool isVirtualMethod = false;
  215. if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
  216. isVirtualMethod = Method->isVirtual();
  217. // Don't suggest that template instantiations be marked "noreturn"
  218. bool isTemplateInstantiation = false;
  219. if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
  220. isTemplateInstantiation = Function->isTemplateInstantiation();
  221. if (!isVirtualMethod && !isTemplateInstantiation)
  222. D.diag_NeverFallThroughOrReturn =
  223. diag::warn_suggest_noreturn_function;
  224. else
  225. D.diag_NeverFallThroughOrReturn = 0;
  226. D.funMode = true;
  227. return D;
  228. }
  229. static CheckFallThroughDiagnostics MakeForBlock() {
  230. CheckFallThroughDiagnostics D;
  231. D.diag_MaybeFallThrough_HasNoReturn =
  232. diag::err_noreturn_block_has_return_expr;
  233. D.diag_MaybeFallThrough_ReturnsNonVoid =
  234. diag::err_maybe_falloff_nonvoid_block;
  235. D.diag_AlwaysFallThrough_HasNoReturn =
  236. diag::err_noreturn_block_has_return_expr;
  237. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  238. diag::err_falloff_nonvoid_block;
  239. D.diag_NeverFallThroughOrReturn =
  240. diag::warn_suggest_noreturn_block;
  241. D.funMode = false;
  242. return D;
  243. }
  244. bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
  245. bool HasNoReturn) const {
  246. if (funMode) {
  247. return (ReturnsVoid ||
  248. D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
  249. FuncLoc) == DiagnosticsEngine::Ignored)
  250. && (!HasNoReturn ||
  251. D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
  252. FuncLoc) == DiagnosticsEngine::Ignored)
  253. && (!ReturnsVoid ||
  254. D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
  255. == DiagnosticsEngine::Ignored);
  256. }
  257. // For blocks.
  258. return ReturnsVoid && !HasNoReturn
  259. && (!ReturnsVoid ||
  260. D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
  261. == DiagnosticsEngine::Ignored);
  262. }
  263. };
  264. }
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value. Check that we don't fall off the end
/// of a noreturn function. We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {
  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    // 'noreturn' can come from the declaration attribute or from the
    // function's type itself.
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, result and noreturn information lives on the function
    // type reached through the BlockExpr's pointer type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // diag_NeverFallThroughOrReturn is 0 (suppressed) for virtual
        // methods and template instantiations; see MakeForFunction.
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}
  337. //===----------------------------------------------------------------------===//
  338. // -Wuninitialized
  339. //===----------------------------------------------------------------------===//
  340. namespace {
  341. /// ContainsReference - A visitor class to search for references to
  342. /// a particular declaration (the needle) within any evaluated component of an
  343. /// expression (recursively).
  344. class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  345. bool FoundReference;
  346. const DeclRefExpr *Needle;
  347. public:
  348. ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
  349. : EvaluatedExprVisitor<ContainsReference>(Context),
  350. FoundReference(false), Needle(Needle) {}
  351. void VisitExpr(Expr *E) {
  352. // Stop evaluating if we already have a reference.
  353. if (FoundReference)
  354. return;
  355. EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  356. }
  357. void VisitDeclRefExpr(DeclRefExpr *E) {
  358. if (E == Needle)
  359. FoundReference = true;
  360. else
  361. EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  362. }
  363. bool doesContainReference() const { return FoundReference; }
  364. };
  365. }
  366. static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  367. // Don't issue a fixit if there is already an initializer.
  368. if (VD->getInit())
  369. return false;
  370. // Suggest possible initialization (if any).
  371. QualType VariableTy = VD->getType().getCanonicalType();
  372. const char *Init = S.getFixItZeroInitializerForType(VariableTy);
  373. if (!Init)
  374. return false;
  375. SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
  376. S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
  377. << FixItHint::CreateInsertion(Loc, Init);
  378. return true;
  379. }
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const Expr *E, bool isAlwaysUninit,
                                     bool alwaysReportSelfInit = false) {
  bool isSelfInit = false;

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
    if (isAlwaysUninit) {
      // Inspect the initializer of the variable declaration which is
      // being referenced prior to its initialization. We emit
      // specialized diagnostics for self-initialization, and we
      // specifically avoid warning about self references which take the
      // form of:
      //
      //   int x = x;
      //
      // This is used to indicate to GCC that 'x' is intentionally left
      // uninitialized. Proven code paths which access 'x' in
      // an uninitialized state after this will still warn.
      //
      // TODO: Should we suppress maybe-uninitialized warnings for
      // variables initialized in this way?
      if (const Expr *Initializer = VD->getInit()) {
        if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
          return false;

        ContainsReference CR(S.Context, DRE);
        CR.Visit(const_cast<Expr*>(Initializer));
        isSelfInit = CR.doesContainReference();
      }
      if (isSelfInit) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
        << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
      } else {
        S.Diag(DRE->getLocStart(), diag::warn_uninit_var)
          << VD->getDeclName() << DRE->getSourceRange();
      }
    } else {
      // Only some paths reach this use uninitialized.
      S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << DRE->getSourceRange();
    }
  } else {
    // Not a direct reference: the variable was captured by a block while
    // (possibly) uninitialized.
    const BlockExpr *BE = cast<BlockExpr>(E);
    S.Diag(BE->getLocStart(),
           isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                          : diag::warn_maybe_uninit_var_captured_by_block)
      << VD->getDeclName();
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!isSelfInit && !SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}
  439. typedef std::pair<const Expr*, bool> UninitUse;
  440. namespace {
  441. struct SLocSort {
  442. bool operator()(const UninitUse &a, const UninitUse &b) {
  443. SourceLocation aLoc = a.first->getLocStart();
  444. SourceLocation bLoc = b.first->getLocStart();
  445. return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  446. }
  447. };
  448. class UninitValsDiagReporter : public UninitVariablesHandler {
  449. Sema &S;
  450. typedef SmallVector<UninitUse, 2> UsesVec;
  451. typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
  452. UsesMap *uses;
  453. public:
  454. UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  455. ~UninitValsDiagReporter() {
  456. flushDiagnostics();
  457. }
  458. std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
  459. if (!uses)
  460. uses = new UsesMap();
  461. UsesMap::mapped_type &V = (*uses)[vd];
  462. UsesVec *&vec = V.first;
  463. if (!vec)
  464. vec = new UsesVec();
  465. return V;
  466. }
  467. void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
  468. bool isAlwaysUninit) {
  469. getUses(vd).first->push_back(std::make_pair(ex, isAlwaysUninit));
  470. }
  471. void handleSelfInit(const VarDecl *vd) {
  472. getUses(vd).second = true;
  473. }
  474. void flushDiagnostics() {
  475. if (!uses)
  476. return;
  477. for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
  478. const VarDecl *vd = i->first;
  479. const UsesMap::mapped_type &V = i->second;
  480. UsesVec *vec = V.first;
  481. bool hasSelfInit = V.second;
  482. // Specially handle the case where we have uses of an uninitialized
  483. // variable, but the root cause is an idiomatic self-init. We want
  484. // to report the diagnostic at the self-init since that is the root cause.
  485. if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
  486. DiagnoseUninitializedUse(S, vd, vd->getInit()->IgnoreParenCasts(),
  487. /* isAlwaysUninit */ true,
  488. /* alwaysReportSelfInit */ true);
  489. else {
  490. // Sort the uses by their SourceLocations. While not strictly
  491. // guaranteed to produce them in line/column order, this will provide
  492. // a stable ordering.
  493. std::sort(vec->begin(), vec->end(), SLocSort());
  494. for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
  495. ++vi) {
  496. if (DiagnoseUninitializedUse(S, vd, vi->first,
  497. /*isAlwaysUninit=*/vi->second))
  498. // Skip further diagnostics for this variable. We try to warn only
  499. // on the first point at which a variable is used uninitialized.
  500. break;
  501. }
  502. }
  503. // Release the uses vector.
  504. delete vec;
  505. }
  506. delete uses;
  507. }
  508. private:
  509. static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
  510. for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
  511. if (i->second) {
  512. return true;
  513. }
  514. }
  515. return false;
  516. }
  517. };
  518. }
  519. //===----------------------------------------------------------------------===//
  520. // -Wthread-safety
  521. //===----------------------------------------------------------------------===//
  522. namespace clang {
  523. namespace thread_safety {
  524. typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
  525. typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
  526. typedef llvm::SmallVector<DelayedDiag, 4> DiagList;
  527. struct SortDiagBySourceLocation {
  528. Sema &S;
  529. SortDiagBySourceLocation(Sema &S) : S(S) {}
  530. bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
  531. // Although this call will be slow, this is only called when outputting
  532. // multiple warnings.
  533. return S.getSourceManager().isBeforeInTranslationUnit(left.first.first,
  534. right.first.first);
  535. }
  536. };
  537. namespace {
  538. class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  539. Sema &S;
  540. DiagList Warnings;
  541. SourceLocation FunLocation, FunEndLocation;
  542. // Helper functions
  543. void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
  544. // Gracefully handle rare cases when the analysis can't get a more
  545. // precise source location.
  546. if (!Loc.isValid())
  547. Loc = FunLocation;
  548. PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
  549. Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  550. }
  551. public:
  552. ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
  553. : S(S), FunLocation(FL), FunEndLocation(FEL) {}
  554. /// \brief Emit all buffered diagnostics in order of sourcelocation.
  555. /// We need to output diagnostics produced while iterating through
  556. /// the lockset in deterministic order, so this function orders diagnostics
  557. /// and outputs them.
  558. void emitDiagnostics() {
  559. SortDiagBySourceLocation SortDiagBySL(S);
  560. sort(Warnings.begin(), Warnings.end(), SortDiagBySL);
  561. for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
  562. I != E; ++I) {
  563. S.Diag(I->first.first, I->first.second);
  564. const OptionalNotes &Notes = I->second;
  565. for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
  566. S.Diag(Notes[NoteI].first, Notes[NoteI].second);
  567. }
  568. }
  569. void handleInvalidLockExp(SourceLocation Loc) {
  570. PartialDiagnosticAt Warning(Loc,
  571. S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
  572. Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  573. }
  574. void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
  575. warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  576. }
  577. void handleDoubleLock(Name LockName, SourceLocation Loc) {
  578. warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  579. }
  580. void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
  581. SourceLocation LocEndOfScope,
  582. LockErrorKind LEK){
  583. unsigned DiagID = 0;
  584. switch (LEK) {
  585. case LEK_LockedSomePredecessors:
  586. DiagID = diag::warn_lock_some_predecessors;
  587. break;
  588. case LEK_LockedSomeLoopIterations:
  589. DiagID = diag::warn_expecting_lock_held_on_loop;
  590. break;
  591. case LEK_LockedAtEndOfFunction:
  592. DiagID = diag::warn_no_unlock;
  593. break;
  594. }
  595. if (LocEndOfScope.isInvalid())
  596. LocEndOfScope = FunEndLocation;
  597. PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
  598. PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
  599. Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  600. }
  601. void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
  602. SourceLocation Loc2) {
  603. PartialDiagnosticAt Warning(
  604. Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
  605. PartialDiagnosticAt Note(
  606. Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
  607. Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  608. }
  609. void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
  610. AccessKind AK, SourceLocation Loc) {
  611. assert((POK == POK_VarAccess || POK == POK_VarDereference)
  612. && "Only works for variables");
  613. unsigned DiagID = POK == POK_VarAccess?
  614. diag::warn_variable_requires_any_lock:
  615. diag::warn_var_deref_requires_any_lock;
  616. PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
  617. << D->getName() << getLockKindFromAccessKind(AK));
  618. Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  619. }
  620. void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
  621. Name LockName, LockKind LK, SourceLocation Loc) {
  622. unsigned DiagID = 0;
  623. switch (POK) {
  624. case POK_VarAccess:
  625. DiagID = diag::warn_variable_requires_lock;
  626. break;
  627. case POK_VarDereference:
  628. DiagID = diag::warn_var_deref_requires_lock;
  629. break;
  630. case POK_FunctionCall:
  631. DiagID = diag::warn_fun_requires_lock;
  632. break;
  633. }
  634. PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
  635. << D->getName() << LockName << LK);
  636. Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  637. }
  638. void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
  639. PartialDiagnosticAt Warning(Loc,
  640. S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
  641. Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  642. }
  643. };
  644. }
  645. }
  646. }
  647. //===----------------------------------------------------------------------===//
  648. // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
  649. // warnings on a function, method, or block.
  650. //===----------------------------------------------------------------------===//
// Default policy: only the fall-through check is on. The unreachable-code and
// thread-safety analyses are switched on in the AnalysisBasedWarnings
// constructor when their corresponding warnings are not ignored.
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
}
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    // Statistics counters; all start at zero.
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
  DiagnosticsEngine &D = S.getDiagnostics();
  // Only pay for the expensive analyses when their warnings can actually be
  // emitted.
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
  // warn_double_lock is used as a representative diagnostic for the whole
  // thread-safety group -- presumably all of them share its enabled state;
  // TODO(review): confirm.
  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
}
  675. static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  676. for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
  677. i = fscope->PossiblyUnreachableDiags.begin(),
  678. e = fscope->PossiblyUnreachableDiags.end();
  679. i != e; ++i) {
  680. const sema::PossiblyUnreachableDiag &D = *i;
  681. S.Diag(D.Loc, D.PD);
  682. }
  683. }
// Entry point for Sema's analysis-based warnings: builds a CFG for the body
// of 'D' (a function, method, or block) and runs the analyses selected by
// the policy 'P'.  Delayed "possibly unreachable" diagnostics queued on
// 'fscope' are emitted or dropped based on CFG reachability.  'blkExpr' is
// only used by the fall-through check (for blocks).
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context; the CFG build options are filled in
  // below before the CFG is first requested via AC.getCFG().
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D, 0);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Emit delayed diagnostics.  A diagnostic tied to a statement is emitted
  // only when that statement's CFG block is reachable from the entry block
  // (or when it cannot be mapped to a block at all); if the CFG cannot be
  // built, everything is flushed unconditionally.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        // NOTE: 'D' here shadows the Decl* parameter for the rest of this
        // loop body.
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
                         : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  // Run the uninitialized-variables analysis only if at least one of its
  // two warnings is enabled at the function's location.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      // Accumulate per-TU statistics for PrintStats() (-print-stats).
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
  844. void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  845. llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
  846. unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  847. unsigned AvgCFGBlocksPerFunction =
  848. !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  849. llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
  850. << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
  851. << " " << NumCFGBlocks << " CFG blocks built.\n"
  852. << " " << AvgCFGBlocksPerFunction
  853. << " average CFG blocks per function.\n"
  854. << " " << MaxCFGBlocksPerFunction
  855. << " max CFG blocks per function.\n";
  856. unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
  857. : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  858. unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
  859. : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  860. llvm::errs() << NumUninitAnalysisFunctions
  861. << " functions analyzed for uninitialiazed variables\n"
  862. << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
  863. << " " << AvgUninitVariablesPerFunction
  864. << " average variables per function.\n"
  865. << " " << MaxUninitAnalysisVariablesPerFunction
  866. << " max variables per function.\n"
  867. << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
  868. << " " << AvgUninitBlockVisitsPerFunction
  869. << " average block visits per function.\n"
  870. << " " << MaxUninitAnalysisBlockVisitsPerFunction
  871. << " max block visits per function.\n";
  872. }