//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/Sema/SemaInternal.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <vector>

using namespace clang;
//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}
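
// Example (illustrative only, not part of the analysis itself): with
// -Wunreachable-code enabled, a statement that can never execute, e.g.
//
//   int f(int x) { return x; x++; }   // 'x++' can never run
//
// is reported through the UnreachableCodeHandler callback above.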
//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }
  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return.  They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }

  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
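
// Example (illustrative only): a non-void function whose control flow can
// reach the closing brace without returning, e.g.
//
//   int sign(int x) { if (x > 0) return 1; }   // falls off the end when x <= 0
//
// yields MaybeFallThrough here and is later diagnosed by
// CheckFallThroughForBody.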

namespace {

struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  enum { Function, Block, Lambda } funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == DiagnosticsEngine::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == DiagnosticsEngine::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
    }

    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn
      && ((funMode == Lambda) ||
          D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
            == DiagnosticsEngine::Ignored);
  }
};

}

/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : EvaluatedExprVisitor<ContainsReference>(Context),
      FoundReference(false), Needle(Needle) {}

  void VisitExpr(Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  }

  void VisitDeclRefExpr(DeclRefExpr *E) {
    if (E == Needle)
      FoundReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
}

static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return false;

  // Suggest possible initialization (if any).
  QualType VariableTy = VD->getType().getCanonicalType();
  const char *Init = S.getFixItZeroInitializerForType(VariableTy);
  if (!Init)
    return false;

  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
    << FixItHint::CreateInsertion(Loc, Init);
  return true;
}
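
// Example (illustrative only): for an uninitialized 'int x;' that is later
// read, SuggestInitializationFixit attaches a note proposing the zero
// initializer appropriate to the variable's type, i.e. 'int x = 0;'.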

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable.  This manages the different forms of diagnostic
/// emitted for particular types of uses.  Returns true if the use was
/// diagnosed as a warning.  If a particular use is one we omit warnings for,
/// returns false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const Expr *E, bool isAlwaysUninit,
                                     bool alwaysReportSelfInit = false) {
  bool isSelfInit = false;

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
    if (isAlwaysUninit) {
      // Inspect the initializer of the variable declaration which is
      // being referenced prior to its initialization.  We emit
      // specialized diagnostics for self-initialization, and we
      // specifically avoid warning about self references which take the
      // form of:
      //
      //   int x = x;
      //
      // This is used to indicate to GCC that 'x' is intentionally left
      // uninitialized.  Proven code paths which access 'x' in
      // an uninitialized state after this will still warn.
      //
      // TODO: Should we suppress maybe-uninitialized warnings for
      // variables initialized in this way?
      if (const Expr *Initializer = VD->getInit()) {
        if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
          return false;

        ContainsReference CR(S.Context, DRE);
        CR.Visit(const_cast<Expr*>(Initializer));
        isSelfInit = CR.doesContainReference();
      }
      if (isSelfInit) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
      } else {
        S.Diag(DRE->getLocStart(), diag::warn_uninit_var)
          << VD->getDeclName() << DRE->getSourceRange();
      }
    } else {
      S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << DRE->getSourceRange();
    }
  } else {
    const BlockExpr *BE = cast<BlockExpr>(E);
    S.Diag(BE->getLocStart(),
           isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                          : diag::warn_maybe_uninit_var_captured_by_block)
      << VD->getDeclName();
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!isSelfInit && !SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}
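
// Example (illustrative only): code such as
//
//   int x;
//   int y = x + 1;   // 'x' is read before it is ever written
//
// reaches DiagnoseUninitializedUse with isAlwaysUninit == true; a use that is
// uninitialized only on some paths produces the "may be uninitialized"
// variant instead.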

typedef std::pair<const Expr*, bool> UninitUse;

namespace {
struct SLocSort {
  bool operator()(const UninitUse &a, const UninitUse &b) {
    SourceLocation aLoc = a.first->getLocStart();
    SourceLocation bLoc = b.first->getLocStart();
    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  }
};
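
// UninitValsDiagReporter buffers the uses reported by the uninitialized-values
// analysis, keyed by VarDecl, and flushes them when the reporter is destroyed,
// emitting for each variable a diagnostic at the first use (in source order)
// that warrants one.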
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
  UsesMap *uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    flushDiagnostics();
  }

  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
    if (!uses)
      uses = new UsesMap();

    UsesMap::mapped_type &V = (*uses)[vd];
    UsesVec *&vec = V.first;
    if (!vec)
      vec = new UsesVec();

    return V;
  }

  void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
                                 bool isAlwaysUninit) {
    getUses(vd).first->push_back(std::make_pair(ex, isAlwaysUninit));
  }

  void handleSelfInit(const VarDecl *vd) {
    getUses(vd).second = true;
  }

  void flushDiagnostics() {
    if (!uses)
      return;

    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      const UsesMap::mapped_type &V = i->second;

      UsesVec *vec = V.first;
      bool hasSelfInit = V.second;

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root
      // cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd, vd->getInit()->IgnoreParenCasts(),
                                 /* isAlwaysUninit */ true,
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        std::sort(vec->begin(), vec->end(), SLocSort());

        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
             ++vi) {
          if (DiagnoseUninitializedUse(S, vd, vi->first,
                                       /*isAlwaysUninit=*/vi->second))
            // Skip further diagnostics for this variable.  We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }
    delete uses;
  }

private:
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    for (UsesVec::const_iterator i = vec->begin(), e = vec->end();
         i != e; ++i) {
      if (i->second) {
        return true;
      }
    }
    return false;
  }
};
}

//===----------------------------------------------------------------------===//
// -Wthread-safety
//===----------------------------------------------------------------------===//

namespace clang {
namespace thread_safety {

typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
typedef llvm::SmallVector<DelayedDiag, 4> DiagList;

struct SortDiagBySourceLocation {
  Sema &S;
  SortDiagBySourceLocation(Sema &S) : S(S) {}

  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
    // Although this call will be slow, this is only called when outputting
    // multiple warnings.
    return S.getSourceManager().isBeforeInTranslationUnit(left.first.first,
                                                          right.first.first);
  }
};

namespace {
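// ThreadSafetyReporter receives callbacks from the thread-safety analysis and
// buffers each warning (together with any attached notes) as a DelayedDiag so
// that the whole batch can be sorted by source location before being emitted.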
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    SortDiagBySourceLocation SortDiagBySL(S);
    sort(Warnings.begin(), Warnings.end(), SortDiagBySL);
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I) {
      S.Diag(I->first.first, I->first.second);
      const OptionalNotes &Notes = I->second;
      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
    }
  }

  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnosticAt Warning(
      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
    PartialDiagnosticAt Note(
      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess ?
                        diag::warn_variable_requires_any_lock :
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getName() << getLockKindFromAccessKind(AK));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc) {
    unsigned DiagID = 0;
    switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
    }
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getName() << LockName << LK);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
}
}
}
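
// Example (illustrative only; assumes a 'Mutex' class annotated as lockable,
// using the commonly documented thread-safety attribute spelling):
//
//   Mutex mu;
//   int data __attribute__((guarded_by(mu)));
//   void f() { data = 1; }   // access to 'data' without holding 'mu'
//
// Such an access is reported through handleMutexNotHeld above.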

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
//  warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
}
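
// Note: only the fall-through check is enabled unconditionally.  The
// unreachable-code and thread-safety analyses are switched on in the
// AnalysisBasedWarnings constructor below, based on whether the corresponding
// diagnostics are enabled for this translation unit.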

clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
  DiagnosticsEngine &D = S.getDiagnostics();
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
}

static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
       i = fscope->PossiblyUnreachableDiags.begin(),
       e = fscope->PossiblyUnreachableDiags.end();
       i != e; ++i) {
    const sema::PossiblyUnreachableDiag &D = *i;
    S.Diag(D.Loc, D.PD);
  }
}

void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D, 0);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Construct the analysis context with the specified CFG build options.

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
           i = fscope->PossiblyUnreachableDiags.begin(),
           e = fscope->PossiblyUnreachableDiags.end();
           i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;

        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
       : (isa<CXXMethodDecl>(D) &&
          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
          cast<CXXMethodDecl>(D)->getParent()->isLambda())
            ? CheckFallThroughDiagnostics::MakeForLambda()
            : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the
    // control-flow and it is very difficult to prove that a snippet of code
    // in a template is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detailed information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << " " << NumCFGBlocks << " CFG blocks built.\n"
               << " " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << " " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << " " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << " " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << " " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << " " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
}