AnalysisBasedWarnings.cpp 50 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412
  1. //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines analysis_warnings::[Policy,Executor].
  11. // Together they are used by Sema to issue warnings based on inexpensive
  12. // static analysis algorithms in libAnalysis.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "clang/Sema/AnalysisBasedWarnings.h"
  16. #include "clang/Sema/SemaInternal.h"
  17. #include "clang/Sema/ScopeInfo.h"
  18. #include "clang/Basic/SourceManager.h"
  19. #include "clang/Basic/SourceLocation.h"
  20. #include "clang/Lex/Preprocessor.h"
  21. #include "clang/Lex/Lexer.h"
  22. #include "clang/AST/DeclObjC.h"
  23. #include "clang/AST/DeclCXX.h"
  24. #include "clang/AST/ExprObjC.h"
  25. #include "clang/AST/ExprCXX.h"
  26. #include "clang/AST/StmtObjC.h"
  27. #include "clang/AST/StmtCXX.h"
  28. #include "clang/AST/EvaluatedExprVisitor.h"
  29. #include "clang/AST/StmtVisitor.h"
  30. #include "clang/AST/RecursiveASTVisitor.h"
  31. #include "clang/Analysis/AnalysisContext.h"
  32. #include "clang/Analysis/CFG.h"
  33. #include "clang/Analysis/Analyses/ReachableCode.h"
  34. #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
  35. #include "clang/Analysis/Analyses/ThreadSafety.h"
  36. #include "clang/Analysis/CFGStmtMap.h"
  37. #include "clang/Analysis/Analyses/UninitializedValues.h"
  38. #include "llvm/ADT/BitVector.h"
  39. #include "llvm/ADT/FoldingSet.h"
  40. #include "llvm/ADT/ImmutableMap.h"
  41. #include "llvm/ADT/PostOrderIterator.h"
  42. #include "llvm/ADT/SmallVector.h"
  43. #include "llvm/ADT/StringRef.h"
  44. #include "llvm/Support/Casting.h"
  45. #include <algorithm>
  46. #include <iterator>
  47. #include <vector>
  48. #include <deque>
  49. using namespace clang;
  50. //===----------------------------------------------------------------------===//
  51. // Unreachable code analysis.
  52. //===----------------------------------------------------------------------===//
  53. namespace {
  54. class UnreachableCodeHandler : public reachable_code::Callback {
  55. Sema &S;
  56. public:
  57. UnreachableCodeHandler(Sema &s) : S(s) {}
  58. void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
  59. S.Diag(L, diag::warn_unreachable) << R1 << R2;
  60. }
  61. };
  62. }
  63. /// CheckUnreachable - Check for unreachable code.
  64. static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  65. UnreachableCodeHandler UC(S);
  66. reachable_code::FindUnreachableCode(AC, UC);
  67. }
  68. //===----------------------------------------------------------------------===//
  69. // Check for missing return value.
  70. //===----------------------------------------------------------------------===//
/// Classifies how control can leave the end of a statement body; computed by
/// CheckFallThrough below.
enum ControlFlowKind {
  UnknownFallThrough,      // No CFG available; nothing can be concluded.
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end.
  NeverFallThroughOrReturn // Neither falls off the end nor returns.
};
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough if we always fall off the end of the statement,
/// MaybeFallThrough if we might or might not fall off the end,
/// NeverFallThroughOrReturn if we never fall off the end of the statement or
/// return.  We assume NeverFallThrough if we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          // NOTE: only the re-scan below is guarded by the try-terminator
          // test; the 'continue' applies to every predecessor-less block.
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live precessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;    // Some live block ends in a return.
  bool HasFakeEdge = false;      // Edge from a throw (not a real fall-through).
  bool HasPlainEdge = false;     // Genuine fall-off-the-end edge into exit.
  bool HasAbnormalEdge = false;  // Edge via noreturn / try / non-exit succ.

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return.  They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look pass the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last real statement of the block.
    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // A block that does not list the exit block as a successor reaches it
    // only abnormally.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }

  // Fold the collected edge kinds into a single verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
namespace {
/// Bundles the diagnostic IDs used when reporting fall-off-the-end problems.
/// Each factory below selects the appropriate set for a function, a block,
/// or a lambda (blocks and lambdas promote some warnings to errors).
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn; // 0 disables the suggestion.
  enum { Function, Block, Lambda } funMode;
  SourceLocation FuncLoc;

  /// Diagnostic set for an ordinary function or Objective-C method.
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  /// Diagnostic set for a block literal; fall-off problems are hard errors.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = Block;
    return D;
  }

  /// Diagnostic set for a lambda body; no "suggest noreturn" note.
  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true when every diagnostic that could possibly fire for this
  /// entity is disabled at FuncLoc, so the caller can skip the CFG-based
  /// analysis entirely.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == DiagnosticsEngine::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == DiagnosticsEngine::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
    }

    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn
        && ((funMode == Lambda) ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
  }
};
}
/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param D the function, Objective-C method, or block declaration.
/// \param blkExpr only consulted when \p D is a BlockDecl, to recover the
///        block's function type.
/// \param CD the diagnostic set chosen for this kind of entity.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Determine the declared return-type / noreturn properties for each kind
  // of declaration.
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // Blocks carry their signature on the BlockExpr's (pointer) type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // Suggest marking the entity noreturn (ID 0 means "don't suggest").
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}
  354. //===----------------------------------------------------------------------===//
  355. // -Wuninitialized
  356. //===----------------------------------------------------------------------===//
  357. namespace {
  358. /// ContainsReference - A visitor class to search for references to
  359. /// a particular declaration (the needle) within any evaluated component of an
  360. /// expression (recursively).
  361. class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  362. bool FoundReference;
  363. const DeclRefExpr *Needle;
  364. public:
  365. ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
  366. : EvaluatedExprVisitor<ContainsReference>(Context),
  367. FoundReference(false), Needle(Needle) {}
  368. void VisitExpr(Expr *E) {
  369. // Stop evaluating if we already have a reference.
  370. if (FoundReference)
  371. return;
  372. EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  373. }
  374. void VisitDeclRefExpr(DeclRefExpr *E) {
  375. if (E == Needle)
  376. FoundReference = true;
  377. else
  378. EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  379. }
  380. bool doesContainReference() const { return FoundReference; }
  381. };
  382. }
  383. static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  384. QualType VariableTy = VD->getType().getCanonicalType();
  385. if (VariableTy->isBlockPointerType() &&
  386. !VD->hasAttr<BlocksAttr>()) {
  387. S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
  388. << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
  389. return true;
  390. }
  391. // Don't issue a fixit if there is already an initializer.
  392. if (VD->getInit())
  393. return false;
  394. // Suggest possible initialization (if any).
  395. std::string Init = S.getFixItZeroInitializerForType(VariableTy);
  396. if (Init.empty())
  397. return false;
  398. // Don't suggest a fixit inside macros.
  399. if (VD->getLocEnd().isMacroID())
  400. return false;
  401. SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
  402. S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
  403. << FixItHint::CreateInsertion(Loc, Init);
  404. return true;
  405. }
  406. /// Create a fixit to remove an if-like statement, on the assumption that its
  407. /// condition is CondVal.
  408. static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
  409. const Stmt *Else, bool CondVal,
  410. FixItHint &Fixit1, FixItHint &Fixit2) {
  411. if (CondVal) {
  412. // If condition is always true, remove all but the 'then'.
  413. Fixit1 = FixItHint::CreateRemoval(
  414. CharSourceRange::getCharRange(If->getLocStart(),
  415. Then->getLocStart()));
  416. if (Else) {
  417. SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
  418. Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
  419. Fixit2 = FixItHint::CreateRemoval(
  420. SourceRange(ElseKwLoc, Else->getLocEnd()));
  421. }
  422. } else {
  423. // If condition is always false, remove all but the 'else'.
  424. if (Else)
  425. Fixit1 = FixItHint::CreateRemoval(
  426. CharSourceRange::getCharRange(If->getLocStart(),
  427. Else->getLocStart()));
  428. else
  429. Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  430. }
  431. }
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param Use describes the use and, for "sometimes" uses, the branches
///        leading to it.
/// \param IsCapturedByBlock selects the "captured by block" wording.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    const char *Str;
    SourceRange Range;

    // FixIts to suppress the diagnosic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1; // -1: no "remove the condition" note.
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term->getStmtClass()) {
    default:
      // Don't know how to report this.  Just fall back to 'may be used
      // uninitialized'.  This happens for range-based for, which the user
      // can't explicitly fix.
      // FIXME: This also happens if the first use of a variable is always
      // uninitialized, eg "for (int n; n < 10; ++n)".  We should report that
      // with the 'is uninitialized' diagnostic.
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only short-circuiting operators create branches worth reporting.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
                                                      BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      // An entered 'for' loop can have its condition removed entirely; an
      // exited one needs the condition replaced by the constant.
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Main "sometimes uninitialized" warning, the note pointing at the use,
    // and (when applicable) the note suggesting removal of the condition.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
      << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch-specific diagnostic was produced: emit the plain
  // always/maybe-uninitialized warning at the use.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getLocStart(),
           Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
                                              : diag::warn_maybe_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << Use.getUser()->getSourceRange();
}
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable.  This manages the different forms of diagnostic
/// emitted for particular types of uses.  Returns true if the use was
/// diagnosed as a warning.  If a particular use is one we omit warnings for,
/// returns false.
///
/// \param alwaysReportSelfInit when true, "int x = x;" is reported rather
///        than treated as an intentional GCC-style suppression.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization.  We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized.  Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The use appears somewhere inside the initializer (not the direct
      // "x = x" form handled above): emit the self-reference warning.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(const_cast<Expr*>(Initializer));
      if (CR.doesContainReference()) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // The only other user kind handled here is a capturing BlockExpr.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getLocStart(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
        << VD->getDeclName();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}
  608. namespace {
  609. class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  610. public:
  611. FallthroughMapper(Sema &S)
  612. : FoundSwitchStatements(false),
  613. S(S) {
  614. }
  615. bool foundSwitchStatements() const { return FoundSwitchStatements; }
  616. void markFallthroughVisited(const AttributedStmt *Stmt) {
  617. bool Found = FallthroughStmts.erase(Stmt);
  618. assert(Found);
  619. (void)Found;
  620. }
  621. typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
  622. const AttrStmts &getFallthroughStmts() const {
  623. return FallthroughStmts;
  624. }
  /// Determine whether control can fall through into the given case-labelled
  /// CFG block from a preceding case, classifying each incoming edge as
  /// annotated (its last statement is a [[clang::fallthrough]]) or not.
  ///
  /// \param B            A CFG block whose label is a SwitchCase.
  /// \param AnnotatedCnt [out] Number of annotated fall-through predecessors.
  /// \returns true if at least one predecessor falls through without an
  ///          annotation, i.e. a warning should be issued for this label.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    // Breadth-first walk backwards over predecessors; blocks that contain no
    // executable statements are skipped through to their own predecessors.
    std::deque<const CFGBlock*> BlockQueue;

    std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();

      const Stmt *Term = P->getTerminator();
      if (Term && isa<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      if (P->pred_begin() == P->pred_end()) {  // The block is unreachable.
        // This only catches trivially unreachable blocks.
        for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
             ElIt != ElEnd; ++ElIt) {
          if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Annotation in dead code: warn about it, but count it as
              // consumed so it is not also reported as misplaced later.
              S.Diag(AS->getLocStart(),
                     diag::warn_fallthrough_attr_unreachable);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
            }
            // Don't care about other unreachable statements.
          }
        }

        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }
  679. // RecursiveASTVisitor setup.
  680. bool shouldWalkTypesOfTypeLocs() const { return false; }
  681. bool VisitAttributedStmt(AttributedStmt *S) {
  682. if (asFallThroughAttr(S))
  683. FallthroughStmts.insert(S);
  684. return true;
  685. }
  686. bool VisitSwitchStmt(SwitchStmt *S) {
  687. FoundSwitchStatements = true;
  688. return true;
  689. }
  690. private:
  691. static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
  692. if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
  693. if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
  694. return AS;
  695. }
  696. return 0;
  697. }
  /// Returns the last "interesting" statement of block \p B: its terminator
  /// if it has one, otherwise the last CFGStmt element. Returns null when
  /// the block holds no statement at all.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminator())
      return Term;
    // Scan elements back-to-front for the last real statement.
    for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                          ElemEnd = B.rend();
                                          ElemIt != ElemEnd; ++ElemIt) {
      if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
        return CS->getStmt();
    }
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {}   case Y:
    //   case X: ;    case Y:
    // The empty substatement is not represented as a CFG element, so recover
    // it from the case label itself (unless it is just the next case label).
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();

    return 0;
  }
  // True if the traversal saw any SwitchStmt in the body.
  bool FoundSwitchStatements;
  // Annotations recorded by VisitAttributedStmt and not yet consumed.
  AttrStmts FallthroughStmts;
  // Used only to emit diagnostics.
  Sema &S;
};
  719. }
/// Diagnose implicit fall-through between switch labels in \p AC's body.
///
/// \param PerFunction If true, only functions that already contain at least
///        one fallthrough annotation are checked (the "per function"
///        variant of the warning).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  int AnnotatedCnt;

  // The CFG stores blocks roughly in reverse order, so iterate in reverse to
  // visit case labels in source order; check each case-labelled block.
  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock &B = **I;
    const Stmt *Label = B.getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    if (!FM.checkFallThroughIntoBlock(B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
        PerFunction ? diag::warn_unannotated_fallthrough_per_function
                    : diag::warn_unannotated_fallthrough);

    // Suggest fix-its only when no incoming edge was annotated, and never
    // inside a macro expansion (no sane insertion point).
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus0x) {
        const Stmt *Term = B.getTerminator();
        // Do not suggest the [[clang::fallthrough]] annotation for an empty
        // case that merely breaks; "break;" (suggested below) fits better.
        if (!(B.empty() && Term && isa<BreakStmt>(Term))) {
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
            FixItHint::CreateInsertion(L, "[[clang::fallthrough]]; ");
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
        FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation never matched to a case label is misplaced: report it.
  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
                                                    E = Fallthroughs.end();
                                                    I != E; ++I) {
    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
  }
}
  764. namespace {
  765. struct SLocSort {
  766. bool operator()(const UninitUse &a, const UninitUse &b) {
  767. // Prefer a more confident report over a less confident one.
  768. if (a.getKind() != b.getKind())
  769. return a.getKind() > b.getKind();
  770. SourceLocation aLoc = a.getUser()->getLocStart();
  771. SourceLocation bLoc = b.getUser()->getLocStart();
  772. return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  773. }
  774. };
  775. class UninitValsDiagReporter : public UninitVariablesHandler {
  776. Sema &S;
  777. typedef SmallVector<UninitUse, 2> UsesVec;
  778. typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
  779. UsesMap *uses;
  780. public:
  781. UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  782. ~UninitValsDiagReporter() {
  783. flushDiagnostics();
  784. }
  785. std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
  786. if (!uses)
  787. uses = new UsesMap();
  788. UsesMap::mapped_type &V = (*uses)[vd];
  789. UsesVec *&vec = V.first;
  790. if (!vec)
  791. vec = new UsesVec();
  792. return V;
  793. }
  794. void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
  795. getUses(vd).first->push_back(use);
  796. }
  797. void handleSelfInit(const VarDecl *vd) {
  798. getUses(vd).second = true;
  799. }
  800. void flushDiagnostics() {
  801. if (!uses)
  802. return;
  803. // FIXME: This iteration order, and thus the resulting diagnostic order,
  804. // is nondeterministic.
  805. for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
  806. const VarDecl *vd = i->first;
  807. const UsesMap::mapped_type &V = i->second;
  808. UsesVec *vec = V.first;
  809. bool hasSelfInit = V.second;
  810. // Specially handle the case where we have uses of an uninitialized
  811. // variable, but the root cause is an idiomatic self-init. We want
  812. // to report the diagnostic at the self-init since that is the root cause.
  813. if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
  814. DiagnoseUninitializedUse(S, vd,
  815. UninitUse(vd->getInit()->IgnoreParenCasts(),
  816. /* isAlwaysUninit */ true),
  817. /* alwaysReportSelfInit */ true);
  818. else {
  819. // Sort the uses by their SourceLocations. While not strictly
  820. // guaranteed to produce them in line/column order, this will provide
  821. // a stable ordering.
  822. std::sort(vec->begin(), vec->end(), SLocSort());
  823. for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
  824. ++vi) {
  825. // If we have self-init, downgrade all uses to 'may be uninitialized'.
  826. UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
  827. if (DiagnoseUninitializedUse(S, vd, Use))
  828. // Skip further diagnostics for this variable. We try to warn only
  829. // on the first point at which a variable is used uninitialized.
  830. break;
  831. }
  832. }
  833. // Release the uses vector.
  834. delete vec;
  835. }
  836. delete uses;
  837. }
  838. private:
  839. static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
  840. for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
  841. if (i->getKind() == UninitUse::Always) {
  842. return true;
  843. }
  844. }
  845. return false;
  846. }
  847. };
  848. }
  849. //===----------------------------------------------------------------------===//
  850. // -Wthread-safety
  851. //===----------------------------------------------------------------------===//
  852. namespace clang {
  853. namespace thread_safety {
// A delayed thread-safety diagnostic: the warning itself paired with any
// notes to attach to it. Diagnostics are buffered in a DiagList so they can
// be sorted by source location before being emitted.
typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
typedef std::list<DelayedDiag> DiagList;
  857. struct SortDiagBySourceLocation {
  858. SourceManager &SM;
  859. SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
  860. bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
  861. // Although this call will be slow, this is only called when outputting
  862. // multiple warnings.
  863. return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
  864. }
  865. };
  866. namespace {
/// Receives callbacks from the thread-safety analysis and buffers the
/// resulting diagnostics; emitDiagnostics() sorts them by source location
/// and hands them to Sema for output.
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Fallback locations when the analysis has no precise one: the start and
  // end of the function being analyzed.
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions
  /// Buffer a note-less lock-mismatch warning at \p Loc (or at the function
  /// start when \p Loc is invalid).
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I) {
      S.Diag(I->first.first, I->first.second);
      const OptionalNotes &Notes = I->second;
      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
    }
  }

  /// Called when a lock expression could not be resolved to a lock object.
  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Called on an unlock of a mutex that is not currently held.
  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  /// Called when a mutex already held is locked again.
  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  /// Called when a mutex's held/not-held state at the end of a scope does
  /// not match expectations; \p LEK selects which situation occurred.
  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK){
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
    // Attach a note pointing at where the mutex was acquired.
    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  /// Called when the same mutex is held exclusively at one point and shared
  /// at another.
  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnosticAt Warning(
      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
    PartialDiagnosticAt Note(
      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  /// Called when a guarded variable is accessed while no lock at all is
  /// held. Only variable access/dereference are possible here.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess ?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getNameAsString() << getLockKindFromAccessKind(AK));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Called when an operation requires a specific mutex that is not held.
  /// If \p PossibleMatch is non-null, a near-miss lock was held and a note
  /// suggesting it is attached.
  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                               << *PossibleMatch);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
    }
  }

  /// Called when a function is invoked while holding a mutex it declares
  /// via LOCKS_EXCLUDED.
  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
  995. }
  996. }
  997. }
  998. //===----------------------------------------------------------------------===//
  999. // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
  1000. // warnings on a function, method, or block.
  1001. //===----------------------------------------------------------------------===//
  1002. clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  1003. enableCheckFallThrough = 1;
  1004. enableCheckUnreachable = 0;
  1005. enableThreadSafetyAnalysis = 0;
  1006. }
/// Construct the worker, zeroing all statistics counters and deriving the
/// default policy from which warnings are currently enabled.
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumFunctionsWithBadCFGs(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0),
    NumUninitAnalysisFunctions(0),
    NumUninitAnalysisVariables(0),
    MaxUninitAnalysisVariablesPerFunction(0),
    NumUninitAnalysisBlockVisits(0),
    MaxUninitAnalysisBlockVisitsPerFunction(0) {
  DiagnosticsEngine &D = S.getDiagnostics();
  // Run the optional analyses only when their warnings are not ignored;
  // a representative diagnostic is probed for each analysis.
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
     DiagnosticsEngine::Ignored);
}
  1026. static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  1027. for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
  1028. i = fscope->PossiblyUnreachableDiags.begin(),
  1029. e = fscope->PossiblyUnreachableDiags.end();
  1030. i != e; ++i) {
  1031. const sema::PossiblyUnreachableDiag &D = *i;
  1032. S.Diag(D.Loc, D.PD);
  1033. }
  1034. }
/// Run all enabled analysis-based warnings (delayed possibly-unreachable
/// diagnostics, missing return, unreachable code, thread safety,
/// uninitialized variables, implicit switch fall-through) on one function,
/// method, or block, and update the statistics counters.
///
/// \param P       Policy selecting which analyses run for this declaration.
/// \param fscope  Scope info carrying delayed possibly-unreachable diags.
/// \param D       The declaration whose body is analyzed.
/// \param blkExpr The BlockExpr when analyzing a block, otherwise null.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Construct the analysis context with the specified CFG build options.

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Blocks, lambda call operators, and ordinary functions each get their
    // own diagnostic wording.
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
       : (isa<CXXMethodDecl>(D) &&
          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
          cast<CXXMethodDecl>(D)->getParent()->isLambda())
            ? CheckFallThroughDiagnostics::MakeForLambda()
            : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  // Run the uninitialized-variable analysis only if at least one of its
  // controlling diagnostics is enabled at this location.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Diagnose implicit fall-through between switch labels. The "per function"
  // variant only fires in functions that already use the annotation.
  bool FallThroughDiagFull =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  bool FallThroughDiagPerFunction =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  if (FallThroughDiagFull || FallThroughDiagPerFunction) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
  1213. void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  1214. llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
  1215. unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  1216. unsigned AvgCFGBlocksPerFunction =
  1217. !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  1218. llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
  1219. << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
  1220. << " " << NumCFGBlocks << " CFG blocks built.\n"
  1221. << " " << AvgCFGBlocksPerFunction
  1222. << " average CFG blocks per function.\n"
  1223. << " " << MaxCFGBlocksPerFunction
  1224. << " max CFG blocks per function.\n";
  1225. unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
  1226. : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  1227. unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
  1228. : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  1229. llvm::errs() << NumUninitAnalysisFunctions
  1230. << " functions analyzed for uninitialiazed variables\n"
  1231. << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
  1232. << " " << AvgUninitVariablesPerFunction
  1233. << " average variables per function.\n"
  1234. << " " << MaxUninitAnalysisVariablesPerFunction
  1235. << " max variables per function.\n"
  1236. << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
  1237. << " " << AvgUninitBlockVisitsPerFunction
  1238. << " average block visits per function.\n"
  1239. << " " << MaxUninitAnalysisBlockVisitsPerFunction
  1240. << " max block visits per function.\n";
  1241. }