// AnalysisBasedWarnings.cpp (pagination/line-number artifacts from the source capture removed)
  1. //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file defines analysis_warnings::[Policy,Executor].
  10. // Together they are used by Sema to issue warnings based on inexpensive
  11. // static analysis algorithms in libAnalysis.
  12. //
  13. //===----------------------------------------------------------------------===//
  14. #include "clang/Sema/AnalysisBasedWarnings.h"
  15. #include "clang/AST/DeclCXX.h"
  16. #include "clang/AST/DeclObjC.h"
  17. #include "clang/AST/EvaluatedExprVisitor.h"
  18. #include "clang/AST/ExprCXX.h"
  19. #include "clang/AST/ExprObjC.h"
  20. #include "clang/AST/ParentMap.h"
  21. #include "clang/AST/RecursiveASTVisitor.h"
  22. #include "clang/AST/StmtCXX.h"
  23. #include "clang/AST/StmtObjC.h"
  24. #include "clang/AST/StmtVisitor.h"
  25. #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
  26. #include "clang/Analysis/Analyses/Consumed.h"
  27. #include "clang/Analysis/Analyses/ReachableCode.h"
  28. #include "clang/Analysis/Analyses/ThreadSafety.h"
  29. #include "clang/Analysis/Analyses/UninitializedValues.h"
  30. #include "clang/Analysis/AnalysisDeclContext.h"
  31. #include "clang/Analysis/CFG.h"
  32. #include "clang/Analysis/CFGStmtMap.h"
  33. #include "clang/Basic/SourceLocation.h"
  34. #include "clang/Basic/SourceManager.h"
  35. #include "clang/Lex/Preprocessor.h"
  36. #include "clang/Sema/ScopeInfo.h"
  37. #include "clang/Sema/SemaInternal.h"
  38. #include "llvm/ADT/BitVector.h"
  39. #include "llvm/ADT/MapVector.h"
  40. #include "llvm/ADT/SmallString.h"
  41. #include "llvm/ADT/SmallVector.h"
  42. #include "llvm/ADT/StringRef.h"
  43. #include "llvm/Support/Casting.h"
  44. #include <algorithm>
  45. #include <deque>
  46. #include <iterator>
  47. using namespace clang;
  48. //===----------------------------------------------------------------------===//
  49. // Unreachable code analysis.
  50. //===----------------------------------------------------------------------===//
  51. namespace {
  52. class UnreachableCodeHandler : public reachable_code::Callback {
  53. Sema &S;
  54. SourceRange PreviousSilenceableCondVal;
  55. public:
  56. UnreachableCodeHandler(Sema &s) : S(s) {}
  57. void HandleUnreachable(reachable_code::UnreachableKind UK,
  58. SourceLocation L,
  59. SourceRange SilenceableCondVal,
  60. SourceRange R1,
  61. SourceRange R2) override {
  62. // Avoid reporting multiple unreachable code diagnostics that are
  63. // triggered by the same conditional value.
  64. if (PreviousSilenceableCondVal.isValid() &&
  65. SilenceableCondVal.isValid() &&
  66. PreviousSilenceableCondVal == SilenceableCondVal)
  67. return;
  68. PreviousSilenceableCondVal = SilenceableCondVal;
  69. unsigned diag = diag::warn_unreachable;
  70. switch (UK) {
  71. case reachable_code::UK_Break:
  72. diag = diag::warn_unreachable_break;
  73. break;
  74. case reachable_code::UK_Return:
  75. diag = diag::warn_unreachable_return;
  76. break;
  77. case reachable_code::UK_Loop_Increment:
  78. diag = diag::warn_unreachable_loop_increment;
  79. break;
  80. case reachable_code::UK_Other:
  81. break;
  82. }
  83. S.Diag(L, diag) << R1 << R2;
  84. SourceLocation Open = SilenceableCondVal.getBegin();
  85. if (Open.isValid()) {
  86. SourceLocation Close = SilenceableCondVal.getEnd();
  87. Close = S.getLocForEndOfToken(Close);
  88. if (Close.isValid()) {
  89. S.Diag(Open, diag::note_unreachable_silence)
  90. << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
  91. << FixItHint::CreateInsertion(Close, ")");
  92. }
  93. }
  94. }
  95. };
  96. } // anonymous namespace
  97. /// CheckUnreachable - Check for unreachable code.
  98. static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  99. // As a heuristic prune all diagnostics not in the main file. Currently
  100. // the majority of warnings in headers are false positives. These
  101. // are largely caused by configuration state, e.g. preprocessor
  102. // defined code, etc.
  103. //
  104. // Note that this is also a performance optimization. Analyzing
  105. // headers many times can be expensive.
  106. if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
  107. return;
  108. UnreachableCodeHandler UC(S);
  109. reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
  110. }
  111. namespace {
  112. /// Warn on logical operator errors in CFGBuilder
  113. class LogicalErrorHandler : public CFGCallback {
  114. Sema &S;
  115. public:
  116. LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}
  117. static bool HasMacroID(const Expr *E) {
  118. if (E->getExprLoc().isMacroID())
  119. return true;
  120. // Recurse to children.
  121. for (const Stmt *SubStmt : E->children())
  122. if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
  123. if (HasMacroID(SubExpr))
  124. return true;
  125. return false;
  126. }
  127. void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
  128. if (HasMacroID(B))
  129. return;
  130. SourceRange DiagRange = B->getSourceRange();
  131. S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
  132. << DiagRange << isAlwaysTrue;
  133. }
  134. void compareBitwiseEquality(const BinaryOperator *B,
  135. bool isAlwaysTrue) override {
  136. if (HasMacroID(B))
  137. return;
  138. SourceRange DiagRange = B->getSourceRange();
  139. S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
  140. << DiagRange << isAlwaysTrue;
  141. }
  142. void compareBitwiseOr(const BinaryOperator *B) override {
  143. if (HasMacroID(B))
  144. return;
  145. SourceRange DiagRange = B->getSourceRange();
  146. S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
  147. }
  148. static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
  149. SourceLocation Loc) {
  150. return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
  151. !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
  152. }
  153. };
  154. } // anonymous namespace
  155. //===----------------------------------------------------------------------===//
  156. // Check for infinite self-recursion in functions
  157. //===----------------------------------------------------------------------===//
  158. // Returns true if the function is called anywhere within the CFGBlock.
  159. // For member functions, the additional condition of being call from the
  160. // this pointer is required.
  161. static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  162. // Process all the Stmt's in this block to find any calls to FD.
  163. for (const auto &B : Block) {
  164. if (B.getKind() != CFGElement::Statement)
  165. continue;
  166. const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
  167. if (!CE || !CE->getCalleeDecl() ||
  168. CE->getCalleeDecl()->getCanonicalDecl() != FD)
  169. continue;
  170. // Skip function calls which are qualified with a templated class.
  171. if (const DeclRefExpr *DRE =
  172. dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
  173. if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
  174. if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
  175. isa<TemplateSpecializationType>(NNS->getAsType())) {
  176. continue;
  177. }
  178. }
  179. }
  180. const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
  181. if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
  182. !MCE->getMethodDecl()->isVirtual())
  183. return true;
  184. }
  185. return false;
  186. }
  187. // Returns true if every path from the entry block passes through a call to FD.
  188. static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
  189. llvm::SmallPtrSet<CFGBlock *, 16> Visited;
  190. llvm::SmallVector<CFGBlock *, 16> WorkList;
  191. // Keep track of whether we found at least one recursive path.
  192. bool foundRecursion = false;
  193. const unsigned ExitID = cfg->getExit().getBlockID();
  194. // Seed the work list with the entry block.
  195. WorkList.push_back(&cfg->getEntry());
  196. while (!WorkList.empty()) {
  197. CFGBlock *Block = WorkList.pop_back_val();
  198. for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
  199. if (CFGBlock *SuccBlock = *I) {
  200. if (!Visited.insert(SuccBlock).second)
  201. continue;
  202. // Found a path to the exit node without a recursive call.
  203. if (ExitID == SuccBlock->getBlockID())
  204. return false;
  205. // If the successor block contains a recursive call, end analysis there.
  206. if (hasRecursiveCallInPath(FD, *SuccBlock)) {
  207. foundRecursion = true;
  208. continue;
  209. }
  210. WorkList.push_back(SuccBlock);
  211. }
  212. }
  213. }
  214. return foundRecursion;
  215. }
  216. static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
  217. const Stmt *Body, AnalysisDeclContext &AC) {
  218. FD = FD->getCanonicalDecl();
  219. // Only run on non-templated functions and non-templated members of
  220. // templated classes.
  221. if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
  222. FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
  223. return;
  224. CFG *cfg = AC.getCFG();
  225. if (!cfg) return;
  226. // If the exit block is unreachable, skip processing the function.
  227. if (cfg->getExit().pred_empty())
  228. return;
  229. // Emit diagnostic if a recursive function call is detected for all paths.
  230. if (checkForRecursiveFunctionCall(FD, cfg))
  231. S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
  232. }
  233. //===----------------------------------------------------------------------===//
  234. // Check for throw in a non-throwing function.
  235. //===----------------------------------------------------------------------===//
  236. /// Determine whether an exception thrown by E, unwinding from ThrowBlock,
  237. /// can reach ExitBlock.
  238. static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
  239. CFG *Body) {
  240. SmallVector<CFGBlock *, 16> Stack;
  241. llvm::BitVector Queued(Body->getNumBlockIDs());
  242. Stack.push_back(&ThrowBlock);
  243. Queued[ThrowBlock.getBlockID()] = true;
  244. while (!Stack.empty()) {
  245. CFGBlock &UnwindBlock = *Stack.back();
  246. Stack.pop_back();
  247. for (auto &Succ : UnwindBlock.succs()) {
  248. if (!Succ.isReachable() || Queued[Succ->getBlockID()])
  249. continue;
  250. if (Succ->getBlockID() == Body->getExit().getBlockID())
  251. return true;
  252. if (auto *Catch =
  253. dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
  254. QualType Caught = Catch->getCaughtType();
  255. if (Caught.isNull() || // catch (...) catches everything
  256. !E->getSubExpr() || // throw; is considered cuaght by any handler
  257. S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
  258. // Exception doesn't escape via this path.
  259. break;
  260. } else {
  261. Stack.push_back(Succ);
  262. Queued[Succ->getBlockID()] = true;
  263. }
  264. }
  265. }
  266. return false;
  267. }
  268. static void visitReachableThrows(
  269. CFG *BodyCFG,
  270. llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
  271. llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
  272. clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
  273. for (CFGBlock *B : *BodyCFG) {
  274. if (!Reachable[B->getBlockID()])
  275. continue;
  276. for (CFGElement &E : *B) {
  277. Optional<CFGStmt> S = E.getAs<CFGStmt>();
  278. if (!S)
  279. continue;
  280. if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
  281. Visit(Throw, *B);
  282. }
  283. }
  284. }
  285. static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
  286. const FunctionDecl *FD) {
  287. if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
  288. FD->getTypeSourceInfo()) {
  289. S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
  290. if (S.getLangOpts().CPlusPlus11 &&
  291. (isa<CXXDestructorDecl>(FD) ||
  292. FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
  293. FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
  294. if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
  295. getAs<FunctionProtoType>())
  296. S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
  297. << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
  298. << FD->getExceptionSpecSourceRange();
  299. } else
  300. S.Diag(FD->getLocation(), diag::note_throw_in_function)
  301. << FD->getExceptionSpecSourceRange();
  302. }
  303. }
  304. static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
  305. AnalysisDeclContext &AC) {
  306. CFG *BodyCFG = AC.getCFG();
  307. if (!BodyCFG)
  308. return;
  309. if (BodyCFG->getExit().pred_empty())
  310. return;
  311. visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
  312. if (throwEscapes(S, Throw, Block, BodyCFG))
  313. EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
  314. });
  315. }
  316. static bool isNoexcept(const FunctionDecl *FD) {
  317. const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
  318. if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
  319. return true;
  320. return false;
  321. }
//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

/// Summarizes how control can leave a statement/function body; computed by
/// CheckFallThrough below.
enum ControlFlowKind {
  UnknownFallThrough,      // Analysis could not run (e.g. no CFG available).
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end.
  NeverFallThroughOrReturn // Never falls off the end and never returns.
};
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (Term && isa<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;   // Some live predecessor ends in a return.
  bool HasFakeEdge = false;     // Edge exists only for CFG shape (throw, asm).
  bool HasPlainEdge = false;    // A genuine fall-through edge into the exit.
  bool HasAbnormalEdge = false; // No-return element, try-statement, etc.

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  // Classify each live predecessor of the exit block by its last statement.
  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;

    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && isa<CXXTryStmt>(Term)) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // A block that doesn't actually branch to the exit contributes only an
    // abnormal edge.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }

  // Combine the collected edge kinds into a single verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
  451. namespace {
  452. struct CheckFallThroughDiagnostics {
  453. unsigned diag_MaybeFallThrough_HasNoReturn;
  454. unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  455. unsigned diag_AlwaysFallThrough_HasNoReturn;
  456. unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  457. unsigned diag_NeverFallThroughOrReturn;
  458. enum { Function, Block, Lambda, Coroutine } funMode;
  459. SourceLocation FuncLoc;
  460. static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
  461. CheckFallThroughDiagnostics D;
  462. D.FuncLoc = Func->getLocation();
  463. D.diag_MaybeFallThrough_HasNoReturn =
  464. diag::warn_falloff_noreturn_function;
  465. D.diag_MaybeFallThrough_ReturnsNonVoid =
  466. diag::warn_maybe_falloff_nonvoid_function;
  467. D.diag_AlwaysFallThrough_HasNoReturn =
  468. diag::warn_falloff_noreturn_function;
  469. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  470. diag::warn_falloff_nonvoid_function;
  471. // Don't suggest that virtual functions be marked "noreturn", since they
  472. // might be overridden by non-noreturn functions.
  473. bool isVirtualMethod = false;
  474. if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
  475. isVirtualMethod = Method->isVirtual();
  476. // Don't suggest that template instantiations be marked "noreturn"
  477. bool isTemplateInstantiation = false;
  478. if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
  479. isTemplateInstantiation = Function->isTemplateInstantiation();
  480. if (!isVirtualMethod && !isTemplateInstantiation)
  481. D.diag_NeverFallThroughOrReturn =
  482. diag::warn_suggest_noreturn_function;
  483. else
  484. D.diag_NeverFallThroughOrReturn = 0;
  485. D.funMode = Function;
  486. return D;
  487. }
  488. static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
  489. CheckFallThroughDiagnostics D;
  490. D.FuncLoc = Func->getLocation();
  491. D.diag_MaybeFallThrough_HasNoReturn = 0;
  492. D.diag_MaybeFallThrough_ReturnsNonVoid =
  493. diag::warn_maybe_falloff_nonvoid_coroutine;
  494. D.diag_AlwaysFallThrough_HasNoReturn = 0;
  495. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  496. diag::warn_falloff_nonvoid_coroutine;
  497. D.funMode = Coroutine;
  498. return D;
  499. }
  500. static CheckFallThroughDiagnostics MakeForBlock() {
  501. CheckFallThroughDiagnostics D;
  502. D.diag_MaybeFallThrough_HasNoReturn =
  503. diag::err_noreturn_block_has_return_expr;
  504. D.diag_MaybeFallThrough_ReturnsNonVoid =
  505. diag::err_maybe_falloff_nonvoid_block;
  506. D.diag_AlwaysFallThrough_HasNoReturn =
  507. diag::err_noreturn_block_has_return_expr;
  508. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  509. diag::err_falloff_nonvoid_block;
  510. D.diag_NeverFallThroughOrReturn = 0;
  511. D.funMode = Block;
  512. return D;
  513. }
  514. static CheckFallThroughDiagnostics MakeForLambda() {
  515. CheckFallThroughDiagnostics D;
  516. D.diag_MaybeFallThrough_HasNoReturn =
  517. diag::err_noreturn_lambda_has_return_expr;
  518. D.diag_MaybeFallThrough_ReturnsNonVoid =
  519. diag::warn_maybe_falloff_nonvoid_lambda;
  520. D.diag_AlwaysFallThrough_HasNoReturn =
  521. diag::err_noreturn_lambda_has_return_expr;
  522. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  523. diag::warn_falloff_nonvoid_lambda;
  524. D.diag_NeverFallThroughOrReturn = 0;
  525. D.funMode = Lambda;
  526. return D;
  527. }
  528. bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
  529. bool HasNoReturn) const {
  530. if (funMode == Function) {
  531. return (ReturnsVoid ||
  532. D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
  533. FuncLoc)) &&
  534. (!HasNoReturn ||
  535. D.isIgnored(diag::warn_noreturn_function_has_return_expr,
  536. FuncLoc)) &&
  537. (!ReturnsVoid ||
  538. D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
  539. }
  540. if (funMode == Coroutine) {
  541. return (ReturnsVoid ||
  542. D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
  543. D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
  544. FuncLoc)) &&
  545. (!HasNoReturn);
  546. }
  547. // For blocks / lambdas.
  548. return ReturnsVoid && !HasNoReturn;
  549. }
  550. };
  551. } // anonymous namespace
  552. /// CheckFallThroughForBody - Check that we don't fall off the end of a
  553. /// function that should return a value. Check that we don't fall off the end
  554. /// of a noreturn function. We assume that functions and blocks not marked
  555. /// noreturn will return.
  556. static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
  557. QualType BlockType,
  558. const CheckFallThroughDiagnostics &CD,
  559. AnalysisDeclContext &AC,
  560. sema::FunctionScopeInfo *FSI) {
  561. bool ReturnsVoid = false;
  562. bool HasNoReturn = false;
  563. bool IsCoroutine = FSI->isCoroutine();
  564. if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
  565. if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
  566. ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
  567. else
  568. ReturnsVoid = FD->getReturnType()->isVoidType();
  569. HasNoReturn = FD->isNoReturn();
  570. }
  571. else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
  572. ReturnsVoid = MD->getReturnType()->isVoidType();
  573. HasNoReturn = MD->hasAttr<NoReturnAttr>();
  574. }
  575. else if (isa<BlockDecl>(D)) {
  576. if (const FunctionType *FT =
  577. BlockType->getPointeeType()->getAs<FunctionType>()) {
  578. if (FT->getReturnType()->isVoidType())
  579. ReturnsVoid = true;
  580. if (FT->getNoReturnAttr())
  581. HasNoReturn = true;
  582. }
  583. }
  584. DiagnosticsEngine &Diags = S.getDiagnostics();
  585. // Short circuit for compilation speed.
  586. if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
  587. return;
  588. SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  589. auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
  590. if (IsCoroutine)
  591. S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
  592. else
  593. S.Diag(Loc, DiagID);
  594. };
  595. // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  596. if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
  597. return;
  598. // Either in a function body compound statement, or a function-try-block.
  599. switch (CheckFallThrough(AC)) {
  600. case UnknownFallThrough:
  601. break;
  602. case MaybeFallThrough:
  603. if (HasNoReturn)
  604. EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
  605. else if (!ReturnsVoid)
  606. EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
  607. break;
  608. case AlwaysFallThrough:
  609. if (HasNoReturn)
  610. EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
  611. else if (!ReturnsVoid)
  612. EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
  613. break;
  614. case NeverFallThroughOrReturn:
  615. if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
  616. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
  617. S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
  618. } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
  619. S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
  620. } else {
  621. S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
  622. }
  623. }
  624. break;
  625. case NeverFallThrough:
  626. break;
  627. }
  628. }
  629. //===----------------------------------------------------------------------===//
  630. // -Wuninitialized
  631. //===----------------------------------------------------------------------===//
  632. namespace {
  633. /// ContainsReference - A visitor class to search for references to
  634. /// a particular declaration (the needle) within any evaluated component of an
  635. /// expression (recursively).
  636. class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
  637. bool FoundReference;
  638. const DeclRefExpr *Needle;
  639. public:
  640. typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
  641. ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
  642. : Inherited(Context), FoundReference(false), Needle(Needle) {}
  643. void VisitExpr(const Expr *E) {
  644. // Stop evaluating if we already have a reference.
  645. if (FoundReference)
  646. return;
  647. Inherited::VisitExpr(E);
  648. }
  649. void VisitDeclRefExpr(const DeclRefExpr *E) {
  650. if (E == Needle)
  651. FoundReference = true;
  652. else
  653. Inherited::VisitDeclRefExpr(E);
  654. }
  655. bool doesContainReference() const { return FoundReference; }
  656. };
  657. } // anonymous namespace
  658. static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  659. QualType VariableTy = VD->getType().getCanonicalType();
  660. if (VariableTy->isBlockPointerType() &&
  661. !VD->hasAttr<BlocksAttr>()) {
  662. S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
  663. << VD->getDeclName()
  664. << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
  665. return true;
  666. }
  667. // Don't issue a fixit if there is already an initializer.
  668. if (VD->getInit())
  669. return false;
  670. // Don't suggest a fixit inside macros.
  671. if (VD->getEndLoc().isMacroID())
  672. return false;
  673. SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
  674. // Suggest possible initialization (if any).
  675. std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
  676. if (Init.empty())
  677. return false;
  678. S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
  679. << FixItHint::CreateInsertion(Loc, Init);
  680. return true;
  681. }
  682. /// Create a fixit to remove an if-like statement, on the assumption that its
  683. /// condition is CondVal.
  684. static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
  685. const Stmt *Else, bool CondVal,
  686. FixItHint &Fixit1, FixItHint &Fixit2) {
  687. if (CondVal) {
  688. // If condition is always true, remove all but the 'then'.
  689. Fixit1 = FixItHint::CreateRemoval(
  690. CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
  691. if (Else) {
  692. SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
  693. Fixit2 =
  694. FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
  695. }
  696. } else {
  697. // If condition is always false, remove all but the 'else'.
  698. if (Else)
  699. Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
  700. If->getBeginLoc(), Else->getBeginLoc()));
  701. else
  702. Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  703. }
  704. }
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param VD the uninitialized variable.
/// \param Use describes where and how confidently the use was detected.
/// \param IsCapturedByBlock true when the use happens via a block capture;
///        it is forwarded into the diagnostics to adjust their wording.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  // Set once any per-branch diagnostic is emitted below; used to decide
  // whether the generic "may be uninitialized" fallback is still needed.
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Use is definitely uninitialized on every path: strongest warning.
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Warn at the declaration; selector 4/5 picks the "after its
    // declaration" vs "after a call" wording in the diagnostic text.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
        << const_cast<DeclContext*>(VD->getLexicalDeclContext())
        << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only short-circuiting && / || create branches worth reporting.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      // Deleting the condition of a 'for' makes it loop forever (enter);
      // replacing it with the constant makes the branch value explicit.
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Emit the branch-specific warning plus a note pointing at the use, and
    // (when a condition can be removed) a note carrying the fixits.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock << DiagKind
        << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
          << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch was reportable: fall back to the generic warning.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
  858. /// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
  859. /// uninitialized variable. This manages the different forms of diagnostic
  860. /// emitted for particular types of uses. Returns true if the use was diagnosed
  861. /// as a warning. If a particular use is one we omit warnings for, returns
  862. /// false.
  863. static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
  864. const UninitUse &Use,
  865. bool alwaysReportSelfInit = false) {
  866. if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
  867. // Inspect the initializer of the variable declaration which is
  868. // being referenced prior to its initialization. We emit
  869. // specialized diagnostics for self-initialization, and we
  870. // specifically avoid warning about self references which take the
  871. // form of:
  872. //
  873. // int x = x;
  874. //
  875. // This is used to indicate to GCC that 'x' is intentionally left
  876. // uninitialized. Proven code paths which access 'x' in
  877. // an uninitialized state after this will still warn.
  878. if (const Expr *Initializer = VD->getInit()) {
  879. if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
  880. return false;
  881. ContainsReference CR(S.Context, DRE);
  882. CR.Visit(Initializer);
  883. if (CR.doesContainReference()) {
  884. S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
  885. << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
  886. return true;
  887. }
  888. }
  889. DiagUninitUse(S, VD, Use, false);
  890. } else {
  891. const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
  892. if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
  893. S.Diag(BE->getBeginLoc(),
  894. diag::warn_uninit_byref_blockvar_captured_by_block)
  895. << VD->getDeclName()
  896. << VD->getType().getQualifiers().hasObjCLifetime();
  897. else
  898. DiagUninitUse(S, VD, Use, true);
  899. }
  900. // Report where the variable was declared when the use wasn't within
  901. // the initializer of that declaration & we didn't already suggest
  902. // an initialization fixit.
  903. if (!SuggestInitializationFixit(S, VD))
  904. S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
  905. << VD->getDeclName();
  906. return true;
  907. }
  908. namespace {
/// Collects [[fallthrough]]-annotated statements and, via CFG reachability,
/// determines which switch-case labels are fallen into without an
/// annotation. Used by -Wimplicit-fallthrough diagnostics below.
class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
public:
  FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
  }

  /// True if the traversal encountered at least one switch statement.
  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Remove an annotation from the pending set once it has been matched to a
  /// fall-through edge. Statements still in the set afterwards are reported
  /// as invalidly placed by the caller.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found; // Silence "unused variable" in NDEBUG builds.
  }

  typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

  /// Annotated statements that have not (yet) been matched to an edge.
  const AttrStmts &getFallthroughStmts() const {
    return FallthroughStmts;
  }

  /// Breadth-first walk from the CFG entry, populating ReachableBlocks.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
        BlockQueue.push_back(B);
    }

    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (CFGBlock::const_succ_iterator I = P->succ_begin(),
                                         E = P->succ_end();
           I != E; ++I) {
        if (*I && ReachableBlocks.insert(*I).second)
          BlockQueue.push_back(*I);
      }
    }
  }

  /// Returns true if some predecessor falls through to case-labeled block \p B
  /// without a fall-through annotation; \p AnnotatedCnt is set to the number
  /// of annotated incoming edges. Requires fillReachableBlocks() to have run.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    // Walk backwards through B's predecessors, classifying each edge.
    std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P) continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (Term && isa<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                              ElemEnd = P->rend();
             ElemIt != ElemEnd; ++ElemIt) {
          if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_fallthrough_attr_unreachable);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // RecursiveASTVisitor setup.
  bool shouldWalkTypesOfTypeLocs() const { return false; }

  /// Record every fall-through-annotated statement seen in the body.
  bool VisitAttributedStmt(AttributedStmt *S) {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) {
    // Traverse the captures, but not the body.
    for (const auto &C : zip(LE->captures(), LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
    return true;
  }

private:
  /// Return \p S as an AttributedStmt carrying a FallThroughAttr, else null.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  /// Last executable statement of block \p B: its terminator if present,
  /// otherwise the last CFGStmt element, with a workaround for labels whose
  /// sub-statement the CFGBuilder dropped.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                          ElemEnd = B.rend();
         ElemIt != ElemEnd; ++ElemIt) {
      if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
        return CS->getStmt();
    }
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;          // Seen any switch during traversal.
  AttrStmts FallthroughStmts;          // Annotations not yet matched to edges.
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
  1065. } // anonymous namespace
  1066. static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
  1067. SourceLocation Loc) {
  1068. TokenValue FallthroughTokens[] = {
  1069. tok::l_square, tok::l_square,
  1070. PP.getIdentifierInfo("fallthrough"),
  1071. tok::r_square, tok::r_square
  1072. };
  1073. TokenValue ClangFallthroughTokens[] = {
  1074. tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
  1075. tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
  1076. tok::r_square, tok::r_square
  1077. };
  1078. bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;
  1079. StringRef MacroName;
  1080. if (PreferClangAttr)
  1081. MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  1082. if (MacroName.empty())
  1083. MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
  1084. if (MacroName.empty() && !PreferClangAttr)
  1085. MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  1086. if (MacroName.empty()) {
  1087. if (!PreferClangAttr)
  1088. MacroName = "[[fallthrough]]";
  1089. else if (PP.getLangOpts().CPlusPlus)
  1090. MacroName = "[[clang::fallthrough]]";
  1091. else
  1092. MacroName = "__attribute__((fallthrough))";
  1093. }
  1094. return MacroName;
  1095. }
/// Emit -Wimplicit-fallthrough diagnostics for the body in \p AC: warn on
/// each case label that is fallen into without an annotation (suggesting
/// annotation/break fixits), and reject annotations that do not precede a
/// case label.
///
/// \param PerFunction when true, only warn in functions that already contain
///        at least one fall-through annotation (the *_per_function variant).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // No switches at all: nothing can fall through.
  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only case/default-labeled blocks are fall-through targets.
    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt; // Set by checkFallThroughIntoBlock below.

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Offer fixits only when no incoming edge was annotated at all.
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      // Can't reliably insert text into macro-expanded labels.
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Suggest an annotation unless the case body is just "break;".
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation still unmatched does not precede a case label.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
  1148. static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
  1149. const Stmt *S) {
  1150. assert(S);
  1151. do {
  1152. switch (S->getStmtClass()) {
  1153. case Stmt::ForStmtClass:
  1154. case Stmt::WhileStmtClass:
  1155. case Stmt::CXXForRangeStmtClass:
  1156. case Stmt::ObjCForCollectionStmtClass:
  1157. return true;
  1158. case Stmt::DoStmtClass: {
  1159. Expr::EvalResult Result;
  1160. if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
  1161. return true;
  1162. return Result.Val.getInt().getBoolValue();
  1163. }
  1164. default:
  1165. break;
  1166. }
  1167. } while ((S = PM.getParent(S)));
  1168. return false;
  1169. }
/// Emit warn_arc_repeated_use_of_weak (and its "possible" variant) for weak
/// objects recorded in \p CurFn that are read unsafely more than once, with
/// notes at each additional access.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      // Scan the remaining uses for a second unsafe read.
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one read: only a loop can make it "repeated".
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Suppress the in-loop warning for non-parameter local bases.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
  1303. namespace {
/// Collects uses of uninitialized variables reported by the uninitialized-
/// values analysis and emits the -Wuninitialized family of diagnostics when
/// flushed (at the latest, on destruction).
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer: owned list of uses; Int: whether a self-init ("int x = x;")
  // was seen for the variable.
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  // Return the entry for \p vd, lazily allocating its use vector. The vector
  // is owned here and freed in flushDiagnostics().
  MappedType &getUses(const VarDecl *vd) {
    MappedType &V = uses[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(vd).getPointer()->push_back(use);
  }

  void handleSelfInit(const VarDecl *vd) override {
    getUses(vd).setInt(true);
  }

  // Emit at most one warning per variable, then reset the collected state.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(vec->begin(), vec->end(),
                   [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();
  }

private:
  // True when any recorded use is definite (Always) or definite-after-a-
  // point (AfterCall/AfterDecl), as opposed to merely possible.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
  1377. } // anonymous namespace
  1378. namespace clang {
  1379. namespace {
  1380. typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
  1381. typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
  1382. typedef std::list<DelayedDiag> DiagList;
  1383. struct SortDiagBySourceLocation {
  1384. SourceManager &SM;
  1385. SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
  1386. bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
  1387. // Although this call will be slow, this is only called when outputting
  1388. // multiple warnings.
  1389. return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
  1390. }
  1391. };
  1392. } // anonymous namespace
  1393. } // namespace clang
  1394. //===----------------------------------------------------------------------===//
  1395. // -Wthread-safety
  1396. //===----------------------------------------------------------------------===//
namespace clang {
namespace threadSafety {
namespace {
/// Handler for -Wthread-safety diagnostics. Buffers every warning (with its
/// notes) in Warnings while the analysis runs, then emits them in source
/// order via emitDiagnostics().
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Fallback locations used when a callback supplies an invalid location:
  // the analyzed function's own location and its end location.
  SourceLocation FunLocation, FunEndLocation;
  const FunctionDecl *CurrentFunction;
  bool Verbose;

  /// In verbose mode, return a single note pointing at the body of the
  /// function currently being analyzed; otherwise return no notes.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  /// Return \p Note, appending the verbose "in function" note when enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  /// Return both notes (in order), appending the verbose "in function" note
  /// when enabled.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  /// Build the "mutex acquired here" note for \p LocLocked, falling back to
  /// the plain note set when the acquisition location is unknown.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL), CurrentFunction(nullptr),
        Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  // The lock expression could not be resolved to a capability; the location
  // itself is streamed into the diagnostic as its argument.
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Unlock of a capability that is not held.
  void handleUnmatchedUnlock(StringRef Kind, Name LockName,
                             SourceLocation Loc) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Unlock kind (shared vs. exclusive) does not match the acquisition kind.
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // Capability acquired a second time while already held.
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // A capability is (or is not) held at the end of a scope, loop iteration,
  // or function, contrary to expectations; LEK selects the diagnostic.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // The same capability is held as exclusive at one point and shared at
  // another; a note points at the conflicting location.
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  // A guarded variable is accessed/dereferenced with no lock held at all.
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
                         ProtectedOperationKind POK, AccessKind AK,
                         SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess ?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
                                         << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // The required capability is not held. When PossibleMatch is non-null, a
  // "near match" was found and the *_precise diagnostic variants are used,
  // with an extra note naming the near match.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        // Verbose mode also points at the guarded variable's declaration.
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getNameAsString());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  // Acquiring a capability that requires a negative capability to be held.
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // A function annotated to exclude a capability is called while holding it.
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Lock-ordering (acquired_before/acquired_after) violation.
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Cycle detected in the acquired_before/acquired_after ordering graph.
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Track the function currently being analyzed so verbose notes can refer
  // to it.
  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
} // anonymous namespace
} // namespace threadSafety
} // namespace clang
  1641. //===----------------------------------------------------------------------===//
  1642. // -Wconsumed
  1643. //===----------------------------------------------------------------------===//
  1644. namespace clang {
  1645. namespace consumed {
  1646. namespace {
  1647. class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
  1648. Sema &S;
  1649. DiagList Warnings;
  1650. public:
  1651. ConsumedWarningsHandler(Sema &S) : S(S) {}
  1652. void emitDiagnostics() override {
  1653. Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
  1654. for (const auto &Diag : Warnings) {
  1655. S.Diag(Diag.first.first, Diag.first.second);
  1656. for (const auto &Note : Diag.second)
  1657. S.Diag(Note.first, Note.second);
  1658. }
  1659. }
  1660. void warnLoopStateMismatch(SourceLocation Loc,
  1661. StringRef VariableName) override {
  1662. PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
  1663. VariableName);
  1664. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1665. }
  1666. void warnParamReturnTypestateMismatch(SourceLocation Loc,
  1667. StringRef VariableName,
  1668. StringRef ExpectedState,
  1669. StringRef ObservedState) override {
  1670. PartialDiagnosticAt Warning(Loc, S.PDiag(
  1671. diag::warn_param_return_typestate_mismatch) << VariableName <<
  1672. ExpectedState << ObservedState);
  1673. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1674. }
  1675. void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
  1676. StringRef ObservedState) override {
  1677. PartialDiagnosticAt Warning(Loc, S.PDiag(
  1678. diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
  1679. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1680. }
  1681. void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
  1682. StringRef TypeName) override {
  1683. PartialDiagnosticAt Warning(Loc, S.PDiag(
  1684. diag::warn_return_typestate_for_unconsumable_type) << TypeName);
  1685. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1686. }
  1687. void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
  1688. StringRef ObservedState) override {
  1689. PartialDiagnosticAt Warning(Loc, S.PDiag(
  1690. diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
  1691. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1692. }
  1693. void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
  1694. SourceLocation Loc) override {
  1695. PartialDiagnosticAt Warning(Loc, S.PDiag(
  1696. diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
  1697. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1698. }
  1699. void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
  1700. StringRef State, SourceLocation Loc) override {
  1701. PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
  1702. MethodName << VariableName << State);
  1703. Warnings.emplace_back(std::move(Warning), OptionalNotes());
  1704. }
  1705. };
  1706. } // anonymous namespace
  1707. } // namespace consumed
  1708. } // namespace clang
  1709. //===----------------------------------------------------------------------===//
  1710. // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
  1711. // warnings on a function, method, or block.
  1712. //===----------------------------------------------------------------------===//
  1713. clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  1714. enableCheckFallThrough = 1;
  1715. enableCheckUnreachable = 0;
  1716. enableThreadSafetyAnalysis = 0;
  1717. enableConsumedAnalysis = 0;
  1718. }
  1719. static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  1720. return (unsigned)!D.isIgnored(diag, SourceLocation());
  1721. }
/// Constructor: zero all statistics counters and compute the default policy
/// from the current diagnostic state.
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s),
      NumFunctionsAnalyzed(0),
      NumFunctionsWithBadCFGs(0),
      NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0),
      NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0),
      MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Unreachable-code analysis runs if any of its diagnostics is enabled.
  DefaultPolicy.enableCheckUnreachable =
    isEnabled(D, warn_unreachable) ||
    isEnabled(D, warn_unreachable_break) ||
    isEnabled(D, warn_unreachable_return) ||
    isEnabled(D, warn_unreachable_loop_increment);

  // A single diagnostic is checked as a representative of each analysis
  // group — presumably enabling the group enables it; TODO confirm this
  // matches the diagnostic group definitions.
  DefaultPolicy.enableThreadSafetyAnalysis =
    isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
    isEnabled(D, warn_use_in_invalid_state);
}
  1745. static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  1746. for (const auto &D : fscope->PossiblyUnreachableDiags)
  1747. S.Diag(D.Loc, D.PD);
  1748. }
/// Run all enabled analysis-based warnings over the body of \p D: delayed
/// possibly-unreachable diagnostics, missing-'return' fall-through,
/// unreachable code, thread safety, consumed objects, uninitialized
/// variables, switch fallthrough annotations, repeated weak-object use,
/// infinite recursion, and throw-in-noexcept. Also collects CFG statistics
/// when -print-stats is on.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, QualType BlockType) {
  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Install the logical handler.
  // The handler must be installed before the CFG is built so it can observe
  // the build.
  llvm::Optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    // Registration must happen before the first AC.getCFG() call below so
    // the forced block expressions end up in the CFG.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    // CFG construction failed: emit everything unfiltered rather than
    // dropping the delayed diagnostics.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostic set matching the kind of callable being analyzed:
    // block, lambda call operator, coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variables analysis: run if any of its three diagnostics
  // is enabled at this declaration.
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Diagnose unannotated switch fallthrough; the per-function variant is
  // passed through as the PerFunction flag.
  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  // Repeated use of __weak objects (ObjC ARC weak references only).
  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
  1959. void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  1960. llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
  1961. unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  1962. unsigned AvgCFGBlocksPerFunction =
  1963. !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  1964. llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
  1965. << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
  1966. << " " << NumCFGBlocks << " CFG blocks built.\n"
  1967. << " " << AvgCFGBlocksPerFunction
  1968. << " average CFG blocks per function.\n"
  1969. << " " << MaxCFGBlocksPerFunction
  1970. << " max CFG blocks per function.\n";
  1971. unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
  1972. : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  1973. unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
  1974. : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  1975. llvm::errs() << NumUninitAnalysisFunctions
  1976. << " functions analyzed for uninitialiazed variables\n"
  1977. << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
  1978. << " " << AvgUninitVariablesPerFunction
  1979. << " average variables per function.\n"
  1980. << " " << MaxUninitAnalysisVariablesPerFunction
  1981. << " max variables per function.\n"
  1982. << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
  1983. << " " << AvgUninitBlockVisitsPerFunction
  1984. << " average block visits per function.\n"
  1985. << " " << MaxUninitAnalysisBlockVisitsPerFunction
  1986. << " max block visits per function.\n";
  1987. }