//===- BugReporter.cpp - Generate PathDiagnostics for bugs ----------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines BugReporter, a utility class for generating
// PathDiagnostics.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/BugReporter/BugReporter.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclBase.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugReporterVisitors.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/BugReporter/PathDiagnostic.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SymbolManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <iterator>
#include <memory>
#include <queue>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "BugReporter"

STATISTIC(MaxBugClassSize,
          "The maximum number of bug reports in the same equivalence class");
STATISTIC(MaxValidBugClassSize,
          "The maximum number of bug reports in the same equivalence class "
          "where at least one report is valid (not suppressed)");

BugReporterVisitor::~BugReporterVisitor() = default;

void BugReporterContext::anchor() {}

//===----------------------------------------------------------------------===//
// Helper routines for walking the ExplodedGraph and fetching statements.
//===----------------------------------------------------------------------===//

static const Stmt *GetPreviousStmt(const ExplodedNode *N) {
  for (N = N->getFirstPred(); N; N = N->getFirstPred())
    if (const Stmt *S = PathDiagnosticLocation::getStmt(N))
      return S;

  return nullptr;
}

static inline const Stmt*
GetCurrentOrPreviousStmt(const ExplodedNode *N) {
  if (const Stmt *S = PathDiagnosticLocation::getStmt(N))
    return S;

  return GetPreviousStmt(N);
}

//===----------------------------------------------------------------------===//
// Diagnostic cleanup.
//===----------------------------------------------------------------------===//

static PathDiagnosticEventPiece *
eventsDescribeSameCondition(PathDiagnosticEventPiece *X,
                            PathDiagnosticEventPiece *Y) {
  // Prefer diagnostics that come from ConditionBRVisitor over
  // those that came from TrackConstraintBRVisitor,
  // unless the one from ConditionBRVisitor is
  // its generic fallback diagnostic.
  const void *tagPreferred = ConditionBRVisitor::getTag();
  const void *tagLesser = TrackConstraintBRVisitor::getTag();

  if (X->getLocation() != Y->getLocation())
    return nullptr;

  if (X->getTag() == tagPreferred && Y->getTag() == tagLesser)
    return ConditionBRVisitor::isPieceMessageGeneric(X) ? Y : X;

  if (Y->getTag() == tagPreferred && X->getTag() == tagLesser)
    return ConditionBRVisitor::isPieceMessageGeneric(Y) ? X : Y;

  return nullptr;
}
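
// For example (illustrative only): for a branch such as 'if (p) ...',
// ConditionBRVisitor and TrackConstraintBRVisitor may each attach an event
// piece at the same location (a concrete note about the assumed value of 'p'
// and a more generic constraint note); the pass below keeps only the more
// informative of the two.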

/// An optimization pass over PathPieces that removes redundant diagnostics
/// generated by both ConditionBRVisitor and TrackConstraintBRVisitor. Both
/// BugReporterVisitors use different methods to generate diagnostics, with
/// one capable of emitting diagnostics in some cases but not in others. This
/// can lead to redundant diagnostic pieces at the same point in a path.
static void removeRedundantMsgs(PathPieces &path) {
  unsigned N = path.size();
  if (N < 2)
    return;

  // NOTE: this loop intentionally is not using an iterator. Instead, we
  // are streaming the path and modifying it in place. This is done by
  // grabbing the front, processing it, and if we decide to keep it append
  // it to the end of the path. The entire path is processed in this way.
  for (unsigned i = 0; i < N; ++i) {
    auto piece = std::move(path.front());
    path.pop_front();

    switch (piece->getKind()) {
      case PathDiagnosticPiece::Call:
        removeRedundantMsgs(cast<PathDiagnosticCallPiece>(*piece).path);
        break;
      case PathDiagnosticPiece::Macro:
        removeRedundantMsgs(cast<PathDiagnosticMacroPiece>(*piece).subPieces);
        break;
      case PathDiagnosticPiece::ControlFlow:
        break;
      case PathDiagnosticPiece::Event: {
        if (i == N-1)
          break;

        if (auto *nextEvent =
              dyn_cast<PathDiagnosticEventPiece>(path.front().get())) {
          auto *event = cast<PathDiagnosticEventPiece>(piece.get());
          // Check to see if we should keep one of the two pieces. If we
          // come up with a preference, record which piece to keep, and consume
          // another piece from the path.
          if (auto *pieceToKeep =
                  eventsDescribeSameCondition(event, nextEvent)) {
            piece = std::move(pieceToKeep == event ? piece : path.front());
            path.pop_front();
            ++i;
          }
        }
        break;
      }
      case PathDiagnosticPiece::Note:
        break;
    }
    path.push_back(std::move(piece));
  }
}

/// A map from PathDiagnosticPiece to the LocationContext of the inlined
/// function call it represents.
using LocationContextMap =
    llvm::DenseMap<const PathPieces *, const LocationContext *>;

/// Recursively scan through a path and prune out call and macro pieces
/// that aren't needed. Return true if afterwards the path contains
/// "interesting stuff" which means it shouldn't be pruned from the parent path.
static bool removeUnneededCalls(PathPieces &pieces, BugReport *R,
                                LocationContextMap &LCM,
                                bool IsInteresting = false) {
  bool containsSomethingInteresting = IsInteresting;
  const unsigned N = pieces.size();

  for (unsigned i = 0; i < N; ++i) {
    // Remove the front piece from the path. If it is still something we
    // want to keep once we are done, we will push it back on the end.
    auto piece = std::move(pieces.front());
    pieces.pop_front();

    switch (piece->getKind()) {
      case PathDiagnosticPiece::Call: {
        auto &call = cast<PathDiagnosticCallPiece>(*piece);
        // Check if the location context is interesting.
        assert(LCM.count(&call.path));
        if (!removeUnneededCalls(call.path, R, LCM,
                                 R->isInteresting(LCM[&call.path])))
          continue;

        containsSomethingInteresting = true;
        break;
      }
      case PathDiagnosticPiece::Macro: {
        auto &macro = cast<PathDiagnosticMacroPiece>(*piece);
        if (!removeUnneededCalls(macro.subPieces, R, LCM, IsInteresting))
          continue;
        containsSomethingInteresting = true;
        break;
      }
      case PathDiagnosticPiece::Event: {
        auto &event = cast<PathDiagnosticEventPiece>(*piece);

        // We never throw away an event on its own; events are only discarded
        // wholesale when the entire path containing them is thrown away.
        containsSomethingInteresting |= !event.isPrunable();
        break;
      }
      case PathDiagnosticPiece::ControlFlow:
        break;
      case PathDiagnosticPiece::Note:
        break;
    }

    pieces.push_back(std::move(piece));
  }

  return containsSomethingInteresting;
}

/// Returns true if the given decl has been implicitly given a body, either by
/// the analyzer or by the compiler proper.
static bool hasImplicitBody(const Decl *D) {
  assert(D);
  return D->isImplicit() || !D->hasBody();
}

/// Recursively scan through a path and make sure that all call pieces have
/// valid locations.
static void
adjustCallLocations(PathPieces &Pieces,
                    PathDiagnosticLocation *LastCallLocation = nullptr) {
  for (const auto &I : Pieces) {
    auto *Call = dyn_cast<PathDiagnosticCallPiece>(I.get());

    if (!Call)
      continue;

    if (LastCallLocation) {
      bool CallerIsImplicit = hasImplicitBody(Call->getCaller());
      if (CallerIsImplicit || !Call->callEnter.asLocation().isValid())
        Call->callEnter = *LastCallLocation;
      if (CallerIsImplicit || !Call->callReturn.asLocation().isValid())
        Call->callReturn = *LastCallLocation;
    }

    // Recursively clean out the subclass. Keep this call around if
    // it contains any informative diagnostics.
    PathDiagnosticLocation *ThisCallLocation;
    if (Call->callEnterWithin.asLocation().isValid() &&
        !hasImplicitBody(Call->getCallee()))
      ThisCallLocation = &Call->callEnterWithin;
    else
      ThisCallLocation = &Call->callEnter;

    assert(ThisCallLocation && "Outermost call has an invalid location");
    adjustCallLocations(Call->path, ThisCallLocation);
  }
}

/// Remove edges in and out of C++ default initializer expressions. These are
/// for fields that have in-class initializers, as opposed to being initialized
/// explicitly in a constructor or braced list.
static void removeEdgesToDefaultInitializers(PathPieces &Pieces) {
  for (PathPieces::iterator I = Pieces.begin(), E = Pieces.end(); I != E;) {
    if (auto *C = dyn_cast<PathDiagnosticCallPiece>(I->get()))
      removeEdgesToDefaultInitializers(C->path);

    if (auto *M = dyn_cast<PathDiagnosticMacroPiece>(I->get()))
      removeEdgesToDefaultInitializers(M->subPieces);

    if (auto *CF = dyn_cast<PathDiagnosticControlFlowPiece>(I->get())) {
      const Stmt *Start = CF->getStartLocation().asStmt();
      const Stmt *End = CF->getEndLocation().asStmt();
      if (Start && isa<CXXDefaultInitExpr>(Start)) {
        I = Pieces.erase(I);
        continue;
      } else if (End && isa<CXXDefaultInitExpr>(End)) {
        PathPieces::iterator Next = std::next(I);
        if (Next != E) {
          if (auto *NextCF =
                  dyn_cast<PathDiagnosticControlFlowPiece>(Next->get())) {
            NextCF->setStartLocation(CF->getStartLocation());
          }
        }
        I = Pieces.erase(I);
        continue;
      }
    }

    I++;
  }
}

/// Remove all pieces with invalid locations as these cannot be serialized.
/// We might have pieces with invalid locations as a result of inlining Body
/// Farm generated functions.
static void removePiecesWithInvalidLocations(PathPieces &Pieces) {
  for (PathPieces::iterator I = Pieces.begin(), E = Pieces.end(); I != E;) {
    if (auto *C = dyn_cast<PathDiagnosticCallPiece>(I->get()))
      removePiecesWithInvalidLocations(C->path);

    if (auto *M = dyn_cast<PathDiagnosticMacroPiece>(I->get()))
      removePiecesWithInvalidLocations(M->subPieces);

    if (!(*I)->getLocation().isValid() ||
        !(*I)->getLocation().asLocation().isValid()) {
      I = Pieces.erase(I);
      continue;
    }
    I++;
  }
}

//===----------------------------------------------------------------------===//
// PathDiagnosticBuilder and its associated routines and helper objects.
//===----------------------------------------------------------------------===//

namespace {

class PathDiagnosticBuilder : public BugReporterContext {
  BugReport *R;
  PathDiagnosticConsumer *PDC;

public:
  const LocationContext *LC;

  PathDiagnosticBuilder(GRBugReporter &br,
                        BugReport *r, InterExplodedGraphMap &Backmap,
                        PathDiagnosticConsumer *pdc)
      : BugReporterContext(br, Backmap), R(r), PDC(pdc),
        LC(r->getErrorNode()->getLocationContext()) {}

  PathDiagnosticLocation ExecutionContinues(const ExplodedNode *N);

  PathDiagnosticLocation ExecutionContinues(llvm::raw_string_ostream &os,
                                            const ExplodedNode *N);

  BugReport *getBugReport() { return R; }

  Decl const &getCodeDecl() { return R->getErrorNode()->getCodeDecl(); }

  ParentMap& getParentMap() { return LC->getParentMap(); }

  const Stmt *getParent(const Stmt *S) {
    return getParentMap().getParent(S);
  }

  PathDiagnosticLocation getEnclosingStmtLocation(const Stmt *S);

  PathDiagnosticConsumer::PathGenerationScheme getGenerationScheme() const {
    return PDC ? PDC->getGenerationScheme() : PathDiagnosticConsumer::Minimal;
  }

  bool supportsLogicalOpControlFlow() const {
    return PDC ? PDC->supportsLogicalOpControlFlow() : true;
  }
};

} // namespace

PathDiagnosticLocation
PathDiagnosticBuilder::ExecutionContinues(const ExplodedNode *N) {
  if (const Stmt *S = PathDiagnosticLocation::getNextStmt(N))
    return PathDiagnosticLocation(S, getSourceManager(), LC);

  return PathDiagnosticLocation::createDeclEnd(N->getLocationContext(),
                                               getSourceManager());
}

PathDiagnosticLocation
PathDiagnosticBuilder::ExecutionContinues(llvm::raw_string_ostream &os,
                                          const ExplodedNode *N) {
  // Slow, but probably doesn't matter.
  if (os.str().empty())
    os << ' ';

  const PathDiagnosticLocation &Loc = ExecutionContinues(N);

  if (Loc.asStmt())
    os << "Execution continues on line "
       << getSourceManager().getExpansionLineNumber(Loc.asLocation())
       << '.';
  else {
    os << "Execution jumps to the end of the ";
    const Decl *D = N->getLocationContext()->getDecl();
    if (isa<ObjCMethodDecl>(D))
      os << "method";
    else if (isa<FunctionDecl>(D))
      os << "function";
    else {
      assert(isa<BlockDecl>(D));
      os << "anonymous block";
    }
    os << '.';
  }

  return Loc;
}

static const Stmt *getEnclosingParent(const Stmt *S, const ParentMap &PM) {
  if (isa<Expr>(S) && PM.isConsumedExpr(cast<Expr>(S)))
    return PM.getParentIgnoreParens(S);

  const Stmt *Parent = PM.getParentIgnoreParens(S);
  if (!Parent)
    return nullptr;

  switch (Parent->getStmtClass()) {
  case Stmt::ForStmtClass:
  case Stmt::DoStmtClass:
  case Stmt::WhileStmtClass:
  case Stmt::ObjCForCollectionStmtClass:
  case Stmt::CXXForRangeStmtClass:
    return Parent;
  default:
    break;
  }

  return nullptr;
}

static PathDiagnosticLocation
getEnclosingStmtLocation(const Stmt *S, SourceManager &SMgr, const ParentMap &P,
                         const LocationContext *LC, bool allowNestedContexts) {
  if (!S)
    return {};

  while (const Stmt *Parent = getEnclosingParent(S, P)) {
    switch (Parent->getStmtClass()) {
      case Stmt::BinaryOperatorClass: {
        const auto *B = cast<BinaryOperator>(Parent);
        if (B->isLogicalOp())
          return PathDiagnosticLocation(allowNestedContexts ? B : S, SMgr, LC);
        break;
      }
      case Stmt::CompoundStmtClass:
      case Stmt::StmtExprClass:
        return PathDiagnosticLocation(S, SMgr, LC);
      case Stmt::ChooseExprClass:
        // Similar to '?': if we are referring to the condition, just have the
        // edge point to the entire choose expression.
        if (allowNestedContexts || cast<ChooseExpr>(Parent)->getCond() == S)
          return PathDiagnosticLocation(Parent, SMgr, LC);
        else
          return PathDiagnosticLocation(S, SMgr, LC);
      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        // For '?', if we are referring to the condition, just have the edge
        // point to the entire '?' expression.
        if (allowNestedContexts ||
            cast<AbstractConditionalOperator>(Parent)->getCond() == S)
          return PathDiagnosticLocation(Parent, SMgr, LC);
        else
          return PathDiagnosticLocation(S, SMgr, LC);
      case Stmt::CXXForRangeStmtClass:
        if (cast<CXXForRangeStmt>(Parent)->getBody() == S)
          return PathDiagnosticLocation(S, SMgr, LC);
        break;
      case Stmt::DoStmtClass:
        return PathDiagnosticLocation(S, SMgr, LC);
      case Stmt::ForStmtClass:
        if (cast<ForStmt>(Parent)->getBody() == S)
          return PathDiagnosticLocation(S, SMgr, LC);
        break;
      case Stmt::IfStmtClass:
        if (cast<IfStmt>(Parent)->getCond() != S)
          return PathDiagnosticLocation(S, SMgr, LC);
        break;
      case Stmt::ObjCForCollectionStmtClass:
        if (cast<ObjCForCollectionStmt>(Parent)->getBody() == S)
          return PathDiagnosticLocation(S, SMgr, LC);
        break;
      case Stmt::WhileStmtClass:
        if (cast<WhileStmt>(Parent)->getCond() != S)
          return PathDiagnosticLocation(S, SMgr, LC);
        break;
      default:
        break;
    }

    S = Parent;
  }

  assert(S && "Cannot have null Stmt for PathDiagnosticLocation");
  return PathDiagnosticLocation(S, SMgr, LC);
}

PathDiagnosticLocation
PathDiagnosticBuilder::getEnclosingStmtLocation(const Stmt *S) {
  assert(S && "Null Stmt passed to getEnclosingStmtLocation");
  return ::getEnclosingStmtLocation(S, getSourceManager(), getParentMap(), LC,
                                    /*allowNestedContexts=*/false);
}

//===----------------------------------------------------------------------===//
// "Minimal" path diagnostic generation algorithm.
//===----------------------------------------------------------------------===//

using StackDiagPair =
    std::pair<PathDiagnosticCallPiece *, const ExplodedNode *>;
using StackDiagVector = SmallVector<StackDiagPair, 6>;

static void updateStackPiecesWithMessage(PathDiagnosticPiece &P,
                                         StackDiagVector &CallStack) {
  // If the piece contains a special message, add it to all the call
  // pieces on the active stack.
  if (auto *ep = dyn_cast<PathDiagnosticEventPiece>(&P)) {
    if (ep->hasCallStackHint())
      for (const auto &I : CallStack) {
        PathDiagnosticCallPiece *CP = I.first;
        const ExplodedNode *N = I.second;
        std::string stackMsg = ep->getCallStackMessage(N);

        // The last message on the path to the final bug is the most
        // important one. Since we traverse the path backwards, do not add
        // the message if one has been previously added.
        if (!CP->hasCallStackMessage())
          CP->setCallStackMessage(stackMsg);
      }
  }
}

static void CompactMacroExpandedPieces(PathPieces &path,
                                       const SourceManager& SM);

std::shared_ptr<PathDiagnosticControlFlowPiece> generateDiagForSwitchOP(
    const ExplodedNode *N,
    const CFGBlock *Dst,
    const SourceManager &SM,
    const LocationContext *LC,
    PathDiagnosticBuilder &PDB,
    PathDiagnosticLocation &Start) {
  // Figure out what case arm we took.
  std::string sbuf;
  llvm::raw_string_ostream os(sbuf);
  PathDiagnosticLocation End;

  if (const Stmt *S = Dst->getLabel()) {
    End = PathDiagnosticLocation(S, SM, LC);

    switch (S->getStmtClass()) {
    default:
      os << "No cases match in the switch statement. "
            "Control jumps to line "
         << End.asLocation().getExpansionLineNumber();
      break;
    case Stmt::DefaultStmtClass:
      os << "Control jumps to the 'default' case at line "
         << End.asLocation().getExpansionLineNumber();
      break;
    case Stmt::CaseStmtClass: {
      os << "Control jumps to 'case ";
      const auto *Case = cast<CaseStmt>(S);
      const Expr *LHS = Case->getLHS()->IgnoreParenCasts();

      // Determine if it is an enum.
      bool GetRawInt = true;

      if (const auto *DR = dyn_cast<DeclRefExpr>(LHS)) {
        // FIXME: Maybe this should be an assertion. Are there cases
        // where it is not an EnumConstantDecl?
        const auto *D = dyn_cast<EnumConstantDecl>(DR->getDecl());

        if (D) {
          GetRawInt = false;
          os << *D;
        }
      }

      if (GetRawInt)
        os << LHS->EvaluateKnownConstInt(PDB.getASTContext());

      os << ":' at line " << End.asLocation().getExpansionLineNumber();
      break;
    }
    }
  } else {
    os << "'Default' branch taken. ";
    End = PDB.ExecutionContinues(os, N);
  }
  return std::make_shared<PathDiagnosticControlFlowPiece>(Start, End,
                                                          os.str());
}

std::shared_ptr<PathDiagnosticControlFlowPiece> generateDiagForGotoOP(
    const Stmt *S,
    PathDiagnosticBuilder &PDB,
    PathDiagnosticLocation &Start) {
  std::string sbuf;
  llvm::raw_string_ostream os(sbuf);
  const PathDiagnosticLocation &End = PDB.getEnclosingStmtLocation(S);
  os << "Control jumps to line " << End.asLocation().getExpansionLineNumber();
  return std::make_shared<PathDiagnosticControlFlowPiece>(Start, End, os.str());
}

std::shared_ptr<PathDiagnosticControlFlowPiece> generateDiagForBinaryOP(
    const ExplodedNode *N,
    const Stmt *T,
    const CFGBlock *Src,
    const CFGBlock *Dst,
    const SourceManager &SM,
    PathDiagnosticBuilder &PDB,
    const LocationContext *LC) {
  const auto *B = cast<BinaryOperator>(T);
  std::string sbuf;
  llvm::raw_string_ostream os(sbuf);
  os << "Left side of '";
  PathDiagnosticLocation Start, End;

  if (B->getOpcode() == BO_LAnd) {
    os << "&&" << "' is ";

    if (*(Src->succ_begin() + 1) == Dst) {
      os << "false";
      End = PathDiagnosticLocation(B->getLHS(), SM, LC);
      Start = PathDiagnosticLocation::createOperatorLoc(B, SM);
    } else {
      os << "true";
      Start = PathDiagnosticLocation(B->getLHS(), SM, LC);
      End = PDB.ExecutionContinues(N);
    }
  } else {
    assert(B->getOpcode() == BO_LOr);
    os << "||" << "' is ";

    if (*(Src->succ_begin() + 1) == Dst) {
      os << "false";
      Start = PathDiagnosticLocation(B->getLHS(), SM, LC);
      End = PDB.ExecutionContinues(N);
    } else {
      os << "true";
      End = PathDiagnosticLocation(B->getLHS(), SM, LC);
      Start = PathDiagnosticLocation::createOperatorLoc(B, SM);
    }
  }
  return std::make_shared<PathDiagnosticControlFlowPiece>(Start, End,
                                                          os.str());
}

void generateMinimalDiagForBlockEdge(const ExplodedNode *N, BlockEdge BE,
                                     const SourceManager &SM,
                                     PathDiagnosticBuilder &PDB,
                                     PathDiagnostic &PD) {
  const LocationContext *LC = N->getLocationContext();
  const CFGBlock *Src = BE.getSrc();
  const CFGBlock *Dst = BE.getDst();
  const Stmt *T = Src->getTerminator();
  if (!T)
    return;

  auto Start = PathDiagnosticLocation::createBegin(T, SM, LC);
  switch (T->getStmtClass()) {
  default:
    break;

  case Stmt::GotoStmtClass:
  case Stmt::IndirectGotoStmtClass: {
    if (const Stmt *S = PathDiagnosticLocation::getNextStmt(N))
      PD.getActivePath().push_front(generateDiagForGotoOP(S, PDB, Start));
    break;
  }

  case Stmt::SwitchStmtClass: {
    PD.getActivePath().push_front(
        generateDiagForSwitchOP(N, Dst, SM, LC, PDB, Start));
    break;
  }

  case Stmt::BreakStmtClass:
  case Stmt::ContinueStmtClass: {
    std::string sbuf;
    llvm::raw_string_ostream os(sbuf);
    PathDiagnosticLocation End = PDB.ExecutionContinues(os, N);
    PD.getActivePath().push_front(
        std::make_shared<PathDiagnosticControlFlowPiece>(Start, End, os.str()));
    break;
  }

  // Determine control-flow for ternary '?'.
  case Stmt::BinaryConditionalOperatorClass:
  case Stmt::ConditionalOperatorClass: {
    std::string sbuf;
    llvm::raw_string_ostream os(sbuf);
    os << "'?' condition is ";

    if (*(Src->succ_begin() + 1) == Dst)
      os << "false";
    else
      os << "true";

    PathDiagnosticLocation End = PDB.ExecutionContinues(N);

    if (const Stmt *S = End.asStmt())
      End = PDB.getEnclosingStmtLocation(S);

    PD.getActivePath().push_front(
        std::make_shared<PathDiagnosticControlFlowPiece>(Start, End, os.str()));
    break;
  }

  // Determine control-flow for short-circuited '&&' and '||'.
  case Stmt::BinaryOperatorClass: {
    if (!PDB.supportsLogicalOpControlFlow())
      break;

    std::shared_ptr<PathDiagnosticControlFlowPiece> Diag =
        generateDiagForBinaryOP(N, T, Src, Dst, SM, PDB, LC);
    PD.getActivePath().push_front(Diag);
    break;
  }

  case Stmt::DoStmtClass:
    if (*(Src->succ_begin()) == Dst) {
      std::string sbuf;
      llvm::raw_string_ostream os(sbuf);

      os << "Loop condition is true. ";
      PathDiagnosticLocation End = PDB.ExecutionContinues(os, N);

      if (const Stmt *S = End.asStmt())
        End = PDB.getEnclosingStmtLocation(S);

      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(Start, End,
                                                           os.str()));
    } else {
      PathDiagnosticLocation End = PDB.ExecutionContinues(N);

      if (const Stmt *S = End.asStmt())
        End = PDB.getEnclosingStmtLocation(S);

      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(
              Start, End, "Loop condition is false. Exiting loop"));
    }
    break;

  case Stmt::WhileStmtClass:
  case Stmt::ForStmtClass:
    if (*(Src->succ_begin() + 1) == Dst) {
      std::string sbuf;
      llvm::raw_string_ostream os(sbuf);

      os << "Loop condition is false. ";
      PathDiagnosticLocation End = PDB.ExecutionContinues(os, N);
      if (const Stmt *S = End.asStmt())
        End = PDB.getEnclosingStmtLocation(S);

      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(Start, End,
                                                           os.str()));
    } else {
      PathDiagnosticLocation End = PDB.ExecutionContinues(N);
      if (const Stmt *S = End.asStmt())
        End = PDB.getEnclosingStmtLocation(S);

      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(
              Start, End, "Loop condition is true. Entering loop body"));
    }
    break;

  case Stmt::IfStmtClass: {
    PathDiagnosticLocation End = PDB.ExecutionContinues(N);

    if (const Stmt *S = End.asStmt())
      End = PDB.getEnclosingStmtLocation(S);

    if (*(Src->succ_begin() + 1) == Dst)
      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(
              Start, End, "Taking false branch"));
    else
      PD.getActivePath().push_front(
          std::make_shared<PathDiagnosticControlFlowPiece>(
              Start, End, "Taking true branch"));
    break;
  }
  }
}

// Cone-of-influence: support the reverse propagation of "interesting" symbols
// and values by tracing interesting calculations backwards through evaluated
// expressions along a path. This is probably overly complicated, but the idea
// is that if an expression computed an "interesting" value, its child
// expressions are likely to be "interesting" as well (which then propagates
// to the values they in turn compute). This reverse propagation is needed to
// track interesting correlations across function call boundaries, where formal
// arguments bind to actual arguments, etc. This is also needed because the
// constraint solver sometimes simplifies certain symbolic values into
// constants when appropriate, and this complicates reasoning about
// interesting values.
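//
// For example (illustrative): if the value of 'x + y' is marked interesting,
// the propagation below also marks the values of 'x' and 'y' as interesting;
// across a call boundary, interest in a formal parameter's value is carried
// back to the corresponding argument expression at the call site.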

using InterestingExprs = llvm::DenseSet<const Expr *>;

static void reversePropagateIntererstingSymbols(BugReport &R,
                                                InterestingExprs &IE,
                                                const ProgramState *State,
                                                const Expr *Ex,
                                                const LocationContext *LCtx) {
  SVal V = State->getSVal(Ex, LCtx);
  if (!(R.isInteresting(V) || IE.count(Ex)))
    return;

  switch (Ex->getStmtClass()) {
    default:
      if (!isa<CastExpr>(Ex))
        break;
      LLVM_FALLTHROUGH;
    case Stmt::BinaryOperatorClass:
    case Stmt::UnaryOperatorClass: {
      for (const Stmt *SubStmt : Ex->children()) {
        if (const auto *child = dyn_cast_or_null<Expr>(SubStmt)) {
          IE.insert(child);
          SVal ChildV = State->getSVal(child, LCtx);
          R.markInteresting(ChildV);
        }
      }
      break;
    }
  }

  R.markInteresting(V);
}

static void reversePropagateInterestingSymbols(BugReport &R,
                                               InterestingExprs &IE,
                                               const ProgramState *State,
                                               const LocationContext *CalleeCtx) {
  // FIXME: Handle non-CallExpr-based CallEvents.
  const StackFrameContext *Callee = CalleeCtx->getStackFrame();
  const Stmt *CallSite = Callee->getCallSite();
  if (const auto *CE = dyn_cast_or_null<CallExpr>(CallSite)) {
    if (const auto *FD = dyn_cast<FunctionDecl>(CalleeCtx->getDecl())) {
      FunctionDecl::param_const_iterator PI = FD->param_begin(),
                                         PE = FD->param_end();
      CallExpr::const_arg_iterator AI = CE->arg_begin(), AE = CE->arg_end();
      for (; AI != AE && PI != PE; ++AI, ++PI) {
        if (const Expr *ArgE = *AI) {
          if (const ParmVarDecl *PD = *PI) {
            Loc LV = State->getLValue(PD, CalleeCtx);
            if (R.isInteresting(LV) || R.isInteresting(State->getRawSVal(LV)))
              IE.insert(ArgE);
          }
        }
      }
    }
  }
}

//===----------------------------------------------------------------------===//
// Functions for determining if a loop was executed 0 times.
//===----------------------------------------------------------------------===//

static bool isLoop(const Stmt *Term) {
  switch (Term->getStmtClass()) {
    case Stmt::ForStmtClass:
    case Stmt::WhileStmtClass:
    case Stmt::ObjCForCollectionStmtClass:
    case Stmt::CXXForRangeStmtClass:
      return true;
    default:
      // Note that we intentionally do not include do..while here.
      return false;
  }
}
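
// Note: for a two-way branch terminator, the CFG orders the successors with
// the 'true' destination first and the 'false' destination second, which is
// what the helper below relies on.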
static bool isJumpToFalseBranch(const BlockEdge *BE) {
  const CFGBlock *Src = BE->getSrc();
  assert(Src->succ_size() == 2);
  return (*(Src->succ_begin()+1) == BE->getDst());
}

static bool isContainedByStmt(ParentMap &PM, const Stmt *S, const Stmt *SubS) {
  while (SubS) {
    if (SubS == S)
      return true;
    SubS = PM.getParent(SubS);
  }
  return false;
}

static const Stmt *getStmtBeforeCond(ParentMap &PM, const Stmt *Term,
                                     const ExplodedNode *N) {
  while (N) {
    Optional<StmtPoint> SP = N->getLocation().getAs<StmtPoint>();
    if (SP) {
      const Stmt *S = SP->getStmt();
      if (!isContainedByStmt(PM, Term, S))
        return S;
    }
    N = N->getFirstPred();
  }
  return nullptr;
}

static bool isInLoopBody(ParentMap &PM, const Stmt *S, const Stmt *Term) {
  const Stmt *LoopBody = nullptr;
  switch (Term->getStmtClass()) {
    case Stmt::CXXForRangeStmtClass: {
      const auto *FR = cast<CXXForRangeStmt>(Term);
      if (isContainedByStmt(PM, FR->getInc(), S))
        return true;
      if (isContainedByStmt(PM, FR->getLoopVarStmt(), S))
        return true;
      LoopBody = FR->getBody();
      break;
    }
    case Stmt::ForStmtClass: {
      const auto *FS = cast<ForStmt>(Term);
      if (isContainedByStmt(PM, FS->getInc(), S))
        return true;
      LoopBody = FS->getBody();
      break;
    }
    case Stmt::ObjCForCollectionStmtClass: {
      const auto *FC = cast<ObjCForCollectionStmt>(Term);
      LoopBody = FC->getBody();
      break;
    }
    case Stmt::WhileStmtClass:
      LoopBody = cast<WhileStmt>(Term)->getBody();
      break;
    default:
      return false;
  }
  return isContainedByStmt(PM, LoopBody, S);
}

/// Adds a sanitized control-flow diagnostic edge to a path.
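///
/// The path is assembled while walking the ExplodedGraph backwards from the
/// error node, so each new edge is pushed onto the front of the path and runs
/// from NewLoc back to the previously recorded location (PrevLoc).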
static void addEdgeToPath(PathPieces &path,
                          PathDiagnosticLocation &PrevLoc,
                          PathDiagnosticLocation NewLoc) {
  if (!NewLoc.isValid())
    return;

  SourceLocation NewLocL = NewLoc.asLocation();
  if (NewLocL.isInvalid())
    return;

  if (!PrevLoc.isValid() || !PrevLoc.asLocation().isValid()) {
    PrevLoc = NewLoc;
    return;
  }

  // Ignore self-edges, which occur when there are multiple nodes at the same
  // statement.
  if (NewLoc.asStmt() && NewLoc.asStmt() == PrevLoc.asStmt())
    return;

  path.push_front(
      std::make_shared<PathDiagnosticControlFlowPiece>(NewLoc, PrevLoc));
  PrevLoc = NewLoc;
}

/// A customized wrapper for CFGBlock::getTerminatorCondition()
/// which returns the element for ObjCForCollectionStmts.
static const Stmt *getTerminatorCondition(const CFGBlock *B) {
  const Stmt *S = B->getTerminatorCondition();
  if (const auto *FS = dyn_cast_or_null<ObjCForCollectionStmt>(S))
    return FS->getElement();
  return S;
}

static const char StrEnteringLoop[] = "Entering loop body";
static const char StrLoopBodyZero[] = "Loop body executed 0 times";
static const char StrLoopRangeEmpty[] =
    "Loop body skipped when range is empty";
static const char StrLoopCollectionEmpty[] =
    "Loop body skipped when collection is empty";

static std::unique_ptr<FilesToLineNumsMap>
findExecutedLines(SourceManager &SM, const ExplodedNode *N);

/// Generate diagnostics for the node \p N, and write them into \p PD.
/// \p AddPathEdges Whether the diagnostic consumer can generate path arrows
/// showing both row and column.
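///
/// This is invoked once per node while the path is walked backwards from the
/// error node towards the root of the ExplodedGraph; as a consequence, a
/// CallExitEnd is normally encountered before its matching CallEnter (unless
/// the path ends inside the call itself).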
static void generatePathDiagnosticsForNode(const ExplodedNode *N,
                                           PathDiagnostic &PD,
                                           PathDiagnosticLocation &PrevLoc,
                                           PathDiagnosticBuilder &PDB,
                                           LocationContextMap &LCM,
                                           StackDiagVector &CallStack,
                                           InterestingExprs &IE,
                                           bool AddPathEdges) {
  ProgramPoint P = N->getLocation();
  const SourceManager& SM = PDB.getSourceManager();

  // Have we encountered an entrance to a call? It may be
  // the case that we have not encountered a matching
  // call exit before this point. This means that the path
  // terminated within the call itself.
  if (auto CE = P.getAs<CallEnter>()) {

    if (AddPathEdges) {
      // Add an edge to the start of the function.
      const StackFrameContext *CalleeLC = CE->getCalleeContext();
      const Decl *D = CalleeLC->getDecl();
      // Add the edge only when the callee has body. We jump to the beginning
      // of the *declaration*, however we expect it to be followed by the
      // body. This isn't the case for autosynthesized property accessors in
      // Objective-C. No need for a similar extra check for CallExit points
      // because the exit edge comes from a statement (i.e. return),
      // not from declaration.
      if (D->hasBody())
        addEdgeToPath(PD.getActivePath(), PrevLoc,
                      PathDiagnosticLocation::createBegin(D, SM));
    }

    // Did we visit an entire call?
    bool VisitedEntireCall = PD.isWithinCall();
    PD.popActivePath();

    PathDiagnosticCallPiece *C;
    if (VisitedEntireCall) {
      C = cast<PathDiagnosticCallPiece>(PD.getActivePath().front().get());
    } else {
      const Decl *Caller = CE->getLocationContext()->getDecl();
      C = PathDiagnosticCallPiece::construct(PD.getActivePath(), Caller);

      if (AddPathEdges) {
        // Since we just transferred the path over to the call piece,
        // reset the mapping from active to location context.
        assert(PD.getActivePath().size() == 1 &&
               PD.getActivePath().front().get() == C);
        LCM[&PD.getActivePath()] = nullptr;
      }

      // Record the location context mapping for the path within
      // the call.
      assert(LCM[&C->path] == nullptr ||
             LCM[&C->path] == CE->getCalleeContext());
      LCM[&C->path] = CE->getCalleeContext();

      // If this is the first item in the active path, record
      // the new mapping from active path to location context.
      const LocationContext *&NewLC = LCM[&PD.getActivePath()];
      if (!NewLC)
        NewLC = N->getLocationContext();

      PDB.LC = NewLC;
    }
    C->setCallee(*CE, SM);

    // Update the previous location in the active path.
    PrevLoc = C->getLocation();

    if (!CallStack.empty()) {
      assert(CallStack.back().first == C);
      CallStack.pop_back();
    }
    return;
  }

  if (AddPathEdges) {
    // Query the location context here and the previous location
    // as processing CallEnter may change the active path.
    PDB.LC = N->getLocationContext();

    // Record the mapping from the active path to the location
    // context.
    assert(!LCM[&PD.getActivePath()] || LCM[&PD.getActivePath()] == PDB.LC);
    LCM[&PD.getActivePath()] = PDB.LC;
  }

  // Have we encountered an exit from a function call?
  if (Optional<CallExitEnd> CE = P.getAs<CallExitEnd>()) {

    // We are descending into a call (backwards). Construct
    // a new call piece to contain the path pieces for that call.
    auto C = PathDiagnosticCallPiece::construct(*CE, SM);
    // Record the mapping from call piece to LocationContext.
    LCM[&C->path] = CE->getCalleeContext();

    if (AddPathEdges) {
      const Stmt *S = CE->getCalleeContext()->getCallSite();
      // Propagate the interesting symbols accordingly.
      if (const auto *Ex = dyn_cast_or_null<Expr>(S)) {
        reversePropagateIntererstingSymbols(*PDB.getBugReport(), IE,
                                            N->getState().get(), Ex,
                                            N->getLocationContext());
      }
      // Add the edge to the return site.
      addEdgeToPath(PD.getActivePath(), PrevLoc, C->callReturn);
      PrevLoc.invalidate();
    }

    auto *P = C.get();
    PD.getActivePath().push_front(std::move(C));

    // Make the contents of the call the active path for now.
    PD.pushActivePath(&P->path);
    CallStack.push_back(StackDiagPair(P, N));
    return;
  }

  if (auto PS = P.getAs<PostStmt>()) {
    if (!AddPathEdges)
      return;

    // For expressions, make sure we propagate the
    // interesting symbols correctly.
    if (const Expr *Ex = PS->getStmtAs<Expr>())
      reversePropagateIntererstingSymbols(*PDB.getBugReport(), IE,
                                          N->getState().get(), Ex,
                                          N->getLocationContext());

    // Add an edge. If this is an ObjCForCollectionStmt do
    // not add an edge here as it appears in the CFG both
    // as a terminator and as a terminator condition.
    if (!isa<ObjCForCollectionStmt>(PS->getStmt())) {
      PathDiagnosticLocation L =
          PathDiagnosticLocation(PS->getStmt(), SM, PDB.LC);
      addEdgeToPath(PD.getActivePath(), PrevLoc, L);
    }

  } else if (auto BE = P.getAs<BlockEdge>()) {

    if (!AddPathEdges) {
      generateMinimalDiagForBlockEdge(N, *BE, SM, PDB, PD);
      return;
    }

    // Does this represent entering a call? If so, look at propagating
    // interesting symbols across call boundaries.
    if (const ExplodedNode *NextNode = N->getFirstPred()) {
      const LocationContext *CallerCtx = NextNode->getLocationContext();
      const LocationContext *CalleeCtx = PDB.LC;
      if (CallerCtx != CalleeCtx && AddPathEdges) {
        reversePropagateInterestingSymbols(*PDB.getBugReport(), IE,
                                           N->getState().get(), CalleeCtx);
      }
    }

    // Are we jumping to the head of a loop? Add a special diagnostic.
    if (const Stmt *Loop = BE->getSrc()->getLoopTarget()) {
      PathDiagnosticLocation L(Loop, SM, PDB.LC);
      const Stmt *Body = nullptr;

      if (const auto *FS = dyn_cast<ForStmt>(Loop))
        Body = FS->getBody();
      else if (const auto *WS = dyn_cast<WhileStmt>(Loop))
        Body = WS->getBody();
      else if (const auto *OFS = dyn_cast<ObjCForCollectionStmt>(Loop)) {
        Body = OFS->getBody();
      } else if (const auto *FRS = dyn_cast<CXXForRangeStmt>(Loop)) {
        Body = FRS->getBody();
      }
      // do-while statements are explicitly excluded here

      auto p = std::make_shared<PathDiagnosticEventPiece>(
          L, "Looping back to the head of the loop");
      p->setPrunable(true);

      addEdgeToPath(PD.getActivePath(), PrevLoc, p->getLocation());
      PD.getActivePath().push_front(std::move(p));

      if (const auto *CS = dyn_cast_or_null<CompoundStmt>(Body)) {
        addEdgeToPath(PD.getActivePath(), PrevLoc,
                      PathDiagnosticLocation::createEndBrace(CS, SM));
      }
    }

    const CFGBlock *BSrc = BE->getSrc();
    ParentMap &PM = PDB.getParentMap();

    if (const Stmt *Term = BSrc->getTerminator()) {
      // Are we jumping past the loop body without ever executing the
      // loop (because the condition was false)?
      if (isLoop(Term)) {
        const Stmt *TermCond = getTerminatorCondition(BSrc);
        bool IsInLoopBody =
            isInLoopBody(PM, getStmtBeforeCond(PM, TermCond, N), Term);

        const char *str = nullptr;

        if (isJumpToFalseBranch(&*BE)) {
          if (!IsInLoopBody) {
            if (isa<ObjCForCollectionStmt>(Term)) {
              str = StrLoopCollectionEmpty;
            } else if (isa<CXXForRangeStmt>(Term)) {
              str = StrLoopRangeEmpty;
            } else {
              str = StrLoopBodyZero;
            }
          }
        } else {
          str = StrEnteringLoop;
        }

        if (str) {
          PathDiagnosticLocation L(TermCond ? TermCond : Term, SM, PDB.LC);
          auto PE = std::make_shared<PathDiagnosticEventPiece>(L, str);
          PE->setPrunable(true);
          addEdgeToPath(PD.getActivePath(), PrevLoc, PE->getLocation());
          PD.getActivePath().push_front(std::move(PE));
        }
      } else if (isa<BreakStmt>(Term) || isa<ContinueStmt>(Term) ||
                 isa<GotoStmt>(Term)) {
        PathDiagnosticLocation L(Term, SM, PDB.LC);
        addEdgeToPath(PD.getActivePath(), PrevLoc, L);
      }
    }
  }
}
  1072. static std::unique_ptr<PathDiagnostic>
  1073. generateEmptyDiagnosticForReport(BugReport *R, SourceManager &SM) {
  1074. BugType &BT = R->getBugType();
  1075. return llvm::make_unique<PathDiagnostic>(
  1076. R->getBugType().getCheckName(), R->getDeclWithIssue(),
  1077. R->getBugType().getName(), R->getDescription(),
  1078. R->getShortDescription(/*Fallback=*/false), BT.getCategory(),
  1079. R->getUniqueingLocation(), R->getUniqueingDecl(),
  1080. findExecutedLines(SM, R->getErrorNode()));
  1081. }
  1082. static const Stmt *getStmtParent(const Stmt *S, const ParentMap &PM) {
  1083. if (!S)
  1084. return nullptr;
  1085. while (true) {
  1086. S = PM.getParentIgnoreParens(S);
  1087. if (!S)
  1088. break;
  1089. if (isa<FullExpr>(S) ||
  1090. isa<CXXBindTemporaryExpr>(S) ||
  1091. isa<SubstNonTypeTemplateParmExpr>(S))
  1092. continue;
  1093. break;
  1094. }
  1095. return S;
  1096. }
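/// Return true if \p Cond is the condition (or, for conditional operators,
/// one of the branch subexpressions) that the terminator statement \p S
/// branches on.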
  1097. static bool isConditionForTerminator(const Stmt *S, const Stmt *Cond) {
  1098. switch (S->getStmtClass()) {
  1099. case Stmt::BinaryOperatorClass: {
  1100. const auto *BO = cast<BinaryOperator>(S);
  1101. if (!BO->isLogicalOp())
  1102. return false;
  1103. return BO->getLHS() == Cond || BO->getRHS() == Cond;
  1104. }
  1105. case Stmt::IfStmtClass:
  1106. return cast<IfStmt>(S)->getCond() == Cond;
  1107. case Stmt::ForStmtClass:
  1108. return cast<ForStmt>(S)->getCond() == Cond;
  1109. case Stmt::WhileStmtClass:
  1110. return cast<WhileStmt>(S)->getCond() == Cond;
  1111. case Stmt::DoStmtClass:
  1112. return cast<DoStmt>(S)->getCond() == Cond;
  1113. case Stmt::ChooseExprClass:
  1114. return cast<ChooseExpr>(S)->getCond() == Cond;
  1115. case Stmt::IndirectGotoStmtClass:
  1116. return cast<IndirectGotoStmt>(S)->getTarget() == Cond;
  1117. case Stmt::SwitchStmtClass:
  1118. return cast<SwitchStmt>(S)->getCond() == Cond;
  1119. case Stmt::BinaryConditionalOperatorClass:
  1120. return cast<BinaryConditionalOperator>(S)->getCond() == Cond;
  1121. case Stmt::ConditionalOperatorClass: {
  1122. const auto *CO = cast<ConditionalOperator>(S);
  1123. return CO->getCond() == Cond ||
  1124. CO->getLHS() == Cond ||
  1125. CO->getRHS() == Cond;
  1126. }
  1127. case Stmt::ObjCForCollectionStmtClass:
  1128. return cast<ObjCForCollectionStmt>(S)->getElement() == Cond;
  1129. case Stmt::CXXForRangeStmtClass: {
  1130. const auto *FRS = cast<CXXForRangeStmt>(S);
  1131. return FRS->getCond() == Cond || FRS->getRangeInit() == Cond;
  1132. }
  1133. default:
  1134. return false;
  1135. }
  1136. }
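/// Return true if \p S is the increment or initialization part of the
/// for-style loop statement \p FL.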
  1137. static bool isIncrementOrInitInForLoop(const Stmt *S, const Stmt *FL) {
  1138. if (const auto *FS = dyn_cast<ForStmt>(FL))
  1139. return FS->getInc() == S || FS->getInit() == S;
  1140. if (const auto *FRS = dyn_cast<CXXForRangeStmt>(FL))
return FRS->getInc() == S || FRS->getRangeStmt() == S ||
FRS->getLoopVarStmt() == S || FRS->getRangeInit() == S;
  1143. return false;
  1144. }
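/// Call pieces whose subpaths have already been edge-optimized, so that the
/// optimization is performed at most once per call.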
  1145. using OptimizedCallsSet = llvm::DenseSet<const PathDiagnosticCallPiece *>;
  1146. /// Adds synthetic edges from top-level statements to their subexpressions.
  1147. ///
  1148. /// This avoids a "swoosh" effect, where an edge from a top-level statement A
  1149. /// points to a sub-expression B.1 that's not at the start of B. In these cases,
  1150. /// we'd like to see an edge from A to B, then another one from B to B.1.
  1151. static void addContextEdges(PathPieces &pieces, SourceManager &SM,
  1152. const ParentMap &PM, const LocationContext *LCtx) {
  1153. PathPieces::iterator Prev = pieces.end();
  1154. for (PathPieces::iterator I = pieces.begin(), E = Prev; I != E;
  1155. Prev = I, ++I) {
  1156. auto *Piece = dyn_cast<PathDiagnosticControlFlowPiece>(I->get());
  1157. if (!Piece)
  1158. continue;
  1159. PathDiagnosticLocation SrcLoc = Piece->getStartLocation();
  1160. SmallVector<PathDiagnosticLocation, 4> SrcContexts;
  1161. PathDiagnosticLocation NextSrcContext = SrcLoc;
  1162. const Stmt *InnerStmt = nullptr;
  1163. while (NextSrcContext.isValid() && NextSrcContext.asStmt() != InnerStmt) {
  1164. SrcContexts.push_back(NextSrcContext);
  1165. InnerStmt = NextSrcContext.asStmt();
  1166. NextSrcContext = getEnclosingStmtLocation(InnerStmt, SM, PM, LCtx,
  1167. /*allowNested=*/true);
  1168. }
  1169. // Repeatedly split the edge as necessary.
  1170. // This is important for nested logical expressions (||, &&, ?:) where we
  1171. // want to show all the levels of context.
  1172. while (true) {
  1173. const Stmt *Dst = Piece->getEndLocation().getStmtOrNull();
  1174. // We are looking at an edge. Is the destination within a larger
  1175. // expression?
  1176. PathDiagnosticLocation DstContext =
  1177. getEnclosingStmtLocation(Dst, SM, PM, LCtx, /*allowNested=*/true);
  1178. if (!DstContext.isValid() || DstContext.asStmt() == Dst)
  1179. break;
  1180. // If the source is in the same context, we're already good.
  1181. if (std::find(SrcContexts.begin(), SrcContexts.end(), DstContext) !=
  1182. SrcContexts.end())
  1183. break;
  1184. // Update the subexpression node to point to the context edge.
  1185. Piece->setStartLocation(DstContext);
  1186. // Try to extend the previous edge if it's at the same level as the source
  1187. // context.
  1188. if (Prev != E) {
  1189. auto *PrevPiece = dyn_cast<PathDiagnosticControlFlowPiece>(Prev->get());
  1190. if (PrevPiece) {
  1191. if (const Stmt *PrevSrc =
  1192. PrevPiece->getStartLocation().getStmtOrNull()) {
  1193. const Stmt *PrevSrcParent = getStmtParent(PrevSrc, PM);
  1194. if (PrevSrcParent ==
  1195. getStmtParent(DstContext.getStmtOrNull(), PM)) {
  1196. PrevPiece->setEndLocation(DstContext);
  1197. break;
  1198. }
  1199. }
  1200. }
  1201. }
  1202. // Otherwise, split the current edge into a context edge and a
  1203. // subexpression edge. Note that the context statement may itself have
  1204. // context.
  1205. auto P =
  1206. std::make_shared<PathDiagnosticControlFlowPiece>(SrcLoc, DstContext);
  1207. Piece = P.get();
  1208. I = pieces.insert(I, std::move(P));
  1209. }
  1210. }
  1211. }
  1212. /// Move edges from a branch condition to a branch target
  1213. /// when the condition is simple.
  1214. ///
/// This restructures some of the work of addContextEdges. That function
/// creates edges that this one may later destroy, but the two work together
/// to produce a more aesthetically pleasing set of edges around branches.
/// After the call to addContextEdges, we may have (1) an edge to the branch,
/// (2) an edge from the branch to the branch condition, and (3) an edge from
/// the branch condition to the branch target. We keep (1), but may wish to
/// remove (2) and move the source of (3) to the branch if the branch
/// condition is simple.
  1222. static void simplifySimpleBranches(PathPieces &pieces) {
  1223. for (PathPieces::iterator I = pieces.begin(), E = pieces.end(); I != E; ++I) {
  1224. const auto *PieceI = dyn_cast<PathDiagnosticControlFlowPiece>(I->get());
  1225. if (!PieceI)
  1226. continue;
  1227. const Stmt *s1Start = PieceI->getStartLocation().getStmtOrNull();
  1228. const Stmt *s1End = PieceI->getEndLocation().getStmtOrNull();
  1229. if (!s1Start || !s1End)
  1230. continue;
  1231. PathPieces::iterator NextI = I; ++NextI;
  1232. if (NextI == E)
  1233. break;
  1234. PathDiagnosticControlFlowPiece *PieceNextI = nullptr;
  1235. while (true) {
  1236. if (NextI == E)
  1237. break;
  1238. const auto *EV = dyn_cast<PathDiagnosticEventPiece>(NextI->get());
  1239. if (EV) {
  1240. StringRef S = EV->getString();
  1241. if (S == StrEnteringLoop || S == StrLoopBodyZero ||
  1242. S == StrLoopCollectionEmpty || S == StrLoopRangeEmpty) {
  1243. ++NextI;
  1244. continue;
  1245. }
  1246. break;
  1247. }
  1248. PieceNextI = dyn_cast<PathDiagnosticControlFlowPiece>(NextI->get());
  1249. break;
  1250. }
  1251. if (!PieceNextI)
  1252. continue;
  1253. const Stmt *s2Start = PieceNextI->getStartLocation().getStmtOrNull();
  1254. const Stmt *s2End = PieceNextI->getEndLocation().getStmtOrNull();
  1255. if (!s2Start || !s2End || s1End != s2Start)
  1256. continue;
  1257. // We only perform this transformation for specific branch kinds.
  1258. // We don't want to do this for do..while, for example.
  1259. if (!(isa<ForStmt>(s1Start) || isa<WhileStmt>(s1Start) ||
  1260. isa<IfStmt>(s1Start) || isa<ObjCForCollectionStmt>(s1Start) ||
  1261. isa<CXXForRangeStmt>(s1Start)))
  1262. continue;
  1263. // Is s1End the branch condition?
  1264. if (!isConditionForTerminator(s1Start, s1End))
  1265. continue;
  1266. // Perform the hoisting by eliminating (2) and changing the start
  1267. // location of (3).
  1268. PieceNextI->setStartLocation(PieceI->getStartLocation());
  1269. I = pieces.erase(I);
  1270. }
  1271. }
  1272. /// Returns the number of bytes in the given (character-based) SourceRange.
  1273. ///
  1274. /// If the locations in the range are not on the same line, returns None.
  1275. ///
  1276. /// Note that this does not do a precise user-visible character or column count.
  1277. static Optional<size_t> getLengthOnSingleLine(SourceManager &SM,
  1278. SourceRange Range) {
  1279. SourceRange ExpansionRange(SM.getExpansionLoc(Range.getBegin()),
  1280. SM.getExpansionRange(Range.getEnd()).getEnd());
  1281. FileID FID = SM.getFileID(ExpansionRange.getBegin());
  1282. if (FID != SM.getFileID(ExpansionRange.getEnd()))
  1283. return None;
  1284. bool Invalid;
  1285. const llvm::MemoryBuffer *Buffer = SM.getBuffer(FID, &Invalid);
  1286. if (Invalid)
  1287. return None;
  1288. unsigned BeginOffset = SM.getFileOffset(ExpansionRange.getBegin());
  1289. unsigned EndOffset = SM.getFileOffset(ExpansionRange.getEnd());
  1290. StringRef Snippet = Buffer->getBuffer().slice(BeginOffset, EndOffset);
  1291. // We're searching the raw bytes of the buffer here, which might include
  1292. // escaped newlines and such. That's okay; we're trying to decide whether the
  1293. // SourceRange is covering a large or small amount of space in the user's
  1294. // editor.
  1295. if (Snippet.find_first_of("\r\n") != StringRef::npos)
  1296. return None;
  1297. // This isn't Unicode-aware, but it doesn't need to be.
  1298. return Snippet.size();
  1299. }
  1300. /// \sa getLengthOnSingleLine(SourceManager, SourceRange)
  1301. static Optional<size_t> getLengthOnSingleLine(SourceManager &SM,
  1302. const Stmt *S) {
  1303. return getLengthOnSingleLine(SM, S->getSourceRange());
  1304. }
  1305. /// Eliminate two-edge cycles created by addContextEdges().
  1306. ///
  1307. /// Once all the context edges are in place, there are plenty of cases where
  1308. /// there's a single edge from a top-level statement to a subexpression,
  1309. /// followed by a single path note, and then a reverse edge to get back out to
  1310. /// the top level. If the statement is simple enough, the subexpression edges
  1311. /// just add noise and make it harder to understand what's going on.
  1312. ///
  1313. /// This function only removes edges in pairs, because removing only one edge
  1314. /// might leave other edges dangling.
  1315. ///
  1316. /// This will not remove edges in more complicated situations:
  1317. /// - if there is more than one "hop" leading to or from a subexpression.
  1318. /// - if there is an inlined call between the edges instead of a single event.
  1319. /// - if the whole statement is large enough that having subexpression arrows
  1320. /// might be helpful.
  1321. static void removeContextCycles(PathPieces &Path, SourceManager &SM) {
  1322. for (PathPieces::iterator I = Path.begin(), E = Path.end(); I != E; ) {
  1323. // Pattern match the current piece and its successor.
  1324. const auto *PieceI = dyn_cast<PathDiagnosticControlFlowPiece>(I->get());
  1325. if (!PieceI) {
  1326. ++I;
  1327. continue;
  1328. }
  1329. const Stmt *s1Start = PieceI->getStartLocation().getStmtOrNull();
  1330. const Stmt *s1End = PieceI->getEndLocation().getStmtOrNull();
  1331. PathPieces::iterator NextI = I; ++NextI;
  1332. if (NextI == E)
  1333. break;
  1334. const auto *PieceNextI =
  1335. dyn_cast<PathDiagnosticControlFlowPiece>(NextI->get());
  1336. if (!PieceNextI) {
  1337. if (isa<PathDiagnosticEventPiece>(NextI->get())) {
  1338. ++NextI;
  1339. if (NextI == E)
  1340. break;
  1341. PieceNextI = dyn_cast<PathDiagnosticControlFlowPiece>(NextI->get());
  1342. }
  1343. if (!PieceNextI) {
  1344. ++I;
  1345. continue;
  1346. }
  1347. }
  1348. const Stmt *s2Start = PieceNextI->getStartLocation().getStmtOrNull();
  1349. const Stmt *s2End = PieceNextI->getEndLocation().getStmtOrNull();
  1350. if (s1Start && s2Start && s1Start == s2End && s2Start == s1End) {
  1351. const size_t MAX_SHORT_LINE_LENGTH = 80;
  1352. Optional<size_t> s1Length = getLengthOnSingleLine(SM, s1Start);
  1353. if (s1Length && *s1Length <= MAX_SHORT_LINE_LENGTH) {
  1354. Optional<size_t> s2Length = getLengthOnSingleLine(SM, s2Start);
  1355. if (s2Length && *s2Length <= MAX_SHORT_LINE_LENGTH) {
  1356. Path.erase(I);
  1357. I = Path.erase(NextI);
  1358. continue;
  1359. }
  1360. }
  1361. }
  1362. ++I;
  1363. }
  1364. }
  1365. /// Return true if X is contained by Y.
  1366. static bool lexicalContains(ParentMap &PM, const Stmt *X, const Stmt *Y) {
  1367. while (X) {
  1368. if (X == Y)
  1369. return true;
  1370. X = PM.getParent(X);
  1371. }
  1372. return false;
  1373. }
// Remove short edges on the same line that differ by fewer than 3 columns.
  1375. static void removePunyEdges(PathPieces &path, SourceManager &SM,
  1376. ParentMap &PM) {
  1377. bool erased = false;
  1378. for (PathPieces::iterator I = path.begin(), E = path.end(); I != E;
  1379. erased ? I : ++I) {
  1380. erased = false;
  1381. const auto *PieceI = dyn_cast<PathDiagnosticControlFlowPiece>(I->get());
  1382. if (!PieceI)
  1383. continue;
  1384. const Stmt *start = PieceI->getStartLocation().getStmtOrNull();
  1385. const Stmt *end = PieceI->getEndLocation().getStmtOrNull();
  1386. if (!start || !end)
  1387. continue;
  1388. const Stmt *endParent = PM.getParent(end);
  1389. if (!endParent)
  1390. continue;
  1391. if (isConditionForTerminator(end, endParent))
  1392. continue;
  1393. SourceLocation FirstLoc = start->getBeginLoc();
  1394. SourceLocation SecondLoc = end->getBeginLoc();
  1395. if (!SM.isWrittenInSameFile(FirstLoc, SecondLoc))
  1396. continue;
  1397. if (SM.isBeforeInTranslationUnit(SecondLoc, FirstLoc))
  1398. std::swap(SecondLoc, FirstLoc);
  1399. SourceRange EdgeRange(FirstLoc, SecondLoc);
  1400. Optional<size_t> ByteWidth = getLengthOnSingleLine(SM, EdgeRange);
  1401. // If the statements are on different lines, continue.
  1402. if (!ByteWidth)
  1403. continue;
  1404. const size_t MAX_PUNY_EDGE_LENGTH = 2;
  1405. if (*ByteWidth <= MAX_PUNY_EDGE_LENGTH) {
  1406. // FIXME: There are enough /bytes/ between the endpoints of the edge, but
  1407. // there might not be enough /columns/. A proper user-visible column count
  1408. // is probably too expensive, though.
  1409. I = path.erase(I);
  1410. erased = true;
  1411. continue;
  1412. }
  1413. }
  1414. }
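/// Remove the second of any two consecutive event pieces that carry exactly
/// the same message text.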
  1415. static void removeIdenticalEvents(PathPieces &path) {
  1416. for (PathPieces::iterator I = path.begin(), E = path.end(); I != E; ++I) {
  1417. const auto *PieceI = dyn_cast<PathDiagnosticEventPiece>(I->get());
  1418. if (!PieceI)
  1419. continue;
  1420. PathPieces::iterator NextI = I; ++NextI;
  1421. if (NextI == E)
  1422. return;
  1423. const auto *PieceNextI = dyn_cast<PathDiagnosticEventPiece>(NextI->get());
  1424. if (!PieceNextI)
  1425. continue;
  1426. // Erase the second piece if it has the same exact message text.
  1427. if (PieceI->getString() == PieceNextI->getString()) {
  1428. path.erase(NextI);
  1429. }
  1430. }
  1431. }
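/// Perform a single simplification pass over the edges in \p path, recursing
/// into call pieces, and return true if anything changed. Callers run this in
/// a loop until it reaches a fixed point; the final cleanup passes run only on
/// the last, change-free iteration.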
  1432. static bool optimizeEdges(PathPieces &path, SourceManager &SM,
  1433. OptimizedCallsSet &OCS,
  1434. LocationContextMap &LCM) {
  1435. bool hasChanges = false;
  1436. const LocationContext *LC = LCM[&path];
  1437. assert(LC);
  1438. ParentMap &PM = LC->getParentMap();
  1439. for (PathPieces::iterator I = path.begin(), E = path.end(); I != E; ) {
  1440. // Optimize subpaths.
  1441. if (auto *CallI = dyn_cast<PathDiagnosticCallPiece>(I->get())) {
// Record the fact that a call has been optimized so we only spend the
// effort once.
  1444. if (!OCS.count(CallI)) {
  1445. while (optimizeEdges(CallI->path, SM, OCS, LCM)) {}
  1446. OCS.insert(CallI);
  1447. }
  1448. ++I;
  1449. continue;
  1450. }
  1451. // Pattern match the current piece and its successor.
  1452. auto *PieceI = dyn_cast<PathDiagnosticControlFlowPiece>(I->get());
  1453. if (!PieceI) {
  1454. ++I;
  1455. continue;
  1456. }
  1457. const Stmt *s1Start = PieceI->getStartLocation().getStmtOrNull();
  1458. const Stmt *s1End = PieceI->getEndLocation().getStmtOrNull();
  1459. const Stmt *level1 = getStmtParent(s1Start, PM);
  1460. const Stmt *level2 = getStmtParent(s1End, PM);
  1461. PathPieces::iterator NextI = I; ++NextI;
  1462. if (NextI == E)
  1463. break;
  1464. const auto *PieceNextI = dyn_cast<PathDiagnosticControlFlowPiece>(NextI->get());
  1465. if (!PieceNextI) {
  1466. ++I;
  1467. continue;
  1468. }
  1469. const Stmt *s2Start = PieceNextI->getStartLocation().getStmtOrNull();
  1470. const Stmt *s2End = PieceNextI->getEndLocation().getStmtOrNull();
  1471. const Stmt *level3 = getStmtParent(s2Start, PM);
  1472. const Stmt *level4 = getStmtParent(s2End, PM);
  1473. // Rule I.
  1474. //
  1475. // If we have two consecutive control edges whose end/begin locations
  1476. // are at the same level (e.g. statements or top-level expressions within
  1477. // a compound statement, or siblings share a single ancestor expression),
  1478. // then merge them if they have no interesting intermediate event.
  1479. //
  1480. // For example:
  1481. //
  1482. // (1.1 -> 1.2) -> (1.2 -> 1.3) becomes (1.1 -> 1.3) because the common
  1483. // parent is '1'. Here 'x.y.z' represents the hierarchy of statements.
  1484. //
  1485. // NOTE: this will be limited later in cases where we add barriers
  1486. // to prevent this optimization.
  1487. if (level1 && level1 == level2 && level1 == level3 && level1 == level4) {
  1488. PieceI->setEndLocation(PieceNextI->getEndLocation());
  1489. path.erase(NextI);
  1490. hasChanges = true;
  1491. continue;
  1492. }
  1493. // Rule II.
  1494. //
  1495. // Eliminate edges between subexpressions and parent expressions
  1496. // when the subexpression is consumed.
  1497. //
  1498. // NOTE: this will be limited later in cases where we add barriers
  1499. // to prevent this optimization.
  1500. if (s1End && s1End == s2Start && level2) {
  1501. bool removeEdge = false;
  1502. // Remove edges into the increment or initialization of a
  1503. // loop that have no interleaving event. This means that
  1504. // they aren't interesting.
  1505. if (isIncrementOrInitInForLoop(s1End, level2))
  1506. removeEdge = true;
// Next, only consider edges that are not anchored on
// the condition of a terminator. These are intermediate edges
// that we might want to trim.
  1510. else if (!isConditionForTerminator(level2, s1End)) {
  1511. // Trim edges on expressions that are consumed by
  1512. // the parent expression.
  1513. if (isa<Expr>(s1End) && PM.isConsumedExpr(cast<Expr>(s1End))) {
  1514. removeEdge = true;
  1515. }
  1516. // Trim edges where a lexical containment doesn't exist.
  1517. // For example:
  1518. //
  1519. // X -> Y -> Z
  1520. //
  1521. // If 'Z' lexically contains Y (it is an ancestor) and
  1522. // 'X' does not lexically contain Y (it is a descendant OR
  1523. // it has no lexical relationship at all) then trim.
  1524. //
  1525. // This can eliminate edges where we dive into a subexpression
  1526. // and then pop back out, etc.
  1527. else if (s1Start && s2End &&
  1528. lexicalContains(PM, s2Start, s2End) &&
  1529. !lexicalContains(PM, s1End, s1Start)) {
  1530. removeEdge = true;
  1531. }
  1532. // Trim edges from a subexpression back to the top level if the
  1533. // subexpression is on a different line.
  1534. //
  1535. // A.1 -> A -> B
  1536. // becomes
  1537. // A.1 -> B
  1538. //
  1539. // These edges just look ugly and don't usually add anything.
  1540. else if (s1Start && s2End &&
  1541. lexicalContains(PM, s1Start, s1End)) {
  1542. SourceRange EdgeRange(PieceI->getEndLocation().asLocation(),
  1543. PieceI->getStartLocation().asLocation());
  1544. if (!getLengthOnSingleLine(SM, EdgeRange).hasValue())
  1545. removeEdge = true;
  1546. }
  1547. }
  1548. if (removeEdge) {
  1549. PieceI->setEndLocation(PieceNextI->getEndLocation());
  1550. path.erase(NextI);
  1551. hasChanges = true;
  1552. continue;
  1553. }
  1554. }
  1555. // Optimize edges for ObjC fast-enumeration loops.
  1556. //
  1557. // (X -> collection) -> (collection -> element)
  1558. //
  1559. // becomes:
  1560. //
  1561. // (X -> element)
  1562. if (s1End == s2Start) {
  1563. const auto *FS = dyn_cast_or_null<ObjCForCollectionStmt>(level3);
  1564. if (FS && FS->getCollection()->IgnoreParens() == s2Start &&
  1565. s2End == FS->getElement()) {
  1566. PieceI->setEndLocation(PieceNextI->getEndLocation());
  1567. path.erase(NextI);
  1568. hasChanges = true;
  1569. continue;
  1570. }
  1571. }
  1572. // No changes at this index? Move to the next one.
  1573. ++I;
  1574. }
  1575. if (!hasChanges) {
  1576. // Adjust edges into subexpressions to make them more uniform
  1577. // and aesthetically pleasing.
  1578. addContextEdges(path, SM, PM, LC);
  1579. // Remove "cyclical" edges that include one or more context edges.
  1580. removeContextCycles(path, SM);
  1581. // Hoist edges originating from branch conditions to branches
  1582. // for simple branches.
  1583. simplifySimpleBranches(path);
  1584. // Remove any puny edges left over after primary optimization pass.
  1585. removePunyEdges(path, SM, PM);
  1586. // Remove identical events.
  1587. removeIdenticalEvents(path);
  1588. }
  1589. return hasChanges;
  1590. }
  1591. /// Drop the very first edge in a path, which should be a function entry edge.
  1592. ///
  1593. /// If the first edge is not a function entry edge (say, because the first
  1594. /// statement had an invalid source location), this function does nothing.
  1595. // FIXME: We should just generate invalid edges anyway and have the optimizer
  1596. // deal with them.
  1597. static void dropFunctionEntryEdge(PathPieces &Path, LocationContextMap &LCM,
  1598. SourceManager &SM) {
  1599. const auto *FirstEdge =
  1600. dyn_cast<PathDiagnosticControlFlowPiece>(Path.front().get());
  1601. if (!FirstEdge)
  1602. return;
  1603. const Decl *D = LCM[&Path]->getDecl();
  1604. PathDiagnosticLocation EntryLoc = PathDiagnosticLocation::createBegin(D, SM);
  1605. if (FirstEdge->getStartLocation() != EntryLoc)
  1606. return;
  1607. Path.pop_front();
  1608. }
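/// Map from an exploded node to the diagnostic pieces that bug report
/// visitors attached to it.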
  1609. using VisitorsDiagnosticsTy = llvm::DenseMap<const ExplodedNode *,
  1610. std::vector<std::shared_ptr<PathDiagnosticPiece>>>;
/// Populate executed lines with lines containing at least one diagnostic.
  1612. static void updateExecutedLinesWithDiagnosticPieces(
  1613. PathDiagnostic &PD) {
  1614. PathPieces path = PD.path.flatten(/*ShouldFlattenMacros=*/true);
  1615. FilesToLineNumsMap &ExecutedLines = PD.getExecutedLines();
  1616. for (const auto &P : path) {
  1617. FullSourceLoc Loc = P->getLocation().asLocation().getExpansionLoc();
  1618. FileID FID = Loc.getFileID();
  1619. unsigned LineNo = Loc.getLineNumber();
  1620. assert(FID.isValid());
  1621. ExecutedLines[FID].insert(LineNo);
  1622. }
  1623. }
  1624. /// This function is responsible for generating diagnostic pieces that are
  1625. /// *not* provided by bug report visitors.
  1626. /// These diagnostics may differ depending on the consumer's settings,
  1627. /// and are therefore constructed separately for each consumer.
  1628. ///
  1629. /// There are two path diagnostics generation modes: with adding edges (used
  1630. /// for plists) and without (used for HTML and text).
  1631. /// When edges are added (\p ActiveScheme is Extensive),
  1632. /// the path is modified to insert artificially generated
  1633. /// edges.
/// Otherwise, more detailed diagnostics are emitted for block edges,
/// explaining the transitions in words.
  1636. static std::unique_ptr<PathDiagnostic> generatePathDiagnosticForConsumer(
  1637. PathDiagnosticConsumer::PathGenerationScheme ActiveScheme,
  1638. PathDiagnosticBuilder &PDB,
  1639. const ExplodedNode *ErrorNode,
  1640. const VisitorsDiagnosticsTy &VisitorsDiagnostics) {
  1641. bool GenerateDiagnostics = (ActiveScheme != PathDiagnosticConsumer::None);
  1642. bool AddPathEdges = (ActiveScheme == PathDiagnosticConsumer::Extensive);
  1643. SourceManager &SM = PDB.getSourceManager();
  1644. BugReport *R = PDB.getBugReport();
  1645. AnalyzerOptions &Opts = PDB.getBugReporter().getAnalyzerOptions();
  1646. StackDiagVector CallStack;
  1647. InterestingExprs IE;
  1648. LocationContextMap LCM;
  1649. std::unique_ptr<PathDiagnostic> PD = generateEmptyDiagnosticForReport(R, SM);
  1650. if (GenerateDiagnostics) {
  1651. auto EndNotes = VisitorsDiagnostics.find(ErrorNode);
  1652. std::shared_ptr<PathDiagnosticPiece> LastPiece;
  1653. if (EndNotes != VisitorsDiagnostics.end()) {
  1654. assert(!EndNotes->second.empty());
  1655. LastPiece = EndNotes->second[0];
  1656. } else {
  1657. LastPiece = BugReporterVisitor::getDefaultEndPath(PDB, ErrorNode, *R);
  1658. }
  1659. PD->setEndOfPath(LastPiece);
  1660. }
  1661. PathDiagnosticLocation PrevLoc = PD->getLocation();
  1662. const ExplodedNode *NextNode = ErrorNode->getFirstPred();
  1663. while (NextNode) {
  1664. if (GenerateDiagnostics)
  1665. generatePathDiagnosticsForNode(
  1666. NextNode, *PD, PrevLoc, PDB, LCM, CallStack, IE, AddPathEdges);
  1667. auto VisitorNotes = VisitorsDiagnostics.find(NextNode);
  1668. NextNode = NextNode->getFirstPred();
  1669. if (!GenerateDiagnostics || VisitorNotes == VisitorsDiagnostics.end())
  1670. continue;
// This is a workaround for the inability to put a shared PathDiagnosticPiece
// into a FoldingSet.
  1673. std::set<llvm::FoldingSetNodeID> DeduplicationSet;
  1674. // Add pieces from custom visitors.
  1675. for (const auto &Note : VisitorNotes->second) {
  1676. llvm::FoldingSetNodeID ID;
  1677. Note->Profile(ID);
  1678. auto P = DeduplicationSet.insert(ID);
  1679. if (!P.second)
  1680. continue;
  1681. if (AddPathEdges)
  1682. addEdgeToPath(PD->getActivePath(), PrevLoc, Note->getLocation());
  1683. updateStackPiecesWithMessage(*Note, CallStack);
  1684. PD->getActivePath().push_front(Note);
  1685. }
  1686. }
  1687. if (AddPathEdges) {
  1688. // Add an edge to the start of the function.
  1689. // We'll prune it out later, but it helps make diagnostics more uniform.
  1690. const StackFrameContext *CalleeLC = PDB.LC->getStackFrame();
  1691. const Decl *D = CalleeLC->getDecl();
  1692. addEdgeToPath(PD->getActivePath(), PrevLoc,
  1693. PathDiagnosticLocation::createBegin(D, SM));
  1694. }
  1695. // Finally, prune the diagnostic path of uninteresting stuff.
  1696. if (!PD->path.empty()) {
  1697. if (R->shouldPrunePath() && Opts.ShouldPrunePaths) {
  1698. bool stillHasNotes =
  1699. removeUnneededCalls(PD->getMutablePieces(), R, LCM);
  1700. assert(stillHasNotes);
  1701. (void)stillHasNotes;
  1702. }
  1703. // Redirect all call pieces to have valid locations.
  1704. adjustCallLocations(PD->getMutablePieces());
  1705. removePiecesWithInvalidLocations(PD->getMutablePieces());
  1706. if (AddPathEdges) {
  1707. // Reduce the number of edges from a very conservative set
  1708. // to an aesthetically pleasing subset that conveys the
  1709. // necessary information.
  1710. OptimizedCallsSet OCS;
  1711. while (optimizeEdges(PD->getMutablePieces(), SM, OCS, LCM)) {}
  1712. // Drop the very first function-entry edge. It's not really necessary
  1713. // for top-level functions.
  1714. dropFunctionEntryEdge(PD->getMutablePieces(), LCM, SM);
  1715. }
  1716. // Remove messages that are basically the same, and edges that may not
  1717. // make sense.
  1718. // We have to do this after edge optimization in the Extensive mode.
  1719. removeRedundantMsgs(PD->getMutablePieces());
  1720. removeEdgesToDefaultInitializers(PD->getMutablePieces());
  1721. }
  1722. if (GenerateDiagnostics && Opts.ShouldDisplayMacroExpansions)
  1723. CompactMacroExpandedPieces(PD->getMutablePieces(), SM);
  1724. return PD;
  1725. }
  1726. //===----------------------------------------------------------------------===//
  1727. // Methods for BugType and subclasses.
  1728. //===----------------------------------------------------------------------===//
  1729. void BugType::anchor() {}
  1730. void BuiltinBug::anchor() {}
  1731. //===----------------------------------------------------------------------===//
  1732. // Methods for BugReport and subclasses.
  1733. //===----------------------------------------------------------------------===//
  1734. void BugReport::NodeResolver::anchor() {}
  1735. void BugReport::addVisitor(std::unique_ptr<BugReporterVisitor> visitor) {
  1736. if (!visitor)
  1737. return;
  1738. llvm::FoldingSetNodeID ID;
  1739. visitor->Profile(ID);
  1740. void *InsertPos = nullptr;
  1741. if (CallbacksSet.FindNodeOrInsertPos(ID, InsertPos)) {
  1742. return;
  1743. }
  1744. Callbacks.push_back(std::move(visitor));
  1745. }
  1746. void BugReport::clearVisitors() {
  1747. Callbacks.clear();
  1748. }
  1749. BugReport::~BugReport() {
  1750. while (!interestingSymbols.empty()) {
  1751. popInterestingSymbolsAndRegions();
  1752. }
  1753. }
  1754. const Decl *BugReport::getDeclWithIssue() const {
  1755. if (DeclWithIssue)
  1756. return DeclWithIssue;
  1757. const ExplodedNode *N = getErrorNode();
  1758. if (!N)
  1759. return nullptr;
  1760. const LocationContext *LC = N->getLocationContext();
  1761. return LC->getStackFrame()->getDecl();
  1762. }
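// Profile the report so that reports about the same bug at the same location
// end up in the same equivalence class.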
  1763. void BugReport::Profile(llvm::FoldingSetNodeID& hash) const {
  1764. hash.AddPointer(&BT);
  1765. hash.AddString(Description);
  1766. PathDiagnosticLocation UL = getUniqueingLocation();
  1767. if (UL.isValid()) {
  1768. UL.Profile(hash);
  1769. } else if (Location.isValid()) {
  1770. Location.Profile(hash);
  1771. } else {
  1772. assert(ErrorNode);
  1773. hash.AddPointer(GetCurrentOrPreviousStmt(ErrorNode));
  1774. }
  1775. for (SourceRange range : Ranges) {
  1776. if (!range.isValid())
  1777. continue;
  1778. hash.AddInteger(range.getBegin().getRawEncoding());
  1779. hash.AddInteger(range.getEnd().getRawEncoding());
  1780. }
  1781. }
  1782. void BugReport::markInteresting(SymbolRef sym) {
  1783. if (!sym)
  1784. return;
  1785. getInterestingSymbols().insert(sym);
  1786. if (const auto *meta = dyn_cast<SymbolMetadata>(sym))
  1787. getInterestingRegions().insert(meta->getRegion());
  1788. }
  1789. void BugReport::markInteresting(const MemRegion *R) {
  1790. if (!R)
  1791. return;
  1792. R = R->getBaseRegion();
  1793. getInterestingRegions().insert(R);
  1794. if (const auto *SR = dyn_cast<SymbolicRegion>(R))
  1795. getInterestingSymbols().insert(SR->getSymbol());
  1796. }
  1797. void BugReport::markInteresting(SVal V) {
  1798. markInteresting(V.getAsRegion());
  1799. markInteresting(V.getAsSymbol());
  1800. }
  1801. void BugReport::markInteresting(const LocationContext *LC) {
  1802. if (!LC)
  1803. return;
  1804. InterestingLocationContexts.insert(LC);
  1805. }
  1806. bool BugReport::isInteresting(SVal V) {
  1807. return isInteresting(V.getAsRegion()) || isInteresting(V.getAsSymbol());
  1808. }
  1809. bool BugReport::isInteresting(SymbolRef sym) {
  1810. if (!sym)
  1811. return false;
  1812. // We don't currently consider metadata symbols to be interesting
  1813. // even if we know their region is interesting. Is that correct behavior?
  1814. return getInterestingSymbols().count(sym);
  1815. }
  1816. bool BugReport::isInteresting(const MemRegion *R) {
  1817. if (!R)
  1818. return false;
  1819. R = R->getBaseRegion();
  1820. bool b = getInterestingRegions().count(R);
  1821. if (b)
  1822. return true;
  1823. if (const auto *SR = dyn_cast<SymbolicRegion>(R))
  1824. return getInterestingSymbols().count(SR->getSymbol());
  1825. return false;
  1826. }
  1827. bool BugReport::isInteresting(const LocationContext *LC) {
  1828. if (!LC)
  1829. return false;
  1830. return InterestingLocationContexts.count(LC);
  1831. }
  1832. void BugReport::lazyInitializeInterestingSets() {
  1833. if (interestingSymbols.empty()) {
  1834. interestingSymbols.push_back(new Symbols());
  1835. interestingRegions.push_back(new Regions());
  1836. }
  1837. }
  1838. BugReport::Symbols &BugReport::getInterestingSymbols() {
  1839. lazyInitializeInterestingSets();
  1840. return *interestingSymbols.back();
  1841. }
  1842. BugReport::Regions &BugReport::getInterestingRegions() {
  1843. lazyInitializeInterestingSets();
  1844. return *interestingRegions.back();
  1845. }
  1846. void BugReport::pushInterestingSymbolsAndRegions() {
  1847. interestingSymbols.push_back(new Symbols(getInterestingSymbols()));
  1848. interestingRegions.push_back(new Regions(getInterestingRegions()));
  1849. }
  1850. void BugReport::popInterestingSymbolsAndRegions() {
  1851. delete interestingSymbols.pop_back_val();
  1852. delete interestingRegions.pop_back_val();
  1853. }
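// Return the statement associated with the error node; if the error is
// reported at the entrance of the CFG exit block, fall back to the previous
// statement.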
  1854. const Stmt *BugReport::getStmt() const {
  1855. if (!ErrorNode)
  1856. return nullptr;
  1857. ProgramPoint ProgP = ErrorNode->getLocation();
  1858. const Stmt *S = nullptr;
  1859. if (Optional<BlockEntrance> BE = ProgP.getAs<BlockEntrance>()) {
  1860. CFGBlock &Exit = ProgP.getLocationContext()->getCFG()->getExit();
  1861. if (BE->getBlock() == &Exit)
  1862. S = GetPreviousStmt(ErrorNode);
  1863. }
  1864. if (!S)
  1865. S = PathDiagnosticLocation::getStmt(ErrorNode);
  1866. return S;
  1867. }
  1868. llvm::iterator_range<BugReport::ranges_iterator> BugReport::getRanges() {
  1869. // If no custom ranges, add the range of the statement corresponding to
  1870. // the error node.
  1871. if (Ranges.empty()) {
  1872. if (const auto *E = dyn_cast_or_null<Expr>(getStmt()))
  1873. addRange(E->getSourceRange());
  1874. else
  1875. return llvm::make_range(ranges_iterator(), ranges_iterator());
  1876. }
  1877. // User-specified absence of range info.
  1878. if (Ranges.size() == 1 && !Ranges.begin()->isValid())
  1879. return llvm::make_range(ranges_iterator(), ranges_iterator());
  1880. return llvm::make_range(Ranges.begin(), Ranges.end());
  1881. }
  1882. PathDiagnosticLocation BugReport::getLocation(const SourceManager &SM) const {
  1883. if (ErrorNode) {
  1884. assert(!Location.isValid() &&
  1885. "Either Location or ErrorNode should be specified but not both.");
  1886. return PathDiagnosticLocation::createEndOfPath(ErrorNode, SM);
  1887. }
  1888. assert(Location.isValid());
  1889. return Location;
  1890. }
  1891. //===----------------------------------------------------------------------===//
  1892. // Methods for BugReporter and subclasses.
  1893. //===----------------------------------------------------------------------===//
  1894. BugReportEquivClass::~BugReportEquivClass() = default;
  1895. GRBugReporter::~GRBugReporter() = default;
  1896. BugReporterData::~BugReporterData() = default;
  1897. ExplodedGraph &GRBugReporter::getGraph() { return Eng.getGraph(); }
  1898. ProgramStateManager&
  1899. GRBugReporter::getStateManager() { return Eng.getStateManager(); }
  1900. BugReporter::~BugReporter() {
  1901. FlushReports();
  1902. // Free the bug reports we are tracking.
  1903. for (const auto I : EQClassesVector)
  1904. delete I;
  1905. }
  1906. void BugReporter::FlushReports() {
  1907. if (BugTypes.isEmpty())
  1908. return;
  1909. // We need to flush reports in deterministic order to ensure the order
  1910. // of the reports is consistent between runs.
  1911. for (const auto EQ : EQClassesVector)
  1912. FlushReport(*EQ);
  1913. // BugReporter owns and deletes only BugTypes created implicitly through
  1914. // EmitBasicReport.
  1915. // FIXME: There are leaks from checkers that assume that the BugTypes they
  1916. // create will be destroyed by the BugReporter.
  1917. llvm::DeleteContainerSeconds(StrBugTypes);
  1918. // Remove all references to the BugType objects.
  1919. BugTypes = F.getEmptySet();
  1920. }
  1921. //===----------------------------------------------------------------------===//
  1922. // PathDiagnostics generation.
  1923. //===----------------------------------------------------------------------===//
  1924. namespace {
  1925. /// A wrapper around a report graph, which contains only a single path, and its
  1926. /// node maps.
  1927. class ReportGraph {
  1928. public:
  1929. InterExplodedGraphMap BackMap;
  1930. std::unique_ptr<ExplodedGraph> Graph;
  1931. const ExplodedNode *ErrorNode;
  1932. size_t Index;
  1933. };
  1934. /// A wrapper around a trimmed graph and its node maps.
  1935. class TrimmedGraph {
  1936. InterExplodedGraphMap InverseMap;
  1937. using PriorityMapTy = llvm::DenseMap<const ExplodedNode *, unsigned>;
  1938. PriorityMapTy PriorityMap;
  1939. using NodeIndexPair = std::pair<const ExplodedNode *, size_t>;
  1940. SmallVector<NodeIndexPair, 32> ReportNodes;
  1941. std::unique_ptr<ExplodedGraph> G;
  1942. /// A helper class for sorting ExplodedNodes by priority.
  1943. template <bool Descending>
  1944. class PriorityCompare {
  1945. const PriorityMapTy &PriorityMap;
  1946. public:
  1947. PriorityCompare(const PriorityMapTy &M) : PriorityMap(M) {}
  1948. bool operator()(const ExplodedNode *LHS, const ExplodedNode *RHS) const {
  1949. PriorityMapTy::const_iterator LI = PriorityMap.find(LHS);
  1950. PriorityMapTy::const_iterator RI = PriorityMap.find(RHS);
  1951. PriorityMapTy::const_iterator E = PriorityMap.end();
  1952. if (LI == E)
  1953. return Descending;
  1954. if (RI == E)
  1955. return !Descending;
  1956. return Descending ? LI->second > RI->second
  1957. : LI->second < RI->second;
  1958. }
  1959. bool operator()(const NodeIndexPair &LHS, const NodeIndexPair &RHS) const {
  1960. return (*this)(LHS.first, RHS.first);
  1961. }
  1962. };
  1963. public:
  1964. TrimmedGraph(const ExplodedGraph *OriginalGraph,
  1965. ArrayRef<const ExplodedNode *> Nodes);
  1966. bool popNextReportGraph(ReportGraph &GraphWrapper);
  1967. };
  1968. } // namespace
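// Build the trimmed graph for the given error nodes and assign each node a
// BFS priority so that the shortest error paths can be extracted first.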
  1969. TrimmedGraph::TrimmedGraph(const ExplodedGraph *OriginalGraph,
  1970. ArrayRef<const ExplodedNode *> Nodes) {
  1971. // The trimmed graph is created in the body of the constructor to ensure
  1972. // that the DenseMaps have been initialized already.
  1973. InterExplodedGraphMap ForwardMap;
  1974. G = OriginalGraph->trim(Nodes, &ForwardMap, &InverseMap);
  1975. // Find the (first) error node in the trimmed graph. We just need to consult
  1976. // the node map which maps from nodes in the original graph to nodes
  1977. // in the new graph.
  1978. llvm::SmallPtrSet<const ExplodedNode *, 32> RemainingNodes;
  1979. for (unsigned i = 0, count = Nodes.size(); i < count; ++i) {
  1980. if (const ExplodedNode *NewNode = ForwardMap.lookup(Nodes[i])) {
  1981. ReportNodes.push_back(std::make_pair(NewNode, i));
  1982. RemainingNodes.insert(NewNode);
  1983. }
  1984. }
  1985. assert(!RemainingNodes.empty() && "No error node found in the trimmed graph");
  1986. // Perform a forward BFS to find all the shortest paths.
  1987. std::queue<const ExplodedNode *> WS;
  1988. assert(G->num_roots() == 1);
  1989. WS.push(*G->roots_begin());
  1990. unsigned Priority = 0;
  1991. while (!WS.empty()) {
  1992. const ExplodedNode *Node = WS.front();
  1993. WS.pop();
  1994. PriorityMapTy::iterator PriorityEntry;
  1995. bool IsNew;
  1996. std::tie(PriorityEntry, IsNew) =
  1997. PriorityMap.insert(std::make_pair(Node, Priority));
  1998. ++Priority;
  1999. if (!IsNew) {
  2000. assert(PriorityEntry->second <= Priority);
  2001. continue;
  2002. }
  2003. if (RemainingNodes.erase(Node))
  2004. if (RemainingNodes.empty())
  2005. break;
for (ExplodedNode::const_succ_iterator I = Node->succ_begin(),
E = Node->succ_end();
I != E; ++I)
  2009. WS.push(*I);
  2010. }
  2011. // Sort the error paths from longest to shortest.
  2012. llvm::sort(ReportNodes, PriorityCompare<true>(PriorityMap));
  2013. }
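// Extract the next single-path report graph by walking from the error node
// back to the root, always taking the predecessor with the lowest BFS
// priority; returns false once all report nodes have been consumed.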
  2014. bool TrimmedGraph::popNextReportGraph(ReportGraph &GraphWrapper) {
  2015. if (ReportNodes.empty())
  2016. return false;
  2017. const ExplodedNode *OrigN;
  2018. std::tie(OrigN, GraphWrapper.Index) = ReportNodes.pop_back_val();
  2019. assert(PriorityMap.find(OrigN) != PriorityMap.end() &&
  2020. "error node not accessible from root");
  2021. // Create a new graph with a single path. This is the graph
  2022. // that will be returned to the caller.
  2023. auto GNew = llvm::make_unique<ExplodedGraph>();
  2024. GraphWrapper.BackMap.clear();
// Now walk from the error node up the BFS path, always taking the
// predecessor with the lowest BFS number.
  2027. ExplodedNode *Succ = nullptr;
  2028. while (true) {
  2029. // Create the equivalent node in the new graph with the same state
  2030. // and location.
  2031. ExplodedNode *NewN = GNew->createUncachedNode(OrigN->getLocation(), OrigN->getState(),
  2032. OrigN->isSink());
  2033. // Store the mapping to the original node.
  2034. InterExplodedGraphMap::const_iterator IMitr = InverseMap.find(OrigN);
  2035. assert(IMitr != InverseMap.end() && "No mapping to original node.");
  2036. GraphWrapper.BackMap[NewN] = IMitr->second;
  2037. // Link up the new node with the previous node.
  2038. if (Succ)
  2039. Succ->addPredecessor(NewN, *GNew);
  2040. else
  2041. GraphWrapper.ErrorNode = NewN;
  2042. Succ = NewN;
  2043. // Are we at the final node?
  2044. if (OrigN->pred_empty()) {
  2045. GNew->addRoot(NewN);
  2046. break;
  2047. }
// Find the next predecessor node. We choose the node that is marked
// with the lowest BFS number.
  2050. OrigN = *std::min_element(OrigN->pred_begin(), OrigN->pred_end(),
  2051. PriorityCompare<false>(PriorityMap));
  2052. }
  2053. GraphWrapper.Graph = std::move(GNew);
  2054. return true;
  2055. }
/// CompactMacroExpandedPieces - This function postprocesses a PathDiagnostic
/// object and collapses PathDiagnosticPieces that are expanded by macros.
  2058. static void CompactMacroExpandedPieces(PathPieces &path,
  2059. const SourceManager& SM) {
  2060. using MacroStackTy =
  2061. std::vector<
  2062. std::pair<std::shared_ptr<PathDiagnosticMacroPiece>, SourceLocation>>;
  2063. using PiecesTy = std::vector<std::shared_ptr<PathDiagnosticPiece>>;
  2064. MacroStackTy MacroStack;
  2065. PiecesTy Pieces;
  2066. for (PathPieces::const_iterator I = path.begin(), E = path.end();
  2067. I != E; ++I) {
  2068. const auto &piece = *I;
  2069. // Recursively compact calls.
  2070. if (auto *call = dyn_cast<PathDiagnosticCallPiece>(&*piece)) {
  2071. CompactMacroExpandedPieces(call->path, SM);
  2072. }
  2073. // Get the location of the PathDiagnosticPiece.
  2074. const FullSourceLoc Loc = piece->getLocation().asLocation();
  2075. // Determine the instantiation location, which is the location we group
  2076. // related PathDiagnosticPieces.
  2077. SourceLocation InstantiationLoc = Loc.isMacroID() ?
  2078. SM.getExpansionLoc(Loc) :
  2079. SourceLocation();
  2080. if (Loc.isFileID()) {
  2081. MacroStack.clear();
  2082. Pieces.push_back(piece);
  2083. continue;
  2084. }
  2085. assert(Loc.isMacroID());
  2086. // Is the PathDiagnosticPiece within the same macro group?
  2087. if (!MacroStack.empty() && InstantiationLoc == MacroStack.back().second) {
  2088. MacroStack.back().first->subPieces.push_back(piece);
  2089. continue;
  2090. }
// We aren't in the same group. Are we descending into a new macro
// or are we part of an old one?
  2093. std::shared_ptr<PathDiagnosticMacroPiece> MacroGroup;
  2094. SourceLocation ParentInstantiationLoc = InstantiationLoc.isMacroID() ?
  2095. SM.getExpansionLoc(Loc) :
  2096. SourceLocation();
  2097. // Walk the entire macro stack.
  2098. while (!MacroStack.empty()) {
  2099. if (InstantiationLoc == MacroStack.back().second) {
  2100. MacroGroup = MacroStack.back().first;
  2101. break;
  2102. }
  2103. if (ParentInstantiationLoc == MacroStack.back().second) {
  2104. MacroGroup = MacroStack.back().first;
  2105. break;
  2106. }
  2107. MacroStack.pop_back();
  2108. }
  2109. if (!MacroGroup || ParentInstantiationLoc == MacroStack.back().second) {
  2110. // Create a new macro group and add it to the stack.
  2111. auto NewGroup = std::make_shared<PathDiagnosticMacroPiece>(
  2112. PathDiagnosticLocation::createSingleLocation(piece->getLocation()));
  2113. if (MacroGroup)
  2114. MacroGroup->subPieces.push_back(NewGroup);
  2115. else {
  2116. assert(InstantiationLoc.isFileID());
  2117. Pieces.push_back(NewGroup);
  2118. }
  2119. MacroGroup = NewGroup;
  2120. MacroStack.push_back(std::make_pair(MacroGroup, InstantiationLoc));
  2121. }
  2122. // Finally, add the PathDiagnosticPiece to the group.
  2123. MacroGroup->subPieces.push_back(piece);
  2124. }
  2125. // Now take the pieces and construct a new PathDiagnostic.
  2126. path.clear();
  2127. path.insert(path.end(), Pieces.begin(), Pieces.end());
  2128. }
  2129. /// Generate notes from all visitors.
  2130. /// Notes associated with {@code ErrorNode} are generated using
  2131. /// {@code getEndPath}, and the rest are generated with {@code VisitNode}.
  2132. static std::unique_ptr<VisitorsDiagnosticsTy>
  2133. generateVisitorsDiagnostics(BugReport *R, const ExplodedNode *ErrorNode,
  2134. BugReporterContext &BRC) {
  2135. auto Notes = llvm::make_unique<VisitorsDiagnosticsTy>();
  2136. BugReport::VisitorList visitors;
  2137. // Run visitors on all nodes starting from the node *before* the last one.
  2138. // The last node is reserved for notes generated with {@code getEndPath}.
  2139. const ExplodedNode *NextNode = ErrorNode->getFirstPred();
  2140. while (NextNode) {
  2141. // At each iteration, move all visitors from report to visitor list.
  2142. for (BugReport::visitor_iterator I = R->visitor_begin(),
  2143. E = R->visitor_end();
  2144. I != E; ++I) {
  2145. visitors.push_back(std::move(*I));
  2146. }
  2147. R->clearVisitors();
  2148. const ExplodedNode *Pred = NextNode->getFirstPred();
  2149. if (!Pred) {
  2150. std::shared_ptr<PathDiagnosticPiece> LastPiece;
  2151. for (auto &V : visitors) {
  2152. V->finalizeVisitor(BRC, ErrorNode, *R);
  2153. if (auto Piece = V->getEndPath(BRC, ErrorNode, *R)) {
  2154. assert(!LastPiece &&
  2155. "There can only be one final piece in a diagnostic.");
  2156. LastPiece = std::move(Piece);
  2157. (*Notes)[ErrorNode].push_back(LastPiece);
  2158. }
  2159. }
  2160. break;
  2161. }
  2162. for (auto &V : visitors) {
  2163. auto P = V->VisitNode(NextNode, BRC, *R);
  2164. if (P)
  2165. (*Notes)[NextNode].push_back(std::move(P));
  2166. }
  2167. if (!R->isValid())
  2168. break;
  2169. NextNode = Pred;
  2170. }
  2171. return Notes;
  2172. }
  2173. /// Find a non-invalidated report for a given equivalence class,
  2174. /// and return together with a cache of visitors notes.
  2175. /// If none found, return a nullptr paired with an empty cache.
  2176. static
  2177. std::pair<BugReport*, std::unique_ptr<VisitorsDiagnosticsTy>> findValidReport(
  2178. TrimmedGraph &TrimG,
  2179. ReportGraph &ErrorGraph,
  2180. ArrayRef<BugReport *> &bugReports,
  2181. AnalyzerOptions &Opts,
  2182. GRBugReporter &Reporter) {
  2183. while (TrimG.popNextReportGraph(ErrorGraph)) {
  2184. // Find the BugReport with the original location.
  2185. assert(ErrorGraph.Index < bugReports.size());
  2186. BugReport *R = bugReports[ErrorGraph.Index];
  2187. assert(R && "No original report found for sliced graph.");
  2188. assert(R->isValid() && "Report selected by trimmed graph marked invalid.");
  2189. const ExplodedNode *ErrorNode = ErrorGraph.ErrorNode;
// Register refutation visitors first; if they mark the bug invalid, no
// further analysis is required.
  2192. R->addVisitor(llvm::make_unique<LikelyFalsePositiveSuppressionBRVisitor>());
  2193. // Register additional node visitors.
  2194. R->addVisitor(llvm::make_unique<NilReceiverBRVisitor>());
  2195. R->addVisitor(llvm::make_unique<ConditionBRVisitor>());
  2196. R->addVisitor(llvm::make_unique<CXXSelfAssignmentBRVisitor>());
  2197. BugReporterContext BRC(Reporter, ErrorGraph.BackMap);
  2198. // Run all visitors on a given graph, once.
  2199. std::unique_ptr<VisitorsDiagnosticsTy> visitorNotes =
  2200. generateVisitorsDiagnostics(R, ErrorNode, BRC);
  2201. if (R->isValid()) {
  2202. if (Opts.ShouldCrosscheckWithZ3) {
  2203. // If crosscheck is enabled, remove all visitors, add the refutation
  2204. // visitor and check again
  2205. R->clearVisitors();
  2206. R->addVisitor(llvm::make_unique<FalsePositiveRefutationBRVisitor>());
// We don't overwrite the notes inserted by other visitors because the
// refutation manager does not add any new notes to the path.
  2209. generateVisitorsDiagnostics(R, ErrorGraph.ErrorNode, BRC);
  2210. }
  2211. // Check if the bug is still valid
  2212. if (R->isValid())
  2213. return std::make_pair(R, std::move(visitorNotes));
  2214. }
  2215. }
  2216. return std::make_pair(nullptr, llvm::make_unique<VisitorsDiagnosticsTy>());
  2217. }
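// Generate one path diagnostic per consumer from a report in the equivalence
// class that survives visitor invalidation; the map stays empty if every
// report has been invalidated.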
  2218. std::unique_ptr<DiagnosticForConsumerMapTy>
  2219. GRBugReporter::generatePathDiagnostics(
  2220. ArrayRef<PathDiagnosticConsumer *> consumers,
  2221. ArrayRef<BugReport *> &bugReports) {
  2222. assert(!bugReports.empty());
  2223. auto Out = llvm::make_unique<DiagnosticForConsumerMapTy>();
  2224. bool HasValid = false;
  2225. SmallVector<const ExplodedNode *, 32> errorNodes;
  2226. for (const auto I : bugReports) {
  2227. if (I->isValid()) {
  2228. HasValid = true;
  2229. errorNodes.push_back(I->getErrorNode());
  2230. } else {
  2231. // Keep the errorNodes list in sync with the bugReports list.
  2232. errorNodes.push_back(nullptr);
  2233. }
  2234. }
  2235. // If all the reports have been marked invalid by a previous path generation,
  2236. // we're done.
  2237. if (!HasValid)
  2238. return Out;
  2239. TrimmedGraph TrimG(&getGraph(), errorNodes);
  2240. ReportGraph ErrorGraph;
  2241. auto ReportInfo = findValidReport(TrimG, ErrorGraph, bugReports,
  2242. getAnalyzerOptions(), *this);
  2243. BugReport *R = ReportInfo.first;
  2244. if (R && R->isValid()) {
  2245. const ExplodedNode *ErrorNode = ErrorGraph.ErrorNode;
  2246. for (PathDiagnosticConsumer *PC : consumers) {
  2247. PathDiagnosticBuilder PDB(*this, R, ErrorGraph.BackMap, PC);
  2248. std::unique_ptr<PathDiagnostic> PD = generatePathDiagnosticForConsumer(
  2249. PC->getGenerationScheme(), PDB, ErrorNode, *ReportInfo.second);
  2250. (*Out)[PC] = std::move(PD);
  2251. }
  2252. }
  2253. return Out;
  2254. }
  2255. void BugReporter::Register(BugType *BT) {
  2256. BugTypes = F.add(BugTypes, BT);
  2257. }
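// Add the report to its equivalence class, creating the class if this is the
// first report with this profile. Reports against autosynthesized bodies that
// do not come from model files are dropped.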
  2258. void BugReporter::emitReport(std::unique_ptr<BugReport> R) {
  2259. if (const ExplodedNode *E = R->getErrorNode()) {
  2260. // An error node must either be a sink or have a tag, otherwise
  2261. // it could get reclaimed before the path diagnostic is created.
  2262. assert((E->isSink() || E->getLocation().getTag()) &&
  2263. "Error node must either be a sink or have a tag");
  2264. const AnalysisDeclContext *DeclCtx =
  2265. E->getLocationContext()->getAnalysisDeclContext();
// The source of an autosynthesized body can be a handcrafted AST or a
// model file. Locations from handcrafted ASTs have no valid source
// locations and have to be discarded. Locations from model files should
// be preserved for processing and reporting.
  2270. if (DeclCtx->isBodyAutosynthesized() &&
  2271. !DeclCtx->isBodyAutosynthesizedFromModelFile())
  2272. return;
  2273. }
  2274. bool ValidSourceLoc = R->getLocation(getSourceManager()).isValid();
  2275. assert(ValidSourceLoc);
  2276. // If we mess up in a release build, we'd still prefer to just drop the bug
  2277. // instead of trying to go on.
  2278. if (!ValidSourceLoc)
  2279. return;
  2280. // Compute the bug report's hash to determine its equivalence class.
  2281. llvm::FoldingSetNodeID ID;
  2282. R->Profile(ID);
// Look up the equivalence class. If there isn't one, create it.
  2284. BugType& BT = R->getBugType();
  2285. Register(&BT);
  2286. void *InsertPos;
  2287. BugReportEquivClass* EQ = EQClasses.FindNodeOrInsertPos(ID, InsertPos);
  2288. if (!EQ) {
  2289. EQ = new BugReportEquivClass(std::move(R));
  2290. EQClasses.InsertNode(EQ, InsertPos);
  2291. EQClassesVector.push_back(EQ);
  2292. } else
  2293. EQ->AddReport(std::move(R));
  2294. }
  2295. //===----------------------------------------------------------------------===//
  2296. // Emitting reports in equivalence classes.
  2297. //===----------------------------------------------------------------------===//
  2298. namespace {
  2299. struct FRIEC_WLItem {
  2300. const ExplodedNode *N;
  2301. ExplodedNode::const_succ_iterator I, E;
  2302. FRIEC_WLItem(const ExplodedNode *n)
  2303. : N(n), I(N->succ_begin()), E(N->succ_end()) {}
  2304. };
  2305. } // namespace
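// Find the CFG block corresponding to the node's program point, either
// directly from a BlockEntrance or via the CFG statement map.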
  2306. static const CFGBlock *findBlockForNode(const ExplodedNode *N) {
  2307. ProgramPoint P = N->getLocation();
  2308. if (auto BEP = P.getAs<BlockEntrance>())
  2309. return BEP->getBlock();
  2310. // Find the node's current statement in the CFG.
  2311. if (const Stmt *S = PathDiagnosticLocation::getStmt(N))
  2312. return N->getLocationContext()->getAnalysisDeclContext()
  2313. ->getCFGStmtMap()->getBlock(S);
  2314. return nullptr;
  2315. }
// Returns true if, by simply looking at the block, we can be sure that it
// results in a sink during analysis. This is useful to know when the analysis
// was interrupted, and we try to figure out whether it would sink eventually.
// There may be many more reasons why a sink would appear during analysis
// (e.g. checkers may generate sinks arbitrarily), but here we only consider
// sinks that would be obvious by looking at the CFG.
  2322. static bool isImmediateSinkBlock(const CFGBlock *Blk) {
  2323. if (Blk->hasNoReturnElement())
  2324. return true;
  2325. // FIXME: Throw-expressions are currently generating sinks during analysis:
  2326. // they're not supported yet, and also often used for actually terminating
  2327. // the program. So we should treat them as sinks in this analysis as well,
  2328. // at least for now, but once we have better support for exceptions,
  2329. // we'd need to carefully handle the case when the throw is being
  2330. // immediately caught.
  2331. if (std::any_of(Blk->begin(), Blk->end(), [](const CFGElement &Elm) {
  2332. if (Optional<CFGStmt> StmtElm = Elm.getAs<CFGStmt>())
  2333. if (isa<CXXThrowExpr>(StmtElm->getStmt()))
  2334. return true;
  2335. return false;
  2336. }))
  2337. return true;
  2338. return false;
  2339. }
  2340. // Returns true if by looking at the CFG surrounding the node's program
  2341. // point, we can be sure that any analysis starting from this point would
  2342. // eventually end with a sink. We scan the child CFG blocks in a depth-first
  2343. // manner and see if all paths eventually end up in an immediate sink block.
  2344. static bool isInevitablySinking(const ExplodedNode *N) {
  2345. const CFG &Cfg = N->getCFG();
  2346. const CFGBlock *StartBlk = findBlockForNode(N);
  2347. if (!StartBlk)
  2348. return false;
  2349. if (isImmediateSinkBlock(StartBlk))
  2350. return true;
  2351. llvm::SmallVector<const CFGBlock *, 32> DFSWorkList;
  2352. llvm::SmallPtrSet<const CFGBlock *, 32> Visited;
  2353. DFSWorkList.push_back(StartBlk);
  2354. while (!DFSWorkList.empty()) {
  2355. const CFGBlock *Blk = DFSWorkList.back();
  2356. DFSWorkList.pop_back();
  2357. Visited.insert(Blk);
  2358. // If at least one path reaches the CFG exit, it means that control is
  2359. // returned to the caller. For now, say that we are not sure what
  2360. // happens next. If necessary, this can be improved to analyze
  2361. // the parent StackFrameContext's call site in a similar manner.
  2362. if (Blk == &Cfg.getExit())
  2363. return false;
  2364. for (const auto &Succ : Blk->succs()) {
  2365. if (const CFGBlock *SuccBlk = Succ.getReachableBlock()) {
  2366. if (!isImmediateSinkBlock(SuccBlk) && !Visited.count(SuccBlk)) {
  2367. // If the block has reachable child blocks that aren't no-return,
  2368. // add them to the worklist.
  2369. DFSWorkList.push_back(SuccBlk);
  2370. }
  2371. }
  2372. }
  2373. }
  2374. // Nothing reached the exit. It can only mean one thing: there's no return.
  2375. return true;
  2376. }
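// Pick a representative report from the equivalence class and collect the
// reports that should actually be emitted, honoring suppress-on-sink bug
// types by skipping reports whose paths all end in sinks.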
static BugReport *
FindReportInEquivalenceClass(BugReportEquivClass& EQ,
                             SmallVectorImpl<BugReport*> &bugReports) {
  BugReportEquivClass::iterator I = EQ.begin(), E = EQ.end();
  assert(I != E);
  BugType& BT = I->getBugType();

  // If we don't need to suppress any of the nodes because they are
  // post-dominated by a sink, simply add all the nodes in the equivalence
  // class to 'bugReports'. Any of the reports will serve as a "representative"
  // report.
  if (!BT.isSuppressOnSink()) {
    BugReport *R = &*I;
    for (auto &I : EQ) {
      const ExplodedNode *N = I.getErrorNode();
      if (N) {
        R = &I;
        bugReports.push_back(R);
      }
    }
    return R;
  }
  // For bug reports that should be suppressed when all paths are
  // post-dominated by a sink node, iterate through the reports in the
  // equivalence class until we find one that isn't post-dominated (if one
  // exists). We use a DFS traversal of the ExplodedGraph to find a non-sink
  // node. We could write this as a recursive function, but we don't want to
  // risk blowing out the stack for very long paths.
  BugReport *exampleReport = nullptr;

  for (; I != E; ++I) {
    const ExplodedNode *errorNode = I->getErrorNode();

    if (!errorNode)
      continue;
    if (errorNode->isSink()) {
      llvm_unreachable(
          "BugType::isSuppressOnSink() should not be 'true' for sink end nodes");
    }

    // No successors? By definition this node isn't post-dominated by a sink.
    if (errorNode->succ_empty()) {
      bugReports.push_back(&*I);
      if (!exampleReport)
        exampleReport = &*I;
      continue;
    }
    // See if we are in a no-return CFG block. If so, treat this similarly
    // to being post-dominated by a sink. This works better when the analysis
    // is incomplete and we have never reached the no-return function call(s)
    // that we'd inevitably bump into on this path.
    if (isInevitablySinking(errorNode))
      continue;

    // At this point we know that 'N' is not a sink and it has at least one
    // successor. Use a DFS worklist to find a non-sink end-of-path node.
    using WLItem = FRIEC_WLItem;
    using DFSWorkList = SmallVector<WLItem, 10>;

    llvm::DenseMap<const ExplodedNode *, unsigned> Visited;

    DFSWorkList WL;
    WL.push_back(errorNode);
    Visited[errorNode] = 1;

    while (!WL.empty()) {
      WLItem &WI = WL.back();
      assert(!WI.N->succ_empty());

      for (; WI.I != WI.E; ++WI.I) {
        const ExplodedNode *Succ = *WI.I;
        // End-of-path node?
        if (Succ->succ_empty()) {
          // If we found an end-of-path node that is not a sink.
          if (!Succ->isSink()) {
            bugReports.push_back(&*I);
            if (!exampleReport)
              exampleReport = &*I;
            WL.clear();
            break;
          }
          // Found a sink? Continue on to the next successor.
          continue;
        }
        // Mark the successor as visited. If it hasn't been explored,
        // enqueue it to the DFS worklist.
        unsigned &mark = Visited[Succ];
        if (!mark) {
          mark = 1;
          WL.push_back(Succ);
          break;
        }
      }

      // The worklist may have been cleared at this point. First
      // check if it is empty before checking the last item.
      if (!WL.empty() && &WL.back() == &WI)
        WL.pop_back();
    }
  }
  // exampleReport will be null if all the nodes in the equivalence class
  // were post-dominated by sinks.
  return exampleReport;
}
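// Produce the final diagnostics for one equivalence class and hand them to
// every registered PathDiagnosticConsumer. Reports whose generated paths come
// back empty still get a single event piece at the report location, and the
// report's extra notes and metadata are attached before dispatch.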
void BugReporter::FlushReport(BugReportEquivClass& EQ) {
  SmallVector<BugReport*, 10> bugReports;
  BugReport *report = FindReportInEquivalenceClass(EQ, bugReports);
  if (!report)
    return;

  ArrayRef<PathDiagnosticConsumer*> Consumers = getPathDiagnosticConsumers();
  std::unique_ptr<DiagnosticForConsumerMapTy> Diagnostics =
      generateDiagnosticForConsumerMap(report, Consumers, bugReports);

  for (auto &P : *Diagnostics) {
    PathDiagnosticConsumer *Consumer = P.first;
    std::unique_ptr<PathDiagnostic> &PD = P.second;

    // If the path is empty, generate a single step path with the location
    // of the issue.
    if (PD->path.empty()) {
      PathDiagnosticLocation L = report->getLocation(getSourceManager());
      auto piece = llvm::make_unique<PathDiagnosticEventPiece>(
          L, report->getDescription());
      for (SourceRange Range : report->getRanges())
        piece->addRange(Range);
      PD->setEndOfPath(std::move(piece));
    }

    PathPieces &Pieces = PD->getMutablePieces();
    if (getAnalyzerOptions().ShouldDisplayNotesAsEvents) {
      // For path diagnostic consumers that don't support extra notes,
      // we may optionally convert those to path notes.
      for (auto I = report->getNotes().rbegin(),
                E = report->getNotes().rend(); I != E; ++I) {
        PathDiagnosticNotePiece *Piece = I->get();
        auto ConvertedPiece = std::make_shared<PathDiagnosticEventPiece>(
            Piece->getLocation(), Piece->getString());
        for (const auto &R : Piece->getRanges())
          ConvertedPiece->addRange(R);

        Pieces.push_front(std::move(ConvertedPiece));
      }
    } else {
      for (auto I = report->getNotes().rbegin(),
                E = report->getNotes().rend(); I != E; ++I)
        Pieces.push_front(*I);
    }

    // Get the meta data.
    const BugReport::ExtraTextList &Meta = report->getExtraText();
    for (const auto &i : Meta)
      PD->addMeta(i);

    updateExecutedLinesWithDiagnosticPieces(*PD);
    Consumer->HandlePathDiagnostic(std::move(PD));
  }
}
/// Insert all lines participating in the function signature \p Signature
/// into \p ExecutedLines.
static void populateExecutedLinesWithFunctionSignature(
    const Decl *Signature, SourceManager &SM,
    FilesToLineNumsMap &ExecutedLines) {
  SourceRange SignatureSourceRange;
  const Stmt *Body = Signature->getBody();
  if (const auto FD = dyn_cast<FunctionDecl>(Signature)) {
    SignatureSourceRange = FD->getSourceRange();
  } else if (const auto OD = dyn_cast<ObjCMethodDecl>(Signature)) {
    SignatureSourceRange = OD->getSourceRange();
  } else {
    return;
  }
  SourceLocation Start = SignatureSourceRange.getBegin();
  SourceLocation End = Body ? Body->getSourceRange().getBegin()
                            : SignatureSourceRange.getEnd();
  if (!Start.isValid() || !End.isValid())
    return;
  unsigned StartLine = SM.getExpansionLineNumber(Start);
  unsigned EndLine = SM.getExpansionLineNumber(End);

  FileID FID = SM.getFileID(SM.getExpansionLoc(Start));
  for (unsigned Line = StartLine; Line <= EndLine; Line++)
    ExecutedLines[FID].insert(Line);
}
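/// Insert the expansion line on which \p S begins into \p ExecutedLines.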
static void populateExecutedLinesWithStmt(
    const Stmt *S, SourceManager &SM,
    FilesToLineNumsMap &ExecutedLines) {
  SourceLocation Loc = S->getSourceRange().getBegin();
  if (!Loc.isValid())
    return;
  SourceLocation ExpansionLoc = SM.getExpansionLoc(Loc);
  FileID FID = SM.getFileID(ExpansionLoc);
  unsigned LineNo = SM.getExpansionLineNumber(ExpansionLoc);
  ExecutedLines[FID].insert(LineNo);
}
/// \return all executed lines including function signatures on the path
/// starting from \p N.
static std::unique_ptr<FilesToLineNumsMap>
findExecutedLines(SourceManager &SM, const ExplodedNode *N) {
  auto ExecutedLines = llvm::make_unique<FilesToLineNumsMap>();

  while (N) {
    if (N->getFirstPred() == nullptr) {
      // First node: show signature of the entrance point.
      const Decl *D = N->getLocationContext()->getDecl();
      populateExecutedLinesWithFunctionSignature(D, SM, *ExecutedLines);
    } else if (auto CE = N->getLocationAs<CallEnter>()) {
      // Inlined function: show signature.
      const Decl *D = CE->getCalleeContext()->getDecl();
      populateExecutedLinesWithFunctionSignature(D, SM, *ExecutedLines);
    } else if (const Stmt *S = PathDiagnosticLocation::getStmt(N)) {
      populateExecutedLinesWithStmt(S, SM, *ExecutedLines);

      // Show extra context for some parent kinds.
      const Stmt *P = N->getParentMap().getParent(S);

      // The path exploration can die before the node with the associated
      // return statement is generated, but we do want to show the whole
      // return.
      if (const auto *RS = dyn_cast_or_null<ReturnStmt>(P)) {
        populateExecutedLinesWithStmt(RS, SM, *ExecutedLines);
        P = N->getParentMap().getParent(RS);
      }

      if (P && (isa<SwitchCase>(P) || isa<LabelStmt>(P)))
        populateExecutedLinesWithStmt(P, SM, *ExecutedLines);
    }

    N = N->getFirstPred();
  }
  return ExecutedLines;
}
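// Build the per-consumer diagnostic map for \p report. Path-insensitive
// reports get a location-only ("empty") diagnostic for each consumer;
// path-sensitive reports go through full path generation, which may discard
// the whole class as a false positive and leave the map empty.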
std::unique_ptr<DiagnosticForConsumerMapTy>
BugReporter::generateDiagnosticForConsumerMap(
    BugReport *report, ArrayRef<PathDiagnosticConsumer *> consumers,
    ArrayRef<BugReport *> bugReports) {
  if (!report->isPathSensitive()) {
    auto Out = llvm::make_unique<DiagnosticForConsumerMapTy>();
    for (auto *Consumer : consumers)
      (*Out)[Consumer] = generateEmptyDiagnosticForReport(report,
                                                          getSourceManager());
    return Out;
  }

  // Generate the full path sensitive diagnostic, using the generation scheme
  // specified by the PathDiagnosticConsumer. Note that we have to generate
  // path diagnostics even for consumers which do not support paths, because
  // the BugReporterVisitors may mark this bug as a false positive.
  assert(!bugReports.empty());
  MaxBugClassSize.updateMax(bugReports.size());
  std::unique_ptr<DiagnosticForConsumerMapTy> Out =
      generatePathDiagnostics(consumers, bugReports);

  if (Out->empty())
    return Out;

  MaxValidBugClassSize.updateMax(bugReports.size());

  // Examine the report and see if the last piece is in a header. Reset the
  // report location to the last piece in the main source file.
  AnalyzerOptions &Opts = getAnalyzerOptions();
  for (const auto &P : *Out)
    if (Opts.ShouldReportIssuesInMainSourceFile && !Opts.AnalyzeAll)
      P.second->resetDiagnosticLocationToMainFile();

  return Out;
}
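// Convenience entry point for path-insensitive reports. Roughly, a checker
// holding a BugReporter 'BR' might use it like this (the declaration 'D',
// bug name, category, and message below are purely illustrative):
//
//   PathDiagnosticLocation ELoc =
//       PathDiagnosticLocation::create(D, BR.getSourceManager());
//   SmallVector<SourceRange, 1> Ranges{D->getSourceRange()};
//   BR.EmitBasicReport(D, this, "Suspicious call", "Example category",
//                      "Description shown to the user", ELoc, Ranges);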
void BugReporter::EmitBasicReport(const Decl *DeclWithIssue,
                                  const CheckerBase *Checker,
                                  StringRef Name, StringRef Category,
                                  StringRef Str, PathDiagnosticLocation Loc,
                                  ArrayRef<SourceRange> Ranges) {
  EmitBasicReport(DeclWithIssue, Checker->getCheckName(), Name, Category, Str,
                  Loc, Ranges);
}
void BugReporter::EmitBasicReport(const Decl *DeclWithIssue,
                                  CheckName CheckName,
                                  StringRef name, StringRef category,
                                  StringRef str, PathDiagnosticLocation Loc,
                                  ArrayRef<SourceRange> Ranges) {
  // 'BT' is owned by BugReporter.
  BugType *BT = getBugTypeForName(CheckName, name, category);
  auto R = llvm::make_unique<BugReport>(*BT, str, Loc);
  R->setDeclWithIssue(DeclWithIssue);
  for (const SourceRange &Range : Ranges)
    R->addRange(Range);
  emitReport(std::move(R));
}
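// Return the lazily-created BugType for the given check/name/category triple.
// The BugType is cached in (and owned via) StrBugTypes, keyed by the string
// "<check>:<name>:<category>".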
BugType *BugReporter::getBugTypeForName(CheckName CheckName, StringRef name,
                                        StringRef category) {
  SmallString<136> fullDesc;
  llvm::raw_svector_ostream(fullDesc) << CheckName.getName() << ":" << name
                                      << ":" << category;
  BugType *&BT = StrBugTypes[fullDesc];
  if (!BT)
    BT = new BugType(CheckName, name, category);
  return BT;
}