//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//
#include "clang/Sema/SemaInternal.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes. For example, this rejects:
///    goto L;
///    int a[n];
///   L:
///
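/// Beyond VLAs, jumps into other kinds of protected scopes are rejected as
/// well. An illustrative example (not from the original comment): in
/// Objective-C, jumping into an @try body is invalid:
///    goto L;
///    @try {
///     L: ;
///    } @finally {}
///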
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos. This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope. This is 0 if
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope. Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<Stmt*, 4> AsmJumps;
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;
  SmallVector<LabelDecl*, 4> AsmJumpTargets;
public:
  JumpScopeChecker(Stmt *Body, Sema &S);

private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectOrAsmJumps(bool IsAsmGoto);
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace
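
// CHECK_PERMISSIVE(x) evaluates to true only when we are in permissive
// (error-recovery) mode and 'x' holds, letting callers skip over inconsistent
// scope records; outside permissive mode it asserts that 'x' never holds.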
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectOrAsmJumps(false);
  VerifyIndirectOrAsmJumps(true);
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
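/// For example (an illustrative scope tree, not from the original comment):
/// if Scopes[4].ParentScope == 2 and Scopes[3].ParentScope == 2, then
/// GetDeepestCommonScope(4, 3) repeatedly replaces the deeper (higher-indexed)
/// scope with its parent and returns 2.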
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it. If not, return 0.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
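
      // An illustrative example of these rules (not from the original
      // comment):
      //   goto L; int i;          // OK: scalar, no initializer
      //   goto L; std::string s;  // ill-formed: nontrivial ctor/dtor
      //  L: ;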
      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for captured block literal variables.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
    case QualType::DK_cxx_destructor:
      Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                        diag::note_exits_block_captures_cxx_obj);
      break;
    case QualType::DK_objc_strong_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_strong,
                        diag::note_exits_block_captures_strong);
      break;
    case QualType::DK_objc_weak_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_weak,
                        diag::note_exits_block_captures_weak);
      break;
    case QualType::DK_nontrivial_c_struct:
      Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                        diag::note_exits_block_captures_non_trivial_c_struct);
      break;
    case QualType::DK_none:
      llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// BuildScopeInformation - The subtree rooted at S is known to form a
/// coherent VLA scope with a specified parent node. Walk through the
/// statements, adding any labels or gotos to LabelAndGotoScopes and recursively
/// walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope. Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto. In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
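    // An illustrative example: in "switch (int x = f(); x) { ... }", the init
    // statement and the condition variable are walked here in the enclosing
    // scope and then skipped (via StmtsToSkip) when the children are visited
    // below.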
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    LLVM_FALLTHROUGH;

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it. This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::GCCAsmStmtClass:
    if (auto *GS = dyn_cast<GCCAsmStmt>(S))
      if (GS->isAsmGoto()) {
        // Remember both what scope a goto is in as well as the fact that we
        // have it. This makes the second scan not have to walk the AST again.
        LabelAndGotoScopes[S] = ParentScope;
        AsmJumps.push_back(GS);
        for (auto *E : GS->labels())
          AsmJumpTargets.push_back(E->getLabel());
      }
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = IS->isConstexpr() ? diag::note_protected_by_constexpr_if
                                      : diag::note_protected_by_if_available;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
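    // An illustrative example: in "if constexpr (B) { L1: ; } else { L2: ; }",
    // a goto from outside the statement to L1 or L2 is diagnosed, since
    // entering an arm directly would bypass the constexpr condition.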
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jump from the catch to the finally or try is not valid.
    for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
      ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Each @catch body is protected by its own scope.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jump from the finally to the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr; it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures. This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
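    // An illustrative example: in "f(^{ use(obj); });" where 'obj' is a
    // __strong Objective-C pointer, the block literal's copy of the capture
    // has a nontrivial cleanup, so jumps across this full-expression are
    // diagnosed via the scopes pushed below.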
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      const BlockDecl *BDecl = EWC->getObject(i);
      for (const auto &CI : BDecl->captures()) {
        VarDecl *variable = CI.getVariable();
        BuildScopeInformation(variable, BDecl, origParentScope);
      }
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
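    // An illustrative example: "const T &r = T();" extends the temporary's
    // lifetime to that of 'r'; an indirect jump out of the enclosing scope
    // would bypass the extended temporary's destructor, hence the OutDiag
    // pushed below.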
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->GetTemporaryExpr()->skipRValueSubobjectAdjustments(
              CommaLHS, Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  default:
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents". It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // With a direct goto, check the jump against the scope of its label.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectOrAsmJumps - Verify whether any possible indirect goto or
/// asm goto jump might cross a protection boundary. Unlike direct jumps,
/// indirect or asm goto jumps count cleanups as protection boundaries:
/// since there's no way to know where the jump is going, we can't implicitly
/// run the right cleanups the way we can with direct jumps.
///
/// Thus, an indirect/asm jump is "trivial" if it bypasses no
/// initializations and no teardowns. More formally, an indirect/asm jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
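///
/// An illustrative C example (not from the original comment):
///   void *p = &&L;
///   {
///     __attribute__((cleanup(f))) int x;  // scope with an OutDiag
///     goto *p;                            // non-trivial path out: diagnosed
///   }
///  L: ;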
void JumpScopeChecker::VerifyIndirectOrAsmJumps(bool IsAsmGoto) {
  SmallVector<Stmt*, 4> GotoJumps = IsAsmGoto ? AsmJumps : IndirectJumps;
  if (GotoJumps.empty())
    return;
  SmallVector<LabelDecl *, 4> JumpTargets =
      IsAsmGoto ? AsmJumpTargets : IndirectJumpTargets;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (JumpTargets.empty()) {
    assert(!IsAsmGoto && "only indirect goto can get here");
    S.Diag(GotoJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }

  // Collect a single representative of every scope containing an
  // indirect or asm goto. For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, Stmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (SmallVectorImpl<Stmt *>::iterator I = GotoJumps.begin(),
                                           E = GotoJumps.end();
         I != E; ++I) {
      Stmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      Stmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, Stmt *>::iterator I = JumpScopesMap.begin(),
                                                    E = JumpScopesMap.end();
         I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl *>::iterator I = JumpTargets.begin(),
                                              E = JumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes. We build a set of
  // scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned,LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope. 'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable. For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()): we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
          (InDiagNote == diag::note_protected_by_variable_init ||
           InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
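/// An illustrative example (not from the original comment):
///   goto L;       // FromScope: the enclosing scope
///   {
///     int a[n];   // VLA introduces a deeper protected scope
///    L: ;         // ToScope is deeper than the common scope: diagnosed
///   }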
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError,
                                 unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope. Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}