// JumpDiagnostics.cpp — extraction artifacts (pagination and line-number
// residue from the source viewer) removed from this header area.
  1. //===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the JumpScopeChecker class, which is used to diagnose
  11. // jumps that enter a protected scope in an invalid way.
  12. //
  13. //===----------------------------------------------------------------------===//
  14. #include "clang/Sema/SemaInternal.h"
  15. #include "clang/AST/DeclCXX.h"
  16. #include "clang/AST/Expr.h"
  17. #include "clang/AST/ExprCXX.h"
  18. #include "clang/AST/StmtCXX.h"
  19. #include "clang/AST/StmtObjC.h"
  20. #include "llvm/ADT/BitVector.h"
  21. using namespace clang;
namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes.  For example, this rejects:
///    goto L;
///    int a[n];
///  L:
///
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos.  This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope.  This is 0
    /// when the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope.  Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
      : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  /// Scopes - Every protected scope seen so far, in creation order; entry 0
  /// is the synthetic function-body scope pushed by the constructor.
  SmallVector<GotoScope, 48> Scopes;

  /// LabelAndGotoScopes - Maps each label, goto, and switch statement to the
  /// index (into Scopes) of the scope it appears in.
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;

  /// Jumps - Direct jumps to verify: gotos, switches, and indirect gotos
  /// with a constant target.
  SmallVector<Stmt*, 16> Jumps;

  /// IndirectJumps - Indirect gotos whose target is not a known constant.
  SmallVector<IndirectGotoStmt*, 4> IndirectJumps;

  /// IndirectJumpTargets - Labels whose address was taken somewhere in the
  /// function (potential targets of the indirect gotos above).
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;

public:
  JumpScopeChecker(Stmt *Body, Sema &S);

private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectJumps();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectJump(IndirectGotoStmt *IG, unsigned IGScope,
                            LabelDecl *Target, unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace
// CHECK_PERMISSIVE(x) - In permissive (error-recovery) mode, evaluates to x
// so callers can bail out gracefully on inconsistent scope information; when
// not permissive, asserts that x is false and evaluates to false.
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.  ~0U sentinels mark "no parent"
  // and "no diagnostics" for this synthetic root scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectJumps();
}
  92. /// GetDeepestCommonScope - Finds the innermost scope enclosing the
  93. /// two scopes.
  94. unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  95. while (A != B) {
  96. // Inner scopes are created after outer scopes and therefore have
  97. // higher indices.
  98. if (A < B) {
  99. assert(Scopes[B].ParentScope < B);
  100. B = Scopes[B].ParentScope;
  101. } else {
  102. assert(Scopes[A].ParentScope < A);
  103. A = Scopes[A].ParentScope;
  104. }
  105. }
  106. return A;
  107. }
// (InDiag, OutDiag) diagnostic-ID pair; 0 means "no diagnostic".
typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it. If not, return 0.
/// The result pair is (InDiag, OutDiag): the note for a jump into the scope
/// and the note for an indirect jump out of it.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    // __block and cleanup-attribute variables unconditionally protect their
    // scope in both directions, so return immediately.
    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_ownership,
                         diag::note_exits_objc_ownership);

      case QualType::DK_cxx_destructor:
        // Jumping out past a C++ destructor is the only out-diagnostic that
        // can still be refined by the initializer checks below.
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    // A typedef/alias of a variably-modified type also creates a scope.
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}
  178. /// \brief Build scope information for a declaration that is part of a DeclStmt.
  179. void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  180. // If this decl causes a new scope, push and switch to it.
  181. std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  182. if (Diags.first || Diags.second) {
  183. Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
  184. D->getLocation()));
  185. ParentScope = Scopes.size()-1;
  186. }
  187. // If the decl has an initializer, walk it with the potentially new
  188. // scope we just installed.
  189. if (VarDecl *VD = dyn_cast<VarDecl>(D))
  190. if (Expr *Init = VD->getInit())
  191. BuildScopeInformation(Init, ParentScope);
  192. }
/// \brief Build scope information for a variable captured by a block literal.
///
/// Captures with a nontrivial destruction kind (C++ destructor, ARC strong,
/// ARC weak) introduce a scope that jumps may not cross in either direction.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    // Pick the in/out note pair matching how the capture is destroyed.
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
      case QualType::DK_cxx_destructor:
        Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                          diag::note_exits_block_captures_cxx_obj);
        break;
      case QualType::DK_objc_strong_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_strong,
                          diag::note_exits_block_captures_strong);
        break;
      case QualType::DK_objc_weak_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_weak,
                          diag::note_exits_block_captures_weak);
        break;
      case QualType::DK_none:
        llvm_unreachable("non-lifetime captured variable");
    }
    // Prefer the variable's own location; fall back to the block literal's
    // location when the variable has none.
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}
/// BuildScopeInformation - The statements from CI to CE are known to form a
/// coherent VLA scope with a specified parent node.  Walk through the
/// statements, adding any labels or gotos to LabelAndGotoScopes and recursively
/// walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  bool SkipFirstSubStmt = false;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(cast<IndirectGotoStmt>(S));
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the condition variable before entering the scope of the switch
    // statement.
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      // The condition was already walked; skip the first child below.
      SkipFirstSubStmt = true;
    }
    // Fall through

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    unsigned newParentScope;
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_cxx_try,
                               diag::note_exits_cxx_try,
                               TS->getSourceRange().getBegin()));
    if (Stmt *TryBlock = TS->getTryBlock())
      BuildScopeInformation(TryBlock, (newParentScope = Scopes.size()-1));

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(),
                            (newParentScope = Scopes.size()-1));
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    unsigned newParentScope;
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_seh_try,
                               diag::note_exits_seh_try,
                               TS->getSourceRange().getBegin()));
    if (Stmt *TryBlock = TS->getTryBlock())
      BuildScopeInformation(TryBlock, (newParentScope = Scopes.size()-1));

    // Jump from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(),
                            (newParentScope = Scopes.size()-1));
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(),
                            (newParentScope = Scopes.size()-1));
    }
    return;
  }

  default:
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (SkipFirstSubStmt) {
      SkipFirstSubStmt = false;
      continue;
    }

    if (!SubStmt) continue;

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SubStmt))
        Next = CS->getSubStmt();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SubStmt))
        Next = DS->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    if (DeclStmt *DS = dyn_cast<DeclStmt>(SubStmt)) {
      // The decl statement creates a scope if any of the decls in it are VLAs
      // or have the cleanup attribute.
      for (auto *I : DS->decls())
        BuildScopeInformation(I, ParentScope);
      continue;
    }

    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    if (ObjCAtTryStmt *AT = dyn_cast<ObjCAtTryStmt>(SubStmt)) {
      unsigned newParentScope;
      // Recursively walk the AST for the @try part.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, (newParentScope = Scopes.size()-1));

      // Jump from the catch to the finally or try is not valid.
      for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
        ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
        Scopes.push_back(GotoScope(ParentScope,
                                   diag::note_protected_by_objc_catch,
                                   diag::note_exits_objc_catch,
                                   AC->getAtCatchLoc()));
        // Walk each @catch body in its own protected scope.
        BuildScopeInformation(AC->getCatchBody(),
                              (newParentScope = Scopes.size()-1));
      }

      // Jump from the finally to the try or catch is not valid.
      if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
        Scopes.push_back(GotoScope(ParentScope,
                                   diag::note_protected_by_objc_finally,
                                   diag::note_exits_objc_finally,
                                   AF->getAtFinallyLoc()));
        BuildScopeInformation(AF, (newParentScope = Scopes.size()-1));
      }

      continue;
    }

    unsigned newParentScope;
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    if (ObjCAtSynchronizedStmt *AS =
            dyn_cast<ObjCAtSynchronizedStmt>(SubStmt)) {
      // Recursively walk the AST for the @synchronized object expr, it is
      // evaluated in the normal scope.
      BuildScopeInformation(AS->getSynchExpr(), ParentScope);

      // Recursively walk the AST for the @synchronized part, protected by a new
      // scope.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_synchronized,
                                 diag::note_exits_objc_synchronized,
                                 AS->getAtSynchronizedLoc()));
      BuildScopeInformation(AS->getSynchBody(),
                            (newParentScope = Scopes.size()-1));
      continue;
    }

    // Disallow jumps into the protected statement of an @autoreleasepool.
    if (ObjCAutoreleasePoolStmt *AS =
            dyn_cast<ObjCAutoreleasePoolStmt>(SubStmt)) {
      // Recursively walk the AST for the @autoreleasepool part, protected by a
      // new scope.
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_autoreleasepool,
                                 diag::note_exits_objc_autoreleasepool,
                                 AS->getAtLoc()));
      BuildScopeInformation(AS->getSubStmt(),
                            (newParentScope = Scopes.size() - 1));
      continue;
    }

    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    if (ExprWithCleanups *EWC = dyn_cast<ExprWithCleanups>(SubStmt)) {
      for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
        const BlockDecl *BDecl = EWC->getObject(i);
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, ParentScope);
        }
      }
    }

    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
    if (MaterializeTemporaryExpr *MTE =
            dyn_cast<MaterializeTemporaryExpr>(SubStmt)) {
      if (MTE->getStorageDuration() == SD_Automatic) {
        SmallVector<const Expr *, 4> CommaLHS;
        SmallVector<SubobjectAdjustment, 4> Adjustments;
        const Expr *ExtendedObject =
            MTE->GetTemporaryExpr()->skipRValueSubobjectAdjustments(
                CommaLHS, Adjustments);
        if (ExtendedObject->getType().isDestructedType()) {
          Scopes.push_back(GotoScope(ParentScope, 0,
                                     diag::note_exits_temporary_dtor,
                                     ExtendedObject->getExprLoc()));
          ParentScope = Scopes.size()-1;
        }
      }
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}
/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // For a direct goto, check the jump to the label's statement.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    // Otherwise this must be a switch; check the implicit jump to each of
    // its case/default labels.
    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getLocStart();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getLocStart();
      else
        Loc = SC->getLocStart();
      // Note: no warning variant (0) — a switch into a protected scope is
      // always a hard error here.
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}
/// VerifyIndirectJumps - Verify whether any possible indirect jump
/// might cross a protection boundary.  Unlike direct jumps, indirect
/// jumps count cleanups as protection boundaries: since there's no
/// way to know where the jump is going, we can't implicitly run the
/// right cleanups the way we can with direct jumps.
///
/// Thus, an indirect jump is "trivial" if it bypasses no
/// initializations and no teardowns.  More formally, an indirect jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
void JumpScopeChecker::VerifyIndirectJumps() {
  if (IndirectJumps.empty()) return;

  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (IndirectJumpTargets.empty()) {
    S.Diag(IndirectJumps[0]->getGotoLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }

  // Collect a single representative of every scope containing an
  // indirect goto.  For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, IndirectGotoStmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, IndirectGotoStmt*> JumpScopesMap;
    for (SmallVectorImpl<IndirectGotoStmt*>::iterator
           I = IndirectJumps.begin(), E = IndirectJumps.end(); I != E; ++I) {
      IndirectGotoStmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      IndirectGotoStmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, IndirectGotoStmt*>::iterator
           I = JumpScopesMap.begin(), E = JumpScopesMap.end(); I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl*>::iterator
         I = IndirectJumpTargets.begin(), E = IndirectJumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes.  We build a set of
  // of scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned,LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope.  'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable.  For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()):  we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}
  618. /// Return true if a particular error+note combination must be downgraded to a
  619. /// warning in Microsoft mode.
  620. static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  621. return (JumpDiag == diag::err_goto_into_protected_scope &&
  622. (InDiagNote == diag::note_protected_by_variable_init ||
  623. InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
  624. }
  625. /// Return true if a particular note should be downgraded to a compatibility
  626. /// warning in C++11 mode.
  627. static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  628. return S.getLangOpts().CPlusPlus11 &&
  629. InDiagNote == diag::note_protected_by_variable_non_pod;
  630. }
  631. /// Produce primary diagnostic for an indirect jump statement.
  632. static void DiagnoseIndirectJumpStmt(Sema &S, IndirectGotoStmt *Jump,
  633. LabelDecl *Target, bool &Diagnosed) {
  634. if (Diagnosed)
  635. return;
  636. S.Diag(Jump->getGotoLoc(), diag::err_indirect_goto_in_protected_scope);
  637. S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target);
  638. Diagnosed = true;
  639. }
  640. /// Produce note diagnostics for a jump into a protected scope.
  641. void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  642. if (CHECK_PERMISSIVE(ToScopes.empty()))
  643. return;
  644. for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
  645. if (Scopes[ToScopes[I]].InDiag)
  646. S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
  647. }
/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectJump(IndirectGotoStmt *Jump,
                                            unsigned JumpScope,
                                            LabelDecl *Target,
                                            unsigned TargetScope) {
  // A jump that stays within one scope has nothing to diagnose.
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor, noting
  // every scope whose teardown the jump would skip.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  // Scopes that are only an issue under C++98 are collected rather than
  // diagnosed immediately.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    S.Diag(Jump->getGotoLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope);
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}
/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
///
/// \param From the statement performing the jump (goto, switch, etc.).
/// \param To the jump target statement.
/// \param DiagLoc location at which primary diagnostics are reported.
/// \param JumpDiagError diagnostic ID for an ill-formed jump.
/// \param JumpDiagWarning diagnostic ID used instead of the error when
///        Microsoft compatibility mode downgrades it (0 when no downgrade
///        applies for this jump kind).
/// \param JumpDiagCXX98Compat diagnostic ID for jumps that are valid in C++11
///        but were ill-formed in C++98 (-Wc++98-compat).
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
unsigned JumpDiagError, unsigned JumpDiagWarning,
unsigned JumpDiagCXX98Compat) {
// Both endpoints must have been assigned scopes during the build pass.
if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
return;
if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
return;
unsigned FromScope = LabelAndGotoScopes[From];
unsigned ToScope = LabelAndGotoScopes[To];
// Common case: exactly the same scope, which is fine.
if (FromScope == ToScope) return;
// Warn on gotos out of __finally blocks.
if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
// If FromScope > ToScope, FromScope is more nested and the jump goes to a
// less nested scope. Check if it crosses a __finally along the way.
// NOTE(review): the I > ToScope comparison assumes parent scopes always
// have smaller indices than their children — confirm against how Scopes
// is populated earlier in this file.
for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
S.Diag(From->getLocStart(), diag::warn_jump_out_of_seh_finally);
break;
}
}
}
unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);
// It's okay to jump out from a nested scope.
if (CommonScope == ToScope) return;
// Pull out (and reverse) any scopes we might need to diagnose skipping.
// Each entered scope is classified into exactly one bucket, checked in
// priority order: MS downgrade first, then C++98-compat, then hard error.
SmallVector<unsigned, 10> ToScopesCXX98Compat;
SmallVector<unsigned, 10> ToScopesError;
SmallVector<unsigned, 10> ToScopesWarning;
for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
ToScopesWarning.push_back(I);
else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
ToScopesCXX98Compat.push_back(I);
else if (Scopes[I].InDiag)
ToScopesError.push_back(I);
}
// Handle warnings.
if (!ToScopesWarning.empty()) {
S.Diag(DiagLoc, JumpDiagWarning);
NoteJumpIntoScopes(ToScopesWarning);
}
// Handle errors.
if (!ToScopesError.empty()) {
S.Diag(DiagLoc, JumpDiagError);
NoteJumpIntoScopes(ToScopesError);
}
// Handle -Wc++98-compat warnings if the jump is well-formed.
// (Suppressed whenever a hard error was already issued above.)
if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
S.Diag(DiagLoc, JumpDiagCXX98Compat);
NoteJumpIntoScopes(ToScopesCXX98Compat);
}
}
  736. void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  737. if (GS->getLabel()->isMSAsmLabel()) {
  738. S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
  739. << GS->getLabel()->getIdentifier();
  740. S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
  741. << GS->getLabel()->getIdentifier();
  742. }
  743. }
  744. void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  745. (void)JumpScopeChecker(Body, *this);
  746. }