//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/SemaInternal.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes.  For example, this rejects:
///    goto L;
///    int a[n];
///  L:
///
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos.  This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
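  ///
  /// For instance (hypothetical code), a single compound statement can
  /// introduce two scope records here, the second nested inside the first:
  ///   { int a[n]; int b[m]; }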
  struct GotoScope {
    /// ParentScope - The index in Scopes of the parent scope.  This is 0 when
    /// the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope.  Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
      : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<Stmt*, 4> AsmJumps;
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;
  SmallVector<LabelDecl*, 4> AsmJumpTargets;
public:
  JumpScopeChecker(Stmt *Body, Sema &S);
private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectOrAsmJumps(bool IsAsmGoto);
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiagError, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace

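// CHECK_PERMISSIVE(x) evaluates to 'x' when recovering from errors so that
// callers can bail out gracefully; otherwise it asserts that 'x' is false
// and evaluates to false.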
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectOrAsmJumps(false);
  VerifyIndirectOrAsmJumps(true);
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned, unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control jumps over it. If not, return
/// a pair of zeros.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

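    // For example (hypothetical), jumping over
    //   __attribute__((cleanup(closefd))) int fd = acquire();
    // would skip registering the 'closefd' cleanup.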
    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
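      // For example, both dialects reject this jump over an initialized
      // local (hypothetical code):
      //   goto skip;
      //   int x = compute();  // 'x' is in scope at 'skip'
      // skip: ;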

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the
      // initializer will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
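    // A jump over a VLA typedef would skip the evaluation of the array
    // bound, e.g. (hypothetical):
    //   goto later;
    //   typedef int VLA[n];
    // later: ;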
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned, unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size() - 1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for captured block literal variables.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned, unsigned> Diags;
    switch (destructKind) {
      case QualType::DK_cxx_destructor:
        Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                          diag::note_exits_block_captures_cxx_obj);
        break;
      case QualType::DK_objc_strong_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_strong,
                          diag::note_exits_block_captures_strong);
        break;
      case QualType::DK_objc_weak_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_weak,
                          diag::note_exits_block_captures_weak);
        break;
      case QualType::DK_nontrivial_c_struct:
        Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                          diag::note_exits_block_captures_non_trivial_c_struct);
        break;
      case QualType::DK_none:
        llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size() - 1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
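/// For example (hypothetical, under ARC):
///   struct S { __strong id obj; };
///   f((struct S){ .obj = o });  // the literal requires non-trivial cleanup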
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - The subtree rooted at S is known to form a
/// coherent scope with the specified parent node.  Walk through the
/// statements, adding any labels or gotos to LabelAndGotoScopes and
/// recursively walking the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
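    // For example (hypothetical):
    //   goto *&&done;       // treated like 'goto done;'
    //   goto *computed();   // remains a true indirect jump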
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
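    // e.g. in "switch (int i = f(); i) { ... }" (hypothetical), both the
    // init-statement and the condition are walked in the enclosing scope.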
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    LLVM_FALLTHROUGH;

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::GCCAsmStmtClass:
    if (auto *GS = dyn_cast<GCCAsmStmt>(S))
      if (GS->isAsmGoto()) {
        // Remember both what scope a goto is in as well as the fact that we
        // have it.  This makes the second scan not have to walk the AST again.
        LabelAndGotoScopes[S] = ParentScope;
        AsmJumps.push_back(GS);
        for (auto *E : GS->labels())
          AsmJumpTargets.push_back(E->getLabel());
      }
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = IS->isConstexpr() ? diag::note_protected_by_constexpr_if
                                      : diag::note_protected_by_if_available;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
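    // e.g. (ill-formed, hypothetical):
    //   goto inside;
    //   if constexpr (Cond) { inside: ; }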
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumping from a catch handler into the try block is not allowed either.
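    // e.g. (ill-formed, hypothetical):
    //   try { L: ; } catch (...) { goto L; }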
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
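    // e.g. (ill-formed, hypothetical):
    //   goto L;
    //   @try { L: ; } @finally { }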
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jumping from a catch into the finally or try is not valid.
    for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
      ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Walk each @catch body in its own protected scope.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jumping from the finally into the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr; it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
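    // e.g. (hypothetical, ObjC++ under ARC): jumping over
    //   use(^{ consume(capturedObj); });
    // would skip the copy/dispose bookkeeping for 'capturedObj'.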
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
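    // e.g. (hypothetical): in "const S &r = S();" the temporary lives until
    // 'r' goes out of scope, so an indirect jump out of that scope would
    // bypass its destructor.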
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments(CommaLHS,
                                                            Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size() - 1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  default:
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if it is
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // With a plain goto, check the jump against the scope of its label.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectOrAsmJumps - Verify whether any possible indirect goto or
/// asm goto jump might cross a protection boundary.  Unlike direct jumps,
/// indirect or asm goto jumps count cleanups as protection boundaries:
/// since there's no way to know where the jump is going, we can't implicitly
/// run the right cleanups the way we can with direct jumps.
/// Thus, an indirect/asm jump is "trivial" if it bypasses no
/// initializations and no teardowns.  More formally, an indirect/asm jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
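///
/// For example (hypothetical), this indirect jump is non-trivial because it
/// exits a scope with a cleanup:
///   void *p = &&out;
///   { __attribute__((cleanup(fn))) int x = 0; goto *p; }
/// out: ;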
void JumpScopeChecker::VerifyIndirectOrAsmJumps(bool IsAsmGoto) {
  SmallVector<Stmt*, 4> GotoJumps = IsAsmGoto ? AsmJumps : IndirectJumps;
  if (GotoJumps.empty())
    return;
  SmallVector<LabelDecl *, 4> JumpTargets =
      IsAsmGoto ? AsmJumpTargets : IndirectJumpTargets;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (JumpTargets.empty()) {
    assert(!IsAsmGoto && "only indirect goto can get here");
    S.Diag(GotoJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }
  // Collect a single representative of every scope containing an
  // indirect or asm goto.  For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, Stmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (SmallVectorImpl<Stmt *>::iterator I = GotoJumps.begin(),
                                           E = GotoJumps.end();
         I != E; ++I) {
      Stmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      Stmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, Stmt *>::iterator I = JumpScopesMap.begin(),
                                                    E = JumpScopesMap.end();
         I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl *>::iterator I = JumpTargets.begin(),
                                              E = JumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes.  We build a set of
  // scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned, LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope.  'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable.  For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()):  we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
         (InDiagNote == diag::note_protected_by_variable_init ||
          InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError,
                                 unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
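  // e.g. (hypothetical, warns):
  //   __try { } __finally { goto done; }
  // done: ;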
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope.  Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}