//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes.  For example, this rejects:
///    goto L;
///    int a[n];
///  L:
///
/// We also detect jumps out of protected scopes when it's not possible to do
/// cleanups properly. Indirect jumps and ASM jumps can't do cleanups because
/// the target is unknown. Return statements with \c [[clang::musttail]] cannot
/// handle any cleanups due to the nature of a tail call.
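///
/// As a hedged illustration (not from the original source), an indirect jump
/// into a protected scope is likewise rejected:
///    void *p = &&L;
///    {
///      int a[n];   // VLA scope
///    L: ;
///    }
///    goto *p;      // error: the jump might enter the VLA's scope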
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos.  This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope.  This is 0
    /// when the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope.  Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
      : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<Stmt*, 4> AsmJumps;
  SmallVector<AttributedStmt *, 4> MustTailStmts;
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;
  SmallVector<LabelDecl*, 4> AsmJumpTargets;
public:
  JumpScopeChecker(Stmt *Body, Sema &S);
private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectOrAsmJumps(bool IsAsmGoto);
  void VerifyMustTailStmts();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);
  const Attr *GetMustTailAttr(AttributedStmt *AS);

  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace

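// In a normal build, CHECK_PERMISSIVE(x) asserts that 'x' never holds; when
// Sema is recovering from errors (Permissive == true), it instead evaluates
// to 'x', letting callers skip checks that rely on complete scope information.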
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectOrAsmJumps(false);
  VerifyIndirectOrAsmJumps(true);
  VerifyMustTailStmts();
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
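/// For example (an illustrative note), given a scope tree F -> {A -> B, C},
/// the deepest common scope of B and C is F, while that of A and B is A.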
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it. If not, return 0.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
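      //
      // For example (a hedged illustration), this is ill-formed in C++:
      //   goto L;         // error: jump bypasses the initialization of 's'
      //   std::string s;  // non-trivial constructor and destructor
      //  L: ;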

      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

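  // A VLA typedef likewise protects its scope; for example (a hedged
  // illustration):
  //   goto L;
  //   typedef int VLA[n];  // note: jump bypasses VLA typedef
  //  L: ;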
  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for a variable captured by a block literal.
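/// For example (a hedged illustration, where 'use' is a hypothetical function
/// taking a block), a jump over the block literal is diagnosed because the
/// captured copy of 'obj' would need non-trivial destruction:
///   goto L;                // error: jump bypasses the block literal below
///   use(^{ (void)obj; });  // block captures 'obj'
///  L: ;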
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
      case QualType::DK_cxx_destructor:
        Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                          diag::note_exits_block_captures_cxx_obj);
        break;
      case QualType::DK_objc_strong_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_strong,
                          diag::note_exits_block_captures_strong);
        break;
      case QualType::DK_objc_weak_lifetime:
        Diags = ScopePair(diag::note_enters_block_captures_weak,
                          diag::note_exits_block_captures_weak);
        break;
      case QualType::DK_nontrivial_c_struct:
        Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                          diag::note_exits_block_captures_non_trivial_c_struct);
        break;
      case QualType::DK_none:
        llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - Walk the given statement, which is known to form a
/// coherent VLA scope with a specified parent node.  Add any labels or gotos
/// to LabelAndGotoScopes and recursively walk the AST as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    [[fallthrough]];

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::GCCAsmStmtClass:
    if (auto *GS = dyn_cast<GCCAsmStmt>(S))
      if (GS->isAsmGoto()) {
        // Remember both what scope a goto is in as well as the fact that we
        // have it.  This makes the second scan not have to walk the AST again.
        LabelAndGotoScopes[S] = ParentScope;
        AsmJumps.push_back(GS);
        for (auto *E : GS->labels())
          AsmJumpTargets.push_back(E->getLabel());
      }
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isConsteval() ||
          IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = diag::note_protected_by_if_available;
    if (IS->isConstexpr())
      Diag = diag::note_protected_by_constexpr_if;
    else if (IS->isConsteval())
      Diag = diag::note_protected_by_consteval_if;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));

    if (!IS->isConsteval())
      BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jump from the catch into the try is not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }

    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);
    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jump from the catch to the finally or try is not valid.
    for (ObjCAtCatchStmt *AC : AT->catch_stmts()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // Walk the @catch body in its own protected scope.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jump from the finally to the try or catch is not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr, it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
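    // Each cleanup object below is either a BlockDecl whose captures may need
    // non-trivial destruction or a CompoundLiteralExpr of a non-trivially
    // destructible C struct type; each gets its own protected scope.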
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
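    // For example (a hedged illustration), "const S &r = S();" extends the
    // temporary's lifetime to match 'r'; an indirect jump out of that scope
    // could not run the temporary's destructor.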
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments(CommaLHS,
                                                            Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  case Stmt::AttributedStmtClass: {
    AttributedStmt *AS = cast<AttributedStmt>(S);
    if (GetMustTailAttr(AS)) {
      LabelAndGotoScopes[AS] = ParentScope;
      MustTailStmts.push_back(AS);
    }
    break;
  }

  default:
    if (auto *ED = dyn_cast<OMPExecutableDirective>(S)) {
      if (!ED->isStandaloneDirective()) {
        unsigned NewParentScope = Scopes.size();
        Scopes.emplace_back(ParentScope,
                            diag::note_omp_protected_structured_block,
                            diag::note_omp_exits_structured_block,
                            ED->getStructuredBlock()->getBeginLoc());
        BuildScopeInformation(ED->getStructuredBlock(), NewParentScope);
        return;
      }
    }
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // For a direct goto, check the jump against the scope of its target label.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectOrAsmJumps - Verify whether any possible indirect goto or
/// asm goto jump might cross a protection boundary.  Unlike direct jumps,
/// indirect or asm goto jumps count cleanups as protection boundaries:
/// since there's no way to know where the jump is going, we can't implicitly
/// run the right cleanups the way we can with direct jumps.
/// Thus, an indirect/asm jump is "trivial" if it bypasses no
/// initializations and no teardowns.  More formally, an indirect/asm jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
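/// For example (an illustrative note), an indirect jump between two labels
/// inside the same VLA scope is trivial and accepted, while an indirect jump
/// from outside that scope to either label is not and is diagnosed.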
void JumpScopeChecker::VerifyIndirectOrAsmJumps(bool IsAsmGoto) {
  SmallVector<Stmt*, 4> GotoJumps = IsAsmGoto ? AsmJumps : IndirectJumps;
  if (GotoJumps.empty())
    return;
  SmallVector<LabelDecl *, 4> JumpTargets =
      IsAsmGoto ? AsmJumpTargets : IndirectJumpTargets;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (JumpTargets.empty()) {
    assert(!IsAsmGoto && "only indirect goto can get here");
    S.Diag(GotoJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }
  // Collect a single representative of every scope containing an
  // indirect or asm goto.  For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, Stmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (SmallVectorImpl<Stmt *>::iterator I = GotoJumps.begin(),
                                           E = GotoJumps.end();
         I != E; ++I) {
      Stmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      Stmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, Stmt *>::iterator I = JumpScopesMap.begin(),
                                                    E = JumpScopesMap.end();
         I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl *>::iterator I = JumpTargets.begin(),
                                              E = JumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes.  We build a set of
  // scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned,LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope.  'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable.  For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()):  we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
         (InDiagNote == diag::note_protected_by_variable_init ||
          InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
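/// For example (a hedged illustration), this rejects:
///   switch (n) {
///     int vla[n];  // note: jump bypasses the VLA's initialization
///   case 0:        // error: switch jumps into protected scope
///     break;
///   }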
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError,
                                 unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope.  Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
      if (Scopes[I].InDiag == diag::note_omp_protected_structured_block) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(To->getBeginLoc(), diag::note_omp_exits_structured_block);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
    assert(isa<LabelStmt>(To));
    LabelStmt *Label = cast<LabelStmt>(To);
    Label->setSideEntry(true);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

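/// VerifyMustTailStmts - Diagnose [[clang::musttail]] return statements that
/// sit inside a scope requiring cleanups on exit, which a tail call cannot
/// run. For example (a hedged illustration):
///   int g();
///   int f() {
///     std::string s;                   // destructor must run at return
///     [[clang::musttail]] return g();  // error: cannot exit this scope
///   }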
void JumpScopeChecker::VerifyMustTailStmts() {
  for (AttributedStmt *AS : MustTailStmts) {
    for (unsigned I = LabelAndGotoScopes[AS]; I; I = Scopes[I].ParentScope) {
      if (Scopes[I].OutDiag) {
        S.Diag(AS->getBeginLoc(), diag::err_musttail_scope);
        S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
      }
    }
  }
}

const Attr *JumpScopeChecker::GetMustTailAttr(AttributedStmt *AS) {
  ArrayRef<const Attr *> Attrs = AS->getAttrs();
  const auto *Iter =
      llvm::find_if(Attrs, [](const Attr *A) { return isa<MustTailAttr>(A); });
  return Iter != Attrs.end() ? *Iter : nullptr;
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}