//=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the stack address leak checker, which checks if an
// invalid stack address is stored into a global or heap location.
// See CERT DCL30-C.
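//
// For example (an illustrative case this checker warns about):
//
//   int *test() {
//     int x = 42;
//     return &x; // warning: Address of stack memory associated with
//                // local variable 'x' returned to caller
//   }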
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"

using namespace clang;
using namespace ento;

namespace {
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  mutable IdentifierInfo *dispatch_semaphore_tII = nullptr;
  mutable std::unique_ptr<BuiltinBug> BT_stackleak;
  mutable std::unique_ptr<BuiltinBug> BT_returnstack;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackasync;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackret;

public:
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  DefaultBool ChecksEnabled[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkReturnedBlockCaptures(const BlockDataRegion &B,
                                  CheckerContext &C) const;
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitStackError(CheckerContext &C, const MemRegion *R,
                      const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<const MemRegion *, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isArcManagedBlock(const MemRegion *R, CheckerContext &C);
  static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
};
} // namespace
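
// Renders a description of the escaping region 'R' into 'os' (for example,
// "Address of stack memory associated with local variable 'x'") and returns
// a source range to highlight in the diagnostic.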
SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
                                            ASTContext &Ctx) {
  // Get the base region, stripping away fields and elements.
  R = R->getBaseRegion();
  SourceManager &SM = Ctx.getSourceManager();
  SourceRange range;
  os << "Address of ";

  // Check if the region is a compound literal.
  if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
    const CompoundLiteralExpr *CL = CR->getLiteralExpr();
    os << "stack memory associated with a compound literal "
          "declared on line "
       << SM.getExpansionLineNumber(CL->getBeginLoc());
    range = CL->getSourceRange();
  } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
    const Expr *ARE = AR->getExpr();
    SourceLocation L = ARE->getBeginLoc();
    range = ARE->getSourceRange();
    os << "stack memory allocated by call to alloca() on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
    const BlockDecl *BD = BR->getCodeRegion()->getDecl();
    SourceLocation L = BD->getBeginLoc();
    range = BD->getSourceRange();
    os << "stack-allocated block declared on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
    os << "stack memory associated with local variable '" << VR->getString()
       << '\'';
    range = VR->getDecl()->getSourceRange();
  } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
    QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "'";
    range = TOR->getExpr()->getSourceRange();
  } else {
    llvm_unreachable("Invalid region in ReturnStackAddressChecker.");
  }

  return range;
}

bool StackAddrEscapeChecker::isArcManagedBlock(const MemRegion *R,
                                               CheckerContext &C) {
  assert(R && "MemRegion should not be null");
  return C.getASTContext().getLangOpts().ObjCAutoRefCount &&
         isa<BlockDataRegion>(R);
}

bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R,
                                                 CheckerContext &C) {
  const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace());
  return S->getStackFrame() != C.getStackFrame();
}

bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  if (!dispatch_semaphore_tII)
    dispatch_semaphore_tII =
        &B.getASTContext().Idents.get("dispatch_semaphore_t");
  for (const auto &C : B.captures()) {
    const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
    if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
      return true;
  }
  return false;
}
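
// Returns the stack regions referenced by the block's captured variables.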
SmallVector<const MemRegion *, 4>
StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
                                                CheckerContext &C) {
  SmallVector<const MemRegion *, 4> Regions;
  BlockDataRegion::referenced_vars_iterator I = B.referenced_vars_begin();
  BlockDataRegion::referenced_vars_iterator E = B.referenced_vars_end();
  for (; I != E; ++I) {
    SVal Val = C.getState()->getSVal(I.getCapturedRegion());
    const MemRegion *Region = Val.getAsRegion();
    if (Region && isa<StackSpaceRegion>(Region->getMemorySpace()))
      Regions.push_back(Region);
  }
  return Regions;
}

void StackAddrEscapeChecker::EmitStackError(CheckerContext &C,
                                            const MemRegion *R,
                                            const Expr *RetE) const {
  ExplodedNode *N = C.generateNonFatalErrorNode();
  if (!N)
    return;
  if (!BT_returnstack)
    BT_returnstack = std::make_unique<BuiltinBug>(
        this, "Return of address to stack-allocated memory");
  // Generate a report for this bug.
  SmallString<128> buf;
  llvm::raw_svector_ostream os(buf);
  SourceRange range = genName(os, R, C.getASTContext());
  os << " returned to caller";
  auto report =
      std::make_unique<PathSensitiveBugReport>(*BT_returnstack, os.str(), N);
  report->addRange(RetE->getSourceRange());
  if (range.isValid())
    report->addRange(range);
  C.emitReport(std::move(report));
}

void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // There is a not-too-uncommon idiom in which a block passed to
  // dispatch_async captures a semaphore and the calling thread then blocks on
  // dispatch_semaphore_wait until the block has finished executing. To avoid
  // false positives (for now), we ignore all blocks that capture a variable
  // of type "dispatch_semaphore_t".
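  //
  // A minimal sketch of that idiom (illustrative; 'Queue' stands for any
  // dispatch queue):
  //
  //   dispatch_semaphore_t Sem = dispatch_semaphore_create(0);
  //   dispatch_async(Queue, ^{
  //     // ... may legitimately use the caller's stack memory ...
  //     dispatch_semaphore_signal(Sem);
  //   });
  //   dispatch_semaphore_wait(Sem, DISPATCH_TIME_FOREVER);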
  if (isSemaphoreCaptured(*B.getDecl()))
    return;
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    // The block passed to dispatch_async may capture another block created on
    // the stack. However, there is no leak in this situation: whether or not
    // ARC is enabled, dispatch_async copies the passed "outer" block (via
    // Block_copy), and if that block has captured another "inner" block, the
    // "inner" block is copied as well.
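    //
    // Illustrative sketch ('Queue' stands for any dispatch queue):
    //
    //   void (^Inner)(void) = ^{ /* uses stack memory */ };
    //   dispatch_async(Queue, ^{ Inner(); }); // both blocks end up copied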
    if (isa<BlockDataRegion>(Region))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackasync)
      BT_capturedstackasync = std::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by an asynchronously-executed block";
    auto Report = std::make_unique<PathSensitiveBugReport>(
        *BT_capturedstackasync, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkReturnedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    if (isArcManagedBlock(Region, C) || isNotInCurrentFrame(Region, C))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackret)
      BT_capturedstackret = std::make_unique<BuiltinBug>(
          this, "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by a returned block";
    auto Report = std::make_unique<PathSensitiveBugReport>(*BT_capturedstackret,
                                                           Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}

void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
    return;
  if (!Call.isGlobalCFunction("dispatch_after") &&
      !Call.isGlobalCFunction("dispatch_async"))
    return;
  for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
    if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
            Call.getArgSVal(Idx).getAsRegion()))
      checkAsyncExecutedBlockCaptures(*B, C);
  }
}

void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;

  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);

  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a call to the copy constructor, possibly wrapped in
  // an ExprWithCleanups node.)
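  //
  // For example (illustrative):
  //
  //   struct S { int Data[4]; };
  //   S make() {
  //     S Local;
  //     return Local; // fine: 'Local' is copied, no stack address escapes
  //   }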
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied,
  // so the stack address does not escape here.
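  //
  // An illustrative case in which Clang may insert this cast (returning a
  // block under ARC):
  //
  //   typedef void (^Callback)(void);
  //   Callback make() {
  //     int x = 0;
  //     return ^{ (void)x; }; // block is copied off the stack, no escape
  //   }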
  if (auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }

  EmitStackError(C, R, RetE);
}

void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS,
                                              CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  ProgramStateRef State = Ctx.getState();

  // Iterate over all bindings to global variables and check whether any of
  // them refers to a memory region in the stack space.
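  //
  // For example (illustrative):
  //
  //   int *Global;
  //   void leak() {
  //     int Local = 0;
  //     Global = &Local; // reported when 'leak' returns
  //   }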
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    const StackFrameContext *CurSFC;

  public:
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;

    CallBack(CheckerContext &CC) : Ctx(CC), CurSFC(CC.getStackFrame()) {}

    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace()))
        return true;
      const MemRegion *VR = Val.getAsRegion();
      if (VR && isa<StackSpaceRegion>(VR->getMemorySpace()) &&
          !isArcManagedBlock(VR, Ctx) && !isNotInCurrentFrame(VR, Ctx))
        V.emplace_back(Region, VR);
      return true;
    }
  };

  CallBack Cb(Ctx);
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);

  if (Cb.V.empty())
    return;

  // Generate an error node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State);
  if (!N)
    return;

  if (!BT_stackleak)
    BT_stackleak = std::make_unique<BuiltinBug>(
        this, "Stack address stored into global variable",
        "Stack address was saved into a global variable. "
        "This is dangerous because the address will become "
        "invalid after returning from the function");

  for (const auto &P : Cb.V) {
    // Generate a report for this bug.
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, P.second, Ctx.getASTContext());
    Out << " is still referred to by the ";
    if (isa<StaticGlobalSpaceRegion>(P.first->getMemorySpace()))
      Out << "static";
    else
      Out << "global";
    Out << " variable '";
    const VarRegion *VR = cast<VarRegion>(P.first->getBaseRegion());
    Out << *VR->getDecl()
        << "' upon returning to the caller.  This will be a dangling reference";
    auto Report =
        std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);

    Ctx.emitReport(std::move(Report));
  }
}

void ento::registerStackAddrEscapeBase(CheckerManager &mgr) {
  mgr.registerChecker<StackAddrEscapeChecker>();
}

bool ento::shouldRegisterStackAddrEscapeBase(const LangOptions &LO) {
  return true;
}

#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>();    \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
  }                                                                            \
                                                                               \
  bool ento::shouldRegister##name(const LangOptions &LO) { return true; }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)
385