//===--- ExprEngineCallAndReturn.cpp - Support for call/return -*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Decl.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
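///
/// A hypothetical illustration (not taken from any particular caller) of the
/// covariant-return situation this handles:
/// \code
///   struct Base    { virtual Base *clone(); };
///   struct Derived : Base { Derived *clone() override; }; // covariant
///
///   Base *b = getSomeBase();
///   Base *copy = b->clone(); // Statically typed as returning 'Base *', but
///                            // if the call is devirtualized to
///                            // Derived::clone(), the returned value is a
///                            // 'Derived *' and needs a derived-to-base cast.
/// \endcode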
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that it never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of the call exit sequence)
/// 2. Bind the return value
/// 3. Run remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the symbol reaper with a null statement and the callee's
    // location context, telling it to clean up everything in the callee's
    // context (and its children). We use the callee's function body as a
    // diagnostic statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
        &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*wasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*wasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*wasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*wasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
                                   PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}

bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
  // When there are no branches in the function, inlining it does not
  // introduce any exponential path complexity. Such functions also don't
  // trigger various fundamental problems with our inlining mechanism, such
  // as the problem of inlined defensive checks. Hence the isLinear() check.
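  //
  // A minimal illustration (hypothetical code, not from any test) of an
  // inlined defensive check:
  //   int get(int *p) { if (!p) return 0; return *p; }
  // Inlining get() splits the state on 'p == nullptr' even when the caller
  // knows 'p' is non-null, which can lead to spurious warnings about 'p'
  // further down in the caller.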
  const CFG *Cfg = ADC->getCFG();
  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
}

bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
}

bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
  const CFG *Cfg = ADC->getCFG();
  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
}

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      if (!isSmall(CalleeADC))
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is a map from the receiver region to a flag specifying whether we
// consider this region's type information precise along the given path.
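//
// For example (purely illustrative):
//   void update(Shape *s) { s->draw(); }
// If the dynamic type of '*s' is not known precisely, the call may be
// explored twice: once assuming the inferred definition of draw() is the
// right one (and inlining it), and once evaluating the call conservatively.
// The receiver region of 's' is recorded in this map so that subsequent
// calls through the same region are not split again on the same path.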
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
      CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
                               currBldrCtx->blockCount(), currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (Optional<SVal> V =
            getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
                 ->getStackFrame()->getParent()
                 ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}

void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
                                            ExplodedNode *Pred,
                                            const CallEvent &Call) {
  ProgramStateRef State = Pred->getState();
  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
  if (CleanedState == State) {
    Dst.insert(Pred);
    return;
  }

  const Expr *E = Call.getOriginExpr();
  const LocationContext *LC = Call.getLocationContext();
  NodeBuilder B(Pred, Dst, *currBldrCtx);
  static SimpleProgramPointTag Tag("ExprEngine",
                                   "Finish argument construction");
  PreStmt PP(E, LC, &Tag);
  B.generateNode(PP, CleanedState, Pred);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
                                            Call, *this);

  // Actually evaluate the function call.  We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (auto I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, Call);

  ExplodedNodeSet dstPostCall;
  getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
                                             Call, *this);

  // Handle the escape of symbols conjured while invalidating the regions
  // above. Note that for inlined calls the nodes were put back onto the
  // worklist, so we can assume that every node here belongs to a
  // conservatively evaluated call at this point.

  // Run the pointerEscape callback with the newly conjured symbols.
  SmallVector<std::pair<SVal, SVal>, 8> Escaped;
  for (auto I : dstPostCall) {
    NodeBuilder B(I, Dst, *currBldrCtx);
    ProgramStateRef State = I->getState();
    Escaped.clear();
    {
      unsigned Arg = -1;
      for (const ParmVarDecl *PVD : Call.parameters()) {
        ++Arg;
        QualType ParamTy = PVD->getType();
        if (ParamTy.isNull() ||
            (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
          continue;
        QualType Pointee = ParamTy->getPointeeType();
        if (Pointee.isConstQualified() || Pointee->isVoidType())
          continue;
        if (const MemRegion *MR = Call.getArgSVal(Arg).getAsRegion())
          Escaped.emplace_back(loc::MemRegionVal(MR),
                               State->getSVal(MR, Pointee));
      }
    }

    State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
                                        PSK_EscapeOutParameters, &Call);

    if (State == I->getState())
      Dst.insert(I);
    else
      B.generateNode(I->getLocation(), State, I);
  }
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
                                     RTC->getConstructionContext(), CallOpts);
    const MemRegion *TargetR = Target.getAsRegion();
    assert(TargetR);
    // Invalidate the region so that it doesn't look uninitialized. If this is
    // a field or element constructor, we do not want to invalidate
    // the whole structure. Pointer escape is meaningless because
    // the structure is a product of conservative evaluation
    // and therefore contains nothing interesting at this point.
    RegionAndSymbolInvalidationTraits ITraits;
    ITraits.setTrait(TargetR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
    State = State->invalidateRegions(TargetR, E, Count, LCtx,
                                     /* CausesPointerEscape=*/false, nullptr,
                                     &Call, &ITraits);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred, ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.MayInlineCXXAllocator)
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.ShouldIncludeTemporaryDtorsInCFG)
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor &&
        !Opts.MayInlineCXXTemporaryDtors)
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.MayInlineCXXAllocator)
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.MayInlineObjCMethod)
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
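///
/// A purely illustrative example of a class this heuristic would flag:
/// \code
///   template <typename T> class MyVec {
///   public:
///     using iterator = T *;
///     iterator begin();
///     iterator end();
///   };
/// \endcode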
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
      return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.MayInlineTemplateFunctions)
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.MayInlineCXXStandardLibrary)
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.MayInlineCXXContainerMethods)
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++
      // shared_ptr. We don't currently do a good job modeling shared_ptr
      // because we can't see the reference count, so treating it as opaque
      // is probably the best idea.
      if (!Opts.MayInlineCXXSharedPtrDtor)
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline huge functions.
  if (isHuge(CalleeADC))
    return false;

  // It is possible that the live variables analysis cannot be
  // run.  If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  // Do not inline if recursive or if we've reached the max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      (!isSmall(CalleeADC) || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.MaxTimesInlineLarge) &&
      isLarge(CalleeADC)) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
    return false;

  return true;
}

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                               DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}